Remove unnecessary header files; combine observations by summing their counts

This commit is contained in:
2024-07-04 11:49:28 +02:00
parent 2b14ed84aa
commit 7a15b02b04
3 changed files with 93 additions and 90 deletions

View File

@@ -233,24 +233,24 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop=
# FWHM of FOC have been estimated at about 0.03" across 1500-5000 Angstrom band, which is about 2 detector pixels wide
# see Jedrzejewski, R.; Nota, A.; Hack, W. J., A Comparison Between FOC and WFPC2
# Bibcode : 1995chst.conf...10J
I_stokes, Q_stokes, U_stokes, Stokes_cov = proj_red.compute_Stokes(
I_stokes, Q_stokes, U_stokes, Stokes_cov, header_stokes = proj_red.compute_Stokes(
data_array, error_array, data_mask, headers, FWHM=smoothing_FWHM, scale=smoothing_scale, smoothing=smoothing_function, transmitcorr=transmitcorr
)
I_bkg, Q_bkg, U_bkg, S_cov_bkg = proj_red.compute_Stokes(
I_bkg, Q_bkg, U_bkg, S_cov_bkg, header_bkg = proj_red.compute_Stokes(
background, background_error, np.array(True).reshape(1, 1), headers, FWHM=None, scale=smoothing_scale, smoothing=smoothing_function, transmitcorr=False
)
# Step 3:
# Rotate images to have North up
if rotate_North:
I_stokes, Q_stokes, U_stokes, Stokes_cov, data_mask, headers = proj_red.rotate_Stokes(
I_stokes, Q_stokes, U_stokes, Stokes_cov, data_mask, headers, SNRi_cut=None
I_stokes, Q_stokes, U_stokes, Stokes_cov, data_mask, header_stokes = proj_red.rotate_Stokes(
I_stokes, Q_stokes, U_stokes, Stokes_cov, data_mask, header_stokes, SNRi_cut=None
)
I_bkg, Q_bkg, U_bkg, S_cov_bkg, _, _ = proj_red.rotate_Stokes(I_bkg, Q_bkg, U_bkg, S_cov_bkg, np.array(True).reshape(1, 1), headers, SNRi_cut=None)
I_bkg, Q_bkg, U_bkg, S_cov_bkg, _, _ = proj_red.rotate_Stokes(I_bkg, Q_bkg, U_bkg, S_cov_bkg, np.array(True).reshape(1, 1), header_bkg, SNRi_cut=None)
# Compute polarimetric parameters (polarization degree and angle).
P, debiased_P, s_P, s_P_P, PA, s_PA, s_PA_P = proj_red.compute_pol(I_stokes, Q_stokes, U_stokes, Stokes_cov, headers)
P_bkg, debiased_P_bkg, s_P_bkg, s_P_P_bkg, PA_bkg, s_PA_bkg, s_PA_P_bkg = proj_red.compute_pol(I_bkg, Q_bkg, U_bkg, S_cov_bkg, headers)
P, debiased_P, s_P, s_P_P, PA, s_PA, s_PA_P = proj_red.compute_pol(I_stokes, Q_stokes, U_stokes, Stokes_cov, header_stokes)
P_bkg, debiased_P_bkg, s_P_bkg, s_P_P_bkg, PA_bkg, s_PA_bkg, s_PA_P_bkg = proj_red.compute_pol(I_bkg, Q_bkg, U_bkg, S_cov_bkg, header_bkg)
# Step 4:
# Save image to FITS.
@@ -267,7 +267,7 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop=
PA,
s_PA,
s_PA_P,
headers,
header_stokes,
data_mask,
figname,
data_folder=data_folder,
@@ -286,21 +286,21 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop=
data_mask = Stokes_hdul["data_mask"].data.astype(bool)
print(
"F_int({0:.0f} Angs) = ({1} ± {2})e{3} ergs.cm^-2.s^-1.Angs^-1".format(
headers[0]["photplam"],
header_stokes["photplam"],
*sci_not(
Stokes_hdul[0].data[data_mask].sum() * headers[0]["photflam"],
np.sqrt(Stokes_hdul[3].data[0, 0][data_mask].sum()) * headers[0]["photflam"],
Stokes_hdul[0].data[data_mask].sum() * header_stokes["photflam"],
np.sqrt(Stokes_hdul[3].data[0, 0][data_mask].sum()) * header_stokes["photflam"],
2,
out=int,
),
)
)
print("P_int = {0:.1f} ± {1:.1f} %".format(headers[0]["p_int"] * 100.0, np.ceil(headers[0]["sP_int"] * 1000.0) / 10.0))
print("PA_int = {0:.1f} ± {1:.1f} °".format(princ_angle(headers[0]["pa_int"]), princ_angle(np.ceil(headers[0]["sPA_int"] * 10.0) / 10.0)))
print("P_int = {0:.1f} ± {1:.1f} %".format(header_stokes["p_int"] * 100.0, np.ceil(header_stokes["sP_int"] * 1000.0) / 10.0))
print("PA_int = {0:.1f} ± {1:.1f} °".format(princ_angle(header_stokes["pa_int"]), princ_angle(np.ceil(header_stokes["sPA_int"] * 10.0) / 10.0)))
# Background values
print(
"F_bkg({0:.0f} Angs) = ({1} ± {2})e{3} ergs.cm^-2.s^-1.Angs^-1".format(
headers[0]["photplam"], *sci_not(I_bkg[0, 0] * headers[0]["photflam"], np.sqrt(S_cov_bkg[0, 0][0, 0]) * headers[0]["photflam"], 2, out=int)
header_stokes["photplam"], *sci_not(I_bkg[0, 0] * header_stokes["photflam"], np.sqrt(S_cov_bkg[0, 0][0, 0]) * header_stokes["photflam"], 2, out=int)
)
)
print("P_bkg = {0:.1f} ± {1:.1f} %".format(debiased_P_bkg[0, 0] * 100.0, np.ceil(s_P_bkg[0, 0] * 1000.0) / 10.0))

View File

@@ -93,7 +93,7 @@ def get_obs_data(infiles, data_folder="", compute_flux=False):
def save_Stokes(
I_stokes, Q_stokes, U_stokes, Stokes_cov, P, debiased_P, s_P, s_P_P, PA, s_PA, s_PA_P, headers, data_mask, filename, data_folder="", return_hdul=False
I_stokes, Q_stokes, U_stokes, Stokes_cov, P, debiased_P, s_P, s_P_P, PA, s_PA, s_PA_P, header_stokes, data_mask, filename, data_folder="", return_hdul=False
):
"""
Save computed polarimetry parameters to a single fits file,
@@ -130,9 +130,8 @@ def save_Stokes(
Only returned if return_hdul is True.
"""
# Create new WCS object given the modified images
ref_header = headers[0]
exp_tot = np.array([header["exptime"] for header in headers]).sum()
new_wcs = WCS(ref_header).deepcopy()
exp_tot = header_stokes['exptime']
new_wcs = WCS(header_stokes).deepcopy()
if data_mask.shape != (1, 1):
vertex = clean_ROI(data_mask)
@@ -141,23 +140,23 @@ def save_Stokes(
new_wcs.wcs.crpix = np.array(new_wcs.wcs.crpix) - vertex[0::-2]
header = new_wcs.to_header()
header["TELESCOP"] = (ref_header["telescop"] if "TELESCOP" in list(ref_header.keys()) else "HST", "telescope used to acquire data")
header["INSTRUME"] = (ref_header["instrume"] if "INSTRUME" in list(ref_header.keys()) else "FOC", "identifier for instrument used to acuire data")
header["PHOTPLAM"] = (ref_header["photplam"], "Pivot Wavelength")
header["PHOTFLAM"] = (ref_header["photflam"], "Inverse Sensitivity in DN/sec/cm**2/Angst")
header["TELESCOP"] = (header_stokes["telescop"] if "TELESCOP" in list(header_stokes.keys()) else "HST", "telescope used to acquire data")
header["INSTRUME"] = (header_stokes["instrume"] if "INSTRUME" in list(header_stokes.keys()) else "FOC", "identifier for instrument used to acuire data")
header["PHOTPLAM"] = (header_stokes["photplam"], "Pivot Wavelength")
header["PHOTFLAM"] = (header_stokes["photflam"], "Inverse Sensitivity in DN/sec/cm**2/Angst")
header["EXPTOT"] = (exp_tot, "Total exposure time in sec")
header["PROPOSID"] = (ref_header["proposid"], "PEP proposal identifier for observation")
header["TARGNAME"] = (ref_header["targname"], "Target name")
header["PROPOSID"] = (header_stokes["proposid"], "PEP proposal identifier for observation")
header["TARGNAME"] = (header_stokes["targname"], "Target name")
header["ORIENTAT"] = (np.arccos(new_wcs.wcs.pc[0, 0]) * 180.0 / np.pi, "Angle between North and the y-axis of the image")
header["FILENAME"] = (filename, "Original filename")
header["BKG_TYPE"] = (ref_header["BKG_TYPE"], "Bkg estimation method used during reduction")
header["BKG_SUB"] = (ref_header["BKG_SUB"], "Amount of bkg subtracted from images")
header["SMOOTH"] = (ref_header["SMOOTH"], "Smoothing method used during reduction")
header["SAMPLING"] = (ref_header["SAMPLING"], "Resampling performed during reduction")
header["P_INT"] = (ref_header["P_int"], "Integrated polarization degree")
header["sP_INT"] = (ref_header["sP_int"], "Integrated polarization degree error")
header["PA_INT"] = (ref_header["PA_int"], "Integrated polarization angle")
header["sPA_INT"] = (ref_header["sPA_int"], "Integrated polarization angle error")
header["BKG_TYPE"] = (header_stokes["BKG_TYPE"], "Bkg estimation method used during reduction")
header["BKG_SUB"] = (header_stokes["BKG_SUB"], "Amount of bkg subtracted from images")
header["SMOOTH"] = (header_stokes["SMOOTH"], "Smoothing method used during reduction")
header["SAMPLING"] = (header_stokes["SAMPLING"], "Resampling performed during reduction")
header["P_INT"] = (header_stokes["P_int"], "Integrated polarization degree")
header["sP_INT"] = (header_stokes["sP_int"], "Integrated polarization degree error")
header["PA_INT"] = (header_stokes["PA_int"], "Integrated polarization angle")
header["sPA_INT"] = (header_stokes["sPA_int"], "Integrated polarization angle error")
# Crop Data to mask
if data_mask.shape != (1, 1):

View File

@@ -521,23 +521,23 @@ def get_error(data_array, headers, error_array=None, data_mask=None, sub_type=No
n_data_array, c_error_bkg, headers, background = bkg_hist(
data, error, mask, headers, subtract_error=subtract_error, display=display, savename=savename, plots_folder=plots_folder
)
sub_type, subtract_error = "histogram", str(int(subtract_error>0.))
sub_type, subtract_error = "histogram ", str(int(subtract_error>0.))
elif isinstance(sub_type, str):
if sub_type.lower() in ["auto"]:
n_data_array, c_error_bkg, headers, background = bkg_fit(
data, error, mask, headers, subtract_error=subtract_error, display=display, savename=savename, plots_folder=plots_folder
)
sub_type, subtract_error = "histogram fit", "bkg+%.1fsigma"%subtract_error
sub_type, subtract_error = "histogram fit ", "mean+%.1fsigma"%subtract_error if subtract_error != 0. else 0.
else:
n_data_array, c_error_bkg, headers, background = bkg_hist(
data, error, mask, headers, sub_type=sub_type, subtract_error=subtract_error, display=display, savename=savename, plots_folder=plots_folder
)
sub_type, subtract_error = "histogram", str(int(subtract_error>0.))
sub_type, subtract_error = "histogram ", "mean+%.1fsigma"%subtract_error if subtract_error != 0. else 0.
elif isinstance(sub_type, tuple):
n_data_array, c_error_bkg, headers, background = bkg_mini(
data, error, mask, headers, sub_shape=sub_type, subtract_error=subtract_error, display=display, savename=savename, plots_folder=plots_folder
)
sub_type, subtract_error = "minimal flux", str(int(subtract_error>0.))
sub_type, subtract_error = "minimal flux ", "mean+%.1fsigma"%subtract_error if subtract_error != 0. else 0.
else:
print("Warning: Invalid subtype.")
@@ -964,7 +964,7 @@ def smooth_data(data_array, error_array, data_mask, headers, FWHM=1.5, scale="pi
raise ValueError("{} is not a valid smoothing option".format(smoothing))
for header in headers:
header["SMOOTH"] = (" ".join([smoothing,FWHM_size,scale]),"Smoothing method used during reduction")
header["SMOOTH"] = (" ".join([smoothing,FWHM_size,FWHM_scale]),"Smoothing method used during reduction")
return smoothed, error
@@ -1229,8 +1229,10 @@ def compute_Stokes(data_array, error_array, data_mask, headers, FWHM=None, scale
transmit *= transmit2 * transmit3 * transmit4
pol_eff = np.array([globals()["pol_efficiency"]["pol0"], globals()["pol_efficiency"]["pol60"], globals()["pol_efficiency"]["pol120"]])
# Calculating correction factor
# Calculating correction factor: allows all pol_filt to share same exptime and inverse sensitivity (taken to be the one from POL0)
corr = np.array([1.0 * h["photflam"] / h["exptime"] for h in pol_headers]) * pol_headers[0]["exptime"] / pol_headers[0]["photflam"]
pol_headers[1]['photflam'], pol_headers[1]['exptime'] = pol_headers[0]['photflam'], pol_headers[1]['exptime']
pol_headers[2]['photflam'], pol_headers[2]['exptime'] = pol_headers[0]['photflam'], pol_headers[2]['exptime']
# Orientation and error for each polarizer
# fmax = np.finfo(np.float64).max
@@ -1441,25 +1443,34 @@ def compute_Stokes(data_array, error_array, data_mask, headers, FWHM=None, scale
Stokes_cov[1, 1] += s_Q2_axis + s_Q2_stat
Stokes_cov[2, 2] += s_U2_axis + s_U2_stat
# Save values to single header
header_stokes = pol_headers[0]
else:
all_I_stokes = np.zeros((np.unique(rotate).size, data_array.shape[1], data_array.shape[2]))
all_Q_stokes = np.zeros((np.unique(rotate).size, data_array.shape[1], data_array.shape[2]))
all_U_stokes = np.zeros((np.unique(rotate).size, data_array.shape[1], data_array.shape[2]))
all_Stokes_cov = np.zeros((np.unique(rotate).size, 3, 3, data_array.shape[1], data_array.shape[2]))
all_header_stokes = [{},]*np.unique(rotate).size
for i,rot in enumerate(np.unique(rotate)):
rot_mask = (rotate == rot)
all_I_stokes[i], all_Q_stokes[i], all_U_stokes[i], all_Stokes_cov[i] = compute_Stokes(data_array[rot_mask], error_array[rot_mask], data_mask, [headers[i] for i in np.arange(len(headers))[rot_mask]], FWHM=FWHM, scale=scale, smoothing=smoothing, transmitcorr=transmitcorr, integrate=False)
all_I_stokes[i], all_Q_stokes[i], all_U_stokes[i], all_Stokes_cov[i], all_header_stokes[i] = compute_Stokes(data_array[rot_mask], error_array[rot_mask], data_mask, [headers[i] for i in np.arange(len(headers))[rot_mask]], FWHM=FWHM, scale=scale, smoothing=smoothing, transmitcorr=transmitcorr, integrate=False)
all_exp = np.array([float(h['exptime']) for h in all_header_stokes])
I_stokes = all_I_stokes.sum(axis=0)/np.unique(rotate).size
Q_stokes = all_Q_stokes.sum(axis=0)/np.unique(rotate).size
U_stokes = all_U_stokes.sum(axis=0)/np.unique(rotate).size
I_stokes = np.sum([exp*I for exp, I in zip(all_exp, all_I_stokes)],axis=0) / all_exp.sum()
Q_stokes = np.sum([exp*Q for exp, Q in zip(all_exp, all_Q_stokes)],axis=0) / all_exp.sum()
U_stokes = np.sum([exp*U for exp, U in zip(all_exp, all_U_stokes)],axis=0) / all_exp.sum()
Stokes_cov = np.zeros((3, 3, I_stokes.shape[0], I_stokes.shape[1]))
for i in range(3):
Stokes_cov[i,i] = np.sum(all_Stokes_cov[:,i,i],axis=0)/np.unique(rotate).size
Stokes_cov[i,i] = np.sum([exp*cov for exp, cov in zip(all_exp, all_Stokes_cov[:,i,i])], axis=0) / all_exp.sum()
for j in [x for x in range(3) if x!=i]:
Stokes_cov[i,j] = np.sqrt(np.sum(all_Stokes_cov[:,i,j]**2,axis=0)/np.unique(rotate).size)
Stokes_cov[j,i] = np.sqrt(np.sum(all_Stokes_cov[:,j,i]**2,axis=0)/np.unique(rotate).size)
Stokes_cov[i,j] = np.sqrt(np.sum([exp*cov**2 for exp, cov in zip(all_exp, all_Stokes_cov[:,i,j])], axis=0) / all_exp.sum())
Stokes_cov[j,i] = np.sqrt(np.sum([exp*cov**2 for exp, cov in zip(all_exp, all_Stokes_cov[:,j,i])], axis=0) / all_exp.sum())
# Save values to single header
header_stokes = all_header_stokes[0]
header_stokes['exptime'] = all_exp.sum()
# Nan handling :
fmax = np.finfo(np.float64).max
@@ -1497,16 +1508,15 @@ def compute_Stokes(data_array, error_array, data_mask, headers, FWHM=None, scale
U_diluted**2 * Q_diluted_err**2 + Q_diluted**2 * U_diluted_err**2 - 2.0 * Q_diluted * U_diluted * QU_diluted_err
)
for header in headers:
header["P_int"] = (P_diluted, "Integrated polarization degree")
header["sP_int"] = (np.ceil(P_diluted_err * 1000.0) / 1000.0, "Integrated polarization degree error")
header["PA_int"] = (PA_diluted, "Integrated polarization angle")
header["sPA_int"] = (np.ceil(PA_diluted_err * 10.0) / 10.0, "Integrated polarization angle error")
header_stokes["P_int"] = (P_diluted, "Integrated polarization degree")
header_stokes["sP_int"] = (np.ceil(P_diluted_err * 1000.0) / 1000.0, "Integrated polarization degree error")
header_stokes["PA_int"] = (PA_diluted, "Integrated polarization angle")
header_stokes["sPA_int"] = (np.ceil(PA_diluted_err * 10.0) / 10.0, "Integrated polarization angle error")
return I_stokes, Q_stokes, U_stokes, Stokes_cov
return I_stokes, Q_stokes, U_stokes, Stokes_cov, header_stokes
def compute_pol(I_stokes, Q_stokes, U_stokes, Stokes_cov, headers):
def compute_pol(I_stokes, Q_stokes, U_stokes, Stokes_cov, header_stokes):
"""
Compute the polarization degree (in %) and angle (in deg) and their
respective errors from given Stokes parameters.
@@ -1523,8 +1533,8 @@ def compute_pol(I_stokes, Q_stokes, U_stokes, Stokes_cov, headers):
+45/-45deg linear polarization intensity
Stokes_cov : numpy.ndarray
Covariance matrix of the Stokes parameters I, Q, U.
headers : header list
List of headers corresponding to the images in data_array.
header_stokes : astropy.fits.header.Header
Header file associated with the Stokes fluxes.
----------
Returns:
P : numpy.ndarray
@@ -1543,9 +1553,6 @@ def compute_pol(I_stokes, Q_stokes, U_stokes, Stokes_cov, headers):
s_PA_P : numpy.ndarray
Image (2D floats) containing the Poisson noise error on the
polarization angle.
new_headers : header list
Updated list of headers corresponding to the reduced images accounting
for the new orientation angle.
"""
# Polarization degree and angle computation
mask = I_stokes > 0.0
@@ -1595,7 +1602,7 @@ def compute_pol(I_stokes, Q_stokes, U_stokes, Stokes_cov, headers):
# Compute the total exposure time so that
# I_stokes*exp_tot = N_tot the total number of events
exp_tot = np.array([header["exptime"] for header in headers]).sum()
exp_tot = header_stokes["exptime"]
# print("Total exposure time : {} sec".format(exp_tot))
N_obs = I_stokes * exp_tot
@@ -1616,7 +1623,7 @@ def compute_pol(I_stokes, Q_stokes, U_stokes, Stokes_cov, headers):
return P, debiased_P, s_P, s_P_P, PA, s_PA, s_PA_P
def rotate_Stokes(I_stokes, Q_stokes, U_stokes, Stokes_cov, data_mask, headers, SNRi_cut=None):
def rotate_Stokes(I_stokes, Q_stokes, U_stokes, Stokes_cov, data_mask, header_stokes, SNRi_cut=None):
"""
Use scipy.ndimage.rotate to rotate I_stokes to an angle, and a rotation
matrix to rotate Q, U of a given angle in degrees and update header
@@ -1636,8 +1643,8 @@ def rotate_Stokes(I_stokes, Q_stokes, U_stokes, Stokes_cov, data_mask, headers,
Covariance matrix of the Stokes parameters I, Q, U.
data_mask : numpy.ndarray
2D boolean array delimiting the data to work on.
headers : header list
List of headers corresponding to the reduced images.
header_stokes : astropy.fits.header.Header
Header file associated with the Stokes fluxes.
SNRi_cut : float, optional
Cut that should be applied to the signal-to-noise ratio on I.
Any SNR < SNRi_cut won't be displayed. If None, cut won't be applied.
@@ -1655,8 +1662,8 @@ def rotate_Stokes(I_stokes, Q_stokes, U_stokes, Stokes_cov, data_mask, headers,
accounting for +45/-45deg linear polarization intensity.
new_Stokes_cov : numpy.ndarray
Updated covariance matrix of the Stokes parameters I, Q, U.
new_headers : header list
Updated list of headers corresponding to the reduced images accounting
new_header_stokes : astropy.fits.header.Header
Updated Header file associated with the Stokes fluxes accounting
for the new orientation angle.
new_data_mask : numpy.ndarray
Updated 2D boolean array delimiting the data to work on.
@@ -1674,11 +1681,12 @@ def rotate_Stokes(I_stokes, Q_stokes, U_stokes, Stokes_cov, data_mask, headers,
U_stokes[i, j] = eps * np.sqrt(Stokes_cov[2, 2][i, j])
# Rotate I_stokes, Q_stokes, U_stokes using rotation matrix
ang = np.zeros((len(headers),))
for i, head in enumerate(headers):
pc = WCS(head).celestial.wcs.pc[0,0]
ang[i] = -np.arccos(WCS(head).celestial.wcs.pc[0,0]) * 180.0 / np.pi if np.abs(pc) < 1. else 0.
ang = ang.mean()
# ang = np.zeros((len(headers),))
# for i, head in enumerate(headers):
# pc = WCS(head).celestial.wcs.pc[0,0]
# ang[i] = -np.arccos(WCS(head).celestial.wcs.pc[0,0]) * 180.0 / np.pi if np.abs(pc) < 1. else 0.
pc = WCS(header_stokes).celestial.wcs.pc[0,0]
ang = -np.arccos(WCS(header_stokes).celestial.wcs.pc[0,0]) * 180.0 / np.pi if np.abs(pc) < 1. else 0.
alpha = np.pi / 180.0 * ang
mrot = np.array([[1.0, 0.0, 0.0], [0.0, np.cos(2.0 * alpha), np.sin(2.0 * alpha)], [0, -np.sin(2.0 * alpha), np.cos(2.0 * alpha)]])
@@ -1714,25 +1722,22 @@ def rotate_Stokes(I_stokes, Q_stokes, U_stokes, Stokes_cov, data_mask, headers,
new_Stokes_cov[:, :, i, j] = np.dot(mrot, np.dot(new_Stokes_cov[:, :, i, j], mrot.T))
# Update headers to new angle
new_headers = []
mrot = np.array([[np.cos(-alpha), -np.sin(-alpha)], [np.sin(-alpha), np.cos(-alpha)]])
for header in headers:
new_header = deepcopy(header)
new_header["orientat"] = header["orientat"] + ang
new_wcs = WCS(header).celestial.deepcopy()
new_wcs.wcs.pc = np.dot(mrot, new_wcs.wcs.pc)
new_wcs.wcs.crpix = np.dot(mrot, new_wcs.wcs.crpix - old_center[::-1]) + new_center[::-1]
new_wcs.wcs.set()
for key, val in new_wcs.to_header().items():
new_header.set(key, val)
if new_wcs.wcs.pc[0, 0] == 1.0:
new_header.set("PC1_1", 1.0)
if new_wcs.wcs.pc[1, 1] == 1.0:
new_header.set("PC2_2", 1.0)
new_header["orientat"] = header["orientat"] + ang
new_header_stokes = deepcopy(header_stokes)
new_header_stokes["orientat"] = header_stokes["orientat"] + ang
new_wcs = WCS(header_stokes).celestial.deepcopy()
new_headers.append(new_header)
new_wcs.wcs.pc = np.dot(mrot, new_wcs.wcs.pc)
new_wcs.wcs.crpix = np.dot(mrot, new_wcs.wcs.crpix - old_center[::-1]) + new_center[::-1]
new_wcs.wcs.set()
for key, val in new_wcs.to_header().items():
new_header_stokes.set(key, val)
if new_wcs.wcs.pc[0, 0] == 1.0:
new_header_stokes.set("PC1_1", 1.0)
if new_wcs.wcs.pc[1, 1] == 1.0:
new_header_stokes.set("PC2_2", 1.0)
new_header_stokes["orientat"] = header_stokes["orientat"] + ang
# Nan handling :
fmax = np.finfo(np.float64).max
@@ -1769,13 +1774,12 @@ def rotate_Stokes(I_stokes, Q_stokes, U_stokes, Stokes_cov, data_mask, headers,
U_diluted**2 * Q_diluted_err**2 + Q_diluted**2 * U_diluted_err**2 - 2.0 * Q_diluted * U_diluted * QU_diluted_err
)
for header in new_headers:
header["P_int"] = (P_diluted, "Integrated polarization degree")
header["sP_int"] = (np.ceil(P_diluted_err * 1000.0) / 1000.0, "Integrated polarization degree error")
header["PA_int"] = (PA_diluted, "Integrated polarization angle")
header["sPA_int"] = (np.ceil(PA_diluted_err * 10.0) / 10.0, "Integrated polarization angle error")
new_header_stokes["P_int"] = (P_diluted, "Integrated polarization degree")
new_header_stokes["sP_int"] = (np.ceil(P_diluted_err * 1000.0) / 1000.0, "Integrated polarization degree error")
new_header_stokes["PA_int"] = (PA_diluted, "Integrated polarization angle")
new_header_stokes["sPA_int"] = (np.ceil(PA_diluted_err * 10.0) / 10.0, "Integrated polarization angle error")
return new_I_stokes, new_Q_stokes, new_U_stokes, new_Stokes_cov, new_data_mask, new_headers
return new_I_stokes, new_Q_stokes, new_U_stokes, new_Stokes_cov, new_data_mask, new_header_stokes
def rotate_data(data_array, error_array, data_mask, headers):