clean Combine.py, add orientat computation from WCS

@@ -157,8 +157,8 @@ def combine_Stokes(infiles):
 def main(infiles, target=None, output_dir="./data/"):
     """ """
     from lib.fits import save_Stokes
-    from lib.reduction import compute_pol
     from lib.plots import pol_map
+    from lib.reduction import compute_pol, rotate_Stokes
 
     if target is None:
         target = input("Target name:\n>")

@@ -167,43 +167,33 @@ def main(infiles, target=None, output_dir="./data/"):
     data_folder = prod[0][0]
     files = [p[1] for p in prod]
 
+    # Reduction parameters
+    kwargs = {}
+    # Polarization map output
+    kwargs["SNRp_cut"] = 3.0
+    kwargs["SNRi_cut"] = 1.0
+    kwargs["flux_lim"] = 1e-19, 3e-17
+    kwargs["scale_vec"] = 5
+    kwargs["step_vec"] = 1
+
     if not same_reduction(infiles):
         from FOC_reduction import main as FOC_reduction
 
-        # Reduction parameters
-        kwargs = {}
-        # Background estimation
-        kwargs["error_sub_type"] = "freedman-diaconis"  # sqrt, sturges, rice, scott, freedman-diaconis (default) or shape (example (51, 51))
-        kwargs["subtract_error"] = 0.7
-
-        # Data binning
-        kwargs["pxsize"] = 0.1
-        kwargs["pxscale"] = "arcsec"  # pixel, arcsec or full
-
-        # Smoothing
-        kwargs["smoothing_function"] = "combine"  # gaussian_after, weighted_gaussian_after, gaussian, weighted_gaussian or combine
-        kwargs["smoothing_FWHM"] = 0.2  # If None, no smoothing is done
-        kwargs["smoothing_scale"] = "arcsec"  # pixel or arcsec
-
-        # Polarization map output
-        kwargs["SNRp_cut"] = 3.0  # P measurements with SNR>3
-        kwargs["SNRi_cut"] = 1.0  # I measurements with SNR>30, which implies an uncertainty in P of 4.7%.
-        kwargs["flux_lim"] = 1e-19, 3e-17  # lowest and highest flux displayed on plot, defaults to bkg and maximum in cut if None
-        kwargs["scale_vec"] = 5
-        kwargs["step_vec"] = (
-            1  # plot all vectors in the array; if step_vec = 2, every other vector will be plotted; if step_vec = 0, all vectors are displayed at full length
-        )
         grouped_infiles = same_obs(files, data_folder)
 
         new_infiles = []
         for i, group in enumerate(grouped_infiles):
             new_infiles.append(
-                FOC_reduction(target=target + "-" + str(i + 1), infiles=["/".join([data_folder, file]) for file in group], interactive=True, **kwargs)
+                FOC_reduction(target=target + "-" + str(i + 1), infiles=["/".join([data_folder, file]) for file in group], interactive=True)[0]
             )
 
         infiles = new_infiles
 
-    I_combined, Q_combined, U_combined, IQU_cov_combined, data_mask_combined, header_combined = combine_Stokes(infiles)
+    I_combined, Q_combined, U_combined, IQU_cov_combined, data_mask_combined, header_combined = combine_Stokes(infiles=infiles)
+    I_combined, Q_combined, U_combined, IQU_cov_combined, data_mask_combined, header_combined = rotate_Stokes(
+        I_stokes=I_combined, Q_stokes=Q_combined, U_stokes=U_combined, Stokes_cov=IQU_cov_combined, data_mask=data_mask_combined, header_stokes=header_combined
+    )
 
     P, debiased_P, s_P, s_P_P, PA, s_PA, s_PA_P = compute_pol(
         I_stokes=I_combined, Q_stokes=Q_combined, U_stokes=U_combined, Stokes_cov=IQU_cov_combined, header_stokes=header_combined
     )
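
The new rotate_Stokes step re-aligns the combined Stokes maps (and their covariance) with the North direction given by the header before compute_pol derives the polarization degree and angle. Its implementation is not part of this diff; assuming it applies the standard frame rotation for linear Stokes parameters, in which Q and U rotate at twice the frame angle while I is unchanged, a minimal sketch (hypothetical helper name rotate_QU) is:

import numpy as np

def rotate_QU(Q, U, theta_deg):
    # Rotating the reference frame by theta rotates the linear Stokes
    # parameters Q and U by 2*theta; the intensity I is unaffected.
    two_theta = 2.0 * np.radians(theta_deg)
    Q_rot = Q * np.cos(two_theta) + U * np.sin(two_theta)
    U_rot = -Q * np.sin(two_theta) + U * np.cos(two_theta)
    return Q_rot, U_rot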

@@ -228,7 +218,7 @@ def main(infiles, target=None, output_dir="./data/"):
         return_hdul=True,
     )
 
-    pol_map(Stokes_combined)
+    pol_map(Stokes_combined, **kwargs)
 
     return "/".join([data_folder, figname + ".fits"])
 
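
With pol_map(Stokes_combined, **kwargs), the display parameters defined at the top of main (SNRp_cut, SNRi_cut, flux_lim, scale_vec, step_vec) now drive the combined polarization map rather than being consumed by the per-observation reduction. A hedged usage sketch of the cleaned entry point, with hypothetical file names:

# Hypothetical call, assuming two reduced FOC observations of one target:
combined_file = main(
    infiles=["./data/obs1_FOC.fits", "./data/obs2_FOC.fits"],
    target="NGC1068",
)
print(combined_file)  # path of the combined Stokes FITS written in data_folder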

@@ -258,7 +258,7 @@ def bkg_fit(data, error, mask, headers, subtract_error=True, display=False, save
         n_error_array[i] = np.sqrt(n_error_array[i] ** 2 + error_bkg[i] ** 2)
 
         # Subtract background
-        if subtract_error > 0:
+        if np.abs(subtract_error) > 0:
             n_data_array[i][mask] = n_data_array[i][mask] - bkg
             n_data_array[i][np.logical_and(mask, n_data_array[i] <= 1e-3 * bkg)] = 1e-3 * bkg
 

@@ -367,7 +367,7 @@ def bkg_hist(data, error, mask, headers, sub_type=None, subtract_error=True, dis
         n_error_array[i] = np.sqrt(n_error_array[i] ** 2 + error_bkg[i] ** 2)
 
         # Subtract background
-        if subtract_error > 0:
+        if np.abs(subtract_error) > 0:
             n_data_array[i][mask] = n_data_array[i][mask] - bkg
             n_data_array[i][np.logical_and(mask, n_data_array[i] <= 1e-3 * bkg)] = 1e-3 * bkg
 

@@ -464,7 +464,7 @@ def bkg_mini(data, error, mask, headers, sub_shape=(15, 15), subtract_error=True
         n_error_array[i] = np.sqrt(n_error_array[i] ** 2 + error_bkg[i] ** 2)
 
         # Subtract background
-        if subtract_error > 0.0:
+        if np.abs(subtract_error) > 0:
             n_data_array[i][mask] = n_data_array[i][mask] - bkg
             n_data_array[i][np.logical_and(mask, n_data_array[i] <= 1e-3 * bkg)] = 1e-3 * bkg
 
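
The same guard change lands in bkg_fit, bkg_hist and bkg_mini: any non-zero subtract_error now triggers the background subtraction, where the old test silently skipped it for negative values. This matters if the sign of subtract_error is meant to encode an option while its magnitude scales the subtracted level (an assumption; only the guard itself is visible in this diff). The behavioral difference in isolation:

import numpy as np

subtract_error = -0.7
print(subtract_error > 0)          # False: old guard, subtraction skipped
print(np.abs(subtract_error) > 0)  # True: new guard, subtraction applied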

@@ -45,3 +45,18 @@ def sci_not(v, err, rnd=1, out=str):
         return output[0] + r")e{0}".format(-power)
     else:
         return *output[1:], -power
+
+def wcs_PA(PC21, PC22):
+    """
+    Return the position angle in degrees to the North direction of a WCS
+    from the values of the coefficients of its transformation matrix.
+    """
+    if (abs(PC21) > abs(PC22)) and (PC21 >= 0):
+        orient = -np.arccos(PC22) * 180.0 / np.pi
+    elif (abs(PC21) > abs(PC22)) and (PC21 < 0):
+        orient = np.arccos(PC22) * 180.0 / np.pi
+    elif (abs(PC21) < abs(PC22)) and (PC22 >= 0):
+        orient = np.arccos(PC22) * 180.0 / np.pi
+    elif (abs(PC21) < abs(PC22)) and (PC22 < 0):
+        orient = -np.arccos(PC22) * 180.0 / np.pi
+    return orient
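
wcs_PA recovers the orientation from the PC matrix of a FITS WCS: for a pure rotation by an angle theta, PC2_1 = sin(theta) and PC2_2 = cos(theta), so arccos(PC2_2) gives the magnitude of the angle and the branches set its sign from the dominant matrix term. A hedged usage sketch with astropy, assuming wcs_PA is imported from the utility module above (PC2_1/PC2_2 are standard FITS WCS keywords; the file name is hypothetical):

from astropy.io import fits

# Hypothetical usage: read the rotation terms from a FITS header and
# get the position angle of North on the image in degrees.
with fits.open("./data/obs1_FOC.fits") as hdul:
    hdr = hdul[0].header
    orientat = wcs_PA(hdr["PC2_1"], hdr["PC2_2"])
print(orientat)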