finish Combine.py
Combine.py
@@ -10,8 +10,9 @@ def same_reduction(infiles):
     Test if infiles are pipeline productions with the same parameters.
     """
     from astropy.io.fits import open as fits_open
+    from astropy.wcs import WCS

-    params = {"IQU": [], "TARGNAME": [], "BKG_SUB": [], "SAMPLING": [], "SMOOTHING": []}
+    params = {"IQU": [], "ROT": [], "SIZE": [], "TARGNAME": [], "BKG_SUB": [], "SAMPLING": [], "SMOOTH": []}
     for file in infiles:
         with fits_open(file) as f:
             # test for presence of I, Q, U images
@@ -25,15 +26,28 @@ def same_reduction(infiles):
             for look in ["I_stokes", "Q_stokes", "U_stokes", "IQU_cov_matrix"]:
                 test_IQU *= look in datatype
             params["IQU"].append(test_IQU)
+            # test for orientation and pixel size
+            wcs = WCS(f[0].header).celestial
+            if wcs.wcs.has_cd() or (wcs.wcs.cdelt[:2] == np.array([1.0, 1.0])).all():
+                cdelt = np.linalg.eig(wcs.wcs.cd)[0]
+                pc = np.dot(wcs.wcs.cd, np.diag(1.0 / cdelt))
+            else:
+                cdelt = wcs.wcs.cdelt
+                pc = wcs.wcs.pc
+            params["ROT"].append(np.round(np.arccos(pc[0, 0]), 2) if np.abs(pc[0, 0]) < 1.0 else 0.0)
+            params["SIZE"].append(np.round(np.max(np.abs(cdelt * 3600.0)), 2))
             # look for information on reduction procedure
-            for key in ["TARGNAME", "BKG_SUB", "SAMPLING", "SMOOTHING"]:
+            for key in [k for k in params.keys() if k not in ["IQU", "ROT", "SIZE"]]:
                 try:
                     params[key].append(f[0].header[key])
                 except KeyError:
                     params[key].append("null")
     result = np.all(params["IQU"])
-    for key in ["TARGNAME", "BKG_SUB", "SAMPLING", "SMOOTHING"]:
+    for key in [k for k in params.keys() if k != "IQU"]:
         result *= np.unique(params[key]).size == 1
+    if np.all(params["IQU"]) and not result:
+        print(np.unique(params["SIZE"]))
+        raise ValueError("Not all observations were reduced with the same parameters, please provide the raw files.")

     return result
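The new ROT/SIZE checks read the pixel grid off the celestial WCS, using an eigen-decomposition when the header carries a CD matrix. For reference, a standalone sketch of the more common decomposition (per-axis scales as the column norms of CD, orientation from the normalized matrix); the header values here are made up for illustration:

    import numpy as np
    from astropy.wcs import WCS

    w = WCS(naxis=2)
    theta = np.deg2rad(25.0)  # hypothetical rotation of the pixel grid
    scale = 0.014 / 3600.0    # hypothetical 0.014 arcsec pixels, expressed in degrees
    w.wcs.cd = scale * np.array([[-np.cos(theta), np.sin(theta)], [np.sin(theta), np.cos(theta)]])

    cdelt = np.sqrt((w.wcs.cd ** 2).sum(axis=0))                  # per-axis pixel scales in degrees
    rot = np.degrees(np.arctan2(w.wcs.cd[1, 0], w.wcs.cd[1, 1]))  # orientation of the y-axis
    print(np.round(cdelt * 3600.0, 3), np.round(rot, 1))          # -> [0.014 0.014] 25.0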
@@ -52,7 +66,7 @@ def same_obs(infiles, data_folder):
     files = {}
     files["PROPOSID"] = np.array([str(head["PROPOSID"]) for head in headers], dtype=str)
     files["ROOTNAME"] = np.array([head["ROOTNAME"].lower() + "_c0f.fits" for head in headers], dtype=str)
-    files["EXPSTART"] = np.array([Time(head["EXPSTART"],format='mjd') for head in headers])
+    files["EXPSTART"] = np.array([Time(head["EXPSTART"], format="mjd") for head in headers])
     products = Table(files)

     new_infiles = []
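The hunk above only shows same_obs assembling an astropy Table of proposal IDs, rootnames, and exposure start times; the grouping logic itself is outside the diff. Assuming observations are bundled by proposal ID, a minimal sketch of that kind of grouping:

    from astropy.table import Table

    # Hypothetical rows; same_obs builds these columns from the FITS headers.
    products = Table({"PROPOSID": ["8018", "8018", "9876"], "ROOTNAME": ["x1_c0f.fits", "x2_c0f.fits", "y1_c0f.fits"]})
    groups = [list(g["ROOTNAME"]) for g in products.group_by("PROPOSID").groups]
    print(groups)  # two groups, one per proposal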
@@ -73,22 +87,89 @@ def combine_Stokes(infiles):
     """
     Combine I, Q, U from different observations of the same object.
     """
-    print("not implemented yet")
+    from astropy.io.fits import open as fits_open
+    from lib.reduction import align_data, zeropad
+    from scipy.ndimage import shift as sc_shift

-    return infiles
+    I_array, Q_array, U_array, IQU_cov_array, data_mask, headers = [], [], [], [], [], []
+    shape = np.array([0, 0])
+    for file in infiles:
+        with fits_open(file) as f:
+            headers.append(f[0].header)
+            I_array.append(f["I_stokes"].data)
+            Q_array.append(f["Q_stokes"].data)
+            U_array.append(f["U_stokes"].data)
+            IQU_cov_array.append(f["IQU_cov_matrix"].data)
+            data_mask.append(f["data_mask"].data.astype(bool))
+            shape[0] = np.max([shape[0], f["I_stokes"].data.shape[0]])
+            shape[1] = np.max([shape[1], f["I_stokes"].data.shape[1]])
+
+    exposure_array = np.array([float(head["EXPTIME"]) for head in headers])
+
+    shape += np.array([5, 5])
+    data_mask = np.sum([zeropad(mask, shape) for mask in data_mask], axis=0).astype(bool)
+    I_array = np.array([zeropad(I, shape) for I in I_array])
+    Q_array = np.array([zeropad(Q, shape) for Q in Q_array])
+    U_array = np.array([zeropad(U, shape) for U in U_array])
+    IQU_cov_array = np.array([[[zeropad(cov[i, j], shape) for j in range(3)] for i in range(3)] for cov in IQU_cov_array])
+
+    sI_array = np.sqrt(IQU_cov_array[:, 0, 0])
+    sQ_array = np.sqrt(IQU_cov_array[:, 1, 1])
+    sU_array = np.sqrt(IQU_cov_array[:, 2, 2])
+
+    _, _, _, _, shifts, errors = align_data(I_array, headers, error_array=sI_array, data_mask=data_mask, ref_center="center", return_shifts=True)
+    data_mask_aligned = np.sum([sc_shift(data_mask, s, order=1, cval=0.0) for s in shifts], axis=0).astype(bool)
+    I_aligned, sI_aligned = (
+        np.array([sc_shift(I, s, order=1, cval=0.0) for I, s in zip(I_array, shifts)]),
+        np.array([sc_shift(sI, s, order=1, cval=0.0) for sI, s in zip(sI_array, shifts)]),
+    )
+    Q_aligned, sQ_aligned = (
+        np.array([sc_shift(Q, s, order=1, cval=0.0) for Q, s in zip(Q_array, shifts)]),
+        np.array([sc_shift(sQ, s, order=1, cval=0.0) for sQ, s in zip(sQ_array, shifts)]),
+    )
+    U_aligned, sU_aligned = (
+        np.array([sc_shift(U, s, order=1, cval=0.0) for U, s in zip(U_array, shifts)]),
+        np.array([sc_shift(sU, s, order=1, cval=0.0) for sU, s in zip(sU_array, shifts)]),
+    )
+    IQU_cov_aligned = np.array([[[sc_shift(cov[i, j], s, order=1, cval=0.0) for j in range(3)] for i in range(3)] for cov, s in zip(IQU_cov_array, shifts)])
+
+    I_combined = np.sum([exp * I for exp, I in zip(exposure_array, I_aligned)], axis=0) / exposure_array.sum()
+    Q_combined = np.sum([exp * Q for exp, Q in zip(exposure_array, Q_aligned)], axis=0) / exposure_array.sum()
+    U_combined = np.sum([exp * U for exp, U in zip(exposure_array, U_aligned)], axis=0) / exposure_array.sum()
+
+    IQU_cov_combined = np.zeros((3, 3, shape[0], shape[1]))
+    for i in range(3):
+        IQU_cov_combined[i, i] = np.sum([exp**2 * cov for exp, cov in zip(exposure_array, IQU_cov_aligned[:, i, i])], axis=0) / exposure_array.sum() ** 2
+        for j in [x for x in range(3) if x != i]:
+            IQU_cov_combined[i, j] = np.sqrt(
+                np.sum([exp**2 * cov**2 for exp, cov in zip(exposure_array, IQU_cov_aligned[:, i, j])], axis=0) / exposure_array.sum() ** 2
+            )
+            IQU_cov_combined[j, i] = np.sqrt(
+                np.sum([exp**2 * cov**2 for exp, cov in zip(exposure_array, IQU_cov_aligned[:, j, i])], axis=0) / exposure_array.sum() ** 2
+            )
+
+    header_combined = headers[0]
+    header_combined["EXPTIME"] = exposure_array.sum()
+
+    return I_combined, Q_combined, U_combined, IQU_cov_combined, data_mask_aligned, header_combined


 def main(infiles, target=None, output_dir="./data/"):
     """ """
+    from lib.fits import save_Stokes
+    from lib.reduction import compute_pol
+    from lib.plots import pol_map
+
     if target is None:
         target = input("Target name:\n>")

-    if not same_reduction(infiles):
-        print("NOT SAME REDUC")
-        from FOC_reduction import main as FOC_reduction
-
-        prod = np.array([["/".join(filepath.split("/")[:-1]), filepath.split("/")[-1]] for filepath in infiles], dtype=str)
-        data_folder = prod[0][0]
-        infiles = [p[1] for p in prod]
+    prod = np.array([["/".join(filepath.split("/")[:-1]), filepath.split("/")[-1]] for filepath in infiles], dtype=str)
+    data_folder = prod[0][0]
+    files = [p[1] for p in prod]
+
+    if not same_reduction(infiles):
+        from FOC_reduction import main as FOC_reduction

         # Reduction parameters
         kwargs = {}
         # Background estimation
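combine_Stokes averages the aligned Stokes maps with exposure-time weights and propagates the diagonal covariance terms the same way: I_comb = Σ t_i I_i / Σ t_i and Var(I_comb) = Σ t_i² Var(I_i) / (Σ t_i)² for independent exposures. A per-pixel numeric check with made-up values:

    import numpy as np

    t = np.array([900.0, 1800.0])       # hypothetical exposure times (s)
    I = np.array([1.00e-16, 1.10e-16])  # hypothetical pixel fluxes
    var = np.array([4.0e-36, 2.0e-36])  # hypothetical pixel variances

    I_comb = (t * I).sum() / t.sum()
    var_comb = (t**2 * var).sum() / t.sum() ** 2
    print(I_comb, np.sqrt(var_comb))

Exposure-time weighting coincides with optimal inverse-variance weighting only when the per-exposure variance scales as 1/t_i; otherwise it is a simpler, slightly suboptimal choice.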
@@ -112,20 +193,44 @@ def main(infiles, target=None, output_dir="./data/"):
         kwargs["step_vec"] = (
             1  # plot all vectors in the array; if step_vec = 2, every other vector is plotted; if step_vec = 0, all vectors are displayed at full length
         )
-        grouped_infiles = same_obs(infiles, data_folder)
-        print(grouped_infiles)
+        grouped_infiles = same_obs(files, data_folder)

         new_infiles = []
         for i, group in enumerate(grouped_infiles):
-            new_infiles.append(FOC_reduction(target=target+"-"+str(i+1), infiles=["/".join([data_folder,file]) for file in group], interactive=True, **kwargs))
+            new_infiles.append(
+                FOC_reduction(target=target + "-" + str(i + 1), infiles=["/".join([data_folder, file]) for file in group], interactive=True, **kwargs)
+            )
+        infiles = new_infiles

-        combined_Stokes = combine_Stokes(new_infiles)
-
-    else:
-        print("SAME REDUC")
-        combined_Stokes = combine_Stokes(infiles)
+    I_combined, Q_combined, U_combined, IQU_cov_combined, data_mask_combined, header_combined = combine_Stokes(infiles)
+    P, debiased_P, s_P, s_P_P, PA, s_PA, s_PA_P = compute_pol(
+        I_stokes=I_combined, Q_stokes=Q_combined, U_stokes=U_combined, Stokes_cov=IQU_cov_combined, header_stokes=header_combined
+    )
+    filename = header_combined["FILENAME"]
+    figname = "_".join([target, filename[filename.find("FOC_"):], "combined"])
+    Stokes_combined = save_Stokes(
+        I_stokes=I_combined,
+        Q_stokes=Q_combined,
+        U_stokes=U_combined,
+        Stokes_cov=IQU_cov_combined,
+        P=P,
+        debiased_P=debiased_P,
+        s_P=s_P,
+        s_P_P=s_P_P,
+        PA=PA,
+        s_PA=s_PA,
+        s_PA_P=s_PA_P,
+        header_stokes=header_combined,
+        data_mask=data_mask_combined,
+        filename=figname,
+        data_folder=data_folder,
+        return_hdul=True,
+    )
+    pol_map(Stokes_combined)

-    return combined_Stokes
+    return "/".join([data_folder, figname + ".fits"])


 if __name__ == "__main__":
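With these changes, Combine.main re-reduces inconsistent inputs group by group, then combines, saves, and plots the result, returning the path of the combined product. A minimal call sketch (the file names are hypothetical):

    from Combine import main as combine_main

    out = combine_main(infiles=["./data/NGC1068/obs1_FOC_Stokes.fits", "./data/NGC1068/obs2_FOC_Stokes.fits"], target="NGC1068")
    print(out)  # path of the combined Stokes FITS file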
FOC_reduction.py
@@ -63,8 +63,8 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop=
     # Polarization map output
     SNRp_cut = 3.0  # P measurements with SNR>3
     SNRi_cut = 1.0  # I measurements with SNR>30, which implies an uncertainty in P of 4.7%.
-    flux_lim = 1e-19, 3e-17  # lowest and highest flux displayed on plot, defaults to bkg and maximum in cut if None
-    scale_vec = 5
+    flux_lim = None  # lowest and highest flux displayed on plot, defaults to bkg and maximum in cut if None
+    scale_vec = 3
     step_vec = 1  # plot all vectors in the array; if step_vec = 2, every other vector is plotted; if step_vec = 0, all vectors are displayed at full length

     # Pipeline start
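The 4.7% quoted in the SNRi comment is presumably first-order error propagation on P = sqrt(Q² + U²)/I: if, as is typical for three-polarizer data, σ_Q ≈ σ_U ≈ √2·σ_I, then σ_P ≈ √2/SNR_I. A quick check of that rule of thumb:

    import numpy as np

    snr_i = 30.0
    print(np.sqrt(2.0) / snr_i)  # ~0.047, the 4.7% uncertainty in P quoted above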
@@ -421,7 +421,7 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop=
     elif pxscale.lower() not in ["full", "integrate"]:
         proj_plots.pol_map(Stokes_hdul, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim)

-    outfiles.append(Stokes_hdul[0].header["FILENAME"])
+    outfiles.append("/".join([data_folder, Stokes_hdul[0].header["FILENAME"] + ".fits"]))

     return outfiles
lib/fits.py
@@ -144,7 +144,7 @@ def save_Stokes(
     header["INSTRUME"] = (header_stokes["instrume"] if "INSTRUME" in list(header_stokes.keys()) else "FOC", "identifier for instrument used to acquire data")
     header["PHOTPLAM"] = (header_stokes["photplam"], "Pivot Wavelength")
     header["PHOTFLAM"] = (header_stokes["photflam"], "Inverse Sensitivity in DN/sec/cm**2/Angst")
-    header["EXPTOT"] = (exp_tot, "Total exposure time in sec")
+    header["EXPTIME"] = (exp_tot, "Total exposure time in sec")
     header["PROPOSID"] = (header_stokes["proposid"], "PEP proposal identifier for observation")
     header["TARGNAME"] = (header_stokes["targname"], "Target name")
     header["ORIENTAT"] = (np.arccos(new_wcs.wcs.pc[0, 0]) * 180.0 / np.pi, "Angle between North and the y-axis of the image")
lib/plots.py
@@ -3225,7 +3225,7 @@ if __name__ == "__main__":
         "-sc", "--scale-vec", metavar="scale_vec", required=False, help="Size of the 100% polarization vector in pixel units", type=float, default=3.0
     )
     parser.add_argument("-pa", "--pang-err", action="store_true", required=False, help="Whether the polarization angle uncertainties should be displayed")
-    parser.add_argument("-l", "--lim", metavar="flux_lim", nargs=2, required=False, help="Limits for the intensity map", default=None)
+    parser.add_argument("-l", "--lim", metavar="flux_lim", nargs=2, required=False, help="Limits for the intensity map", type=float, default=None)
     parser.add_argument("-pdf", "--static-pdf", metavar="static_pdf", required=False, help="Whether the analysis tool or the static pdfs should be output", default=None)
     args = parser.parse_args()
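Adding type=float to the two-valued --lim option makes argparse convert each limit, so downstream code receives floats instead of strings. A minimal reproduction:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("-l", "--lim", metavar="flux_lim", nargs=2, required=False, help="Limits for the intensity map", type=float, default=None)
    args = parser.parse_args(["-l", "1e-19", "3e-17"])
    print(args.lim)  # [1e-19, 3e-17] as floats; without type=float these would be strings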
lib/reduction.py
@@ -797,8 +797,8 @@ def align_data(data_array, headers, error_array=None, data_mask=None, background
         if do_shift:
             shift, error, _ = phase_cross_correlation(ref_data / ref_data.max(), image / image.max(), upsample_factor=upsample_factor)
         else:
-            shift = globals["pol_shift"][headers[i]["filtnam1"].lower()]
-            error = globals["sigma_shift"][headers[i]["filtnam1"].lower()]
+            shift = globals()["pol_shift"][headers[i]["filtnam1"].lower()]
+            error = globals()["sigma_shift"][headers[i]["filtnam1"].lower()]
         # Rescale image to requested output
         rescaled_image[i, res_shift[0] : res_shift[0] + shape[1], res_shift[1] : res_shift[1] + shape[2]] = deepcopy(image)
         rescaled_error[i, res_shift[0] : res_shift[0] + shape[1], res_shift[1] : res_shift[1] + shape[2]] = deepcopy(error_array[i])
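The last hunk fixes a crash: globals is the built-in function, and subscripting the function object raises a TypeError; it has to be called to obtain the module namespace dict. An illustration with a hypothetical shift table:

    pol_shift = {"pol0": (0.0, 0.0)}  # hypothetical module-level shift table

    def lookup(name):
        # globals["pol_shift"] would raise TypeError: 'builtin_function_or_method'
        # object is not subscriptable; globals() returns the module's dict.
        return globals()["pol_shift"][name]

    print(lookup("pol0"))  # (0.0, 0.0)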