diff --git a/README.md b/README.md index 1731062..3fe06c9 100755 --- a/README.md +++ b/README.md @@ -1,2 +1,6 @@ # FOC_Reduction FOC reduction pipeline + +TODO: + - Add all polarimetry capables instruments from HST (starting with FOS and ACS) + - Build science case for future UV polarimeters (AGN outflow geometry, dust scattering, torus outline ?) diff --git a/doc/pipeline.pdf b/doc/pipeline.pdf new file mode 100644 index 0000000..d6e7e05 Binary files /dev/null and b/doc/pipeline.pdf differ diff --git a/license.md b/license.md new file mode 100644 index 0000000..5ee3421 --- /dev/null +++ b/license.md @@ -0,0 +1,10 @@ +MIT License + +Copyright (c) 2024 Thibault Barnouin + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/package/FOC_reduction.py b/package/FOC_reduction.py index d93e453..eab1da7 100755 --- a/package/FOC_reduction.py +++ b/package/FOC_reduction.py @@ -1,9 +1,14 @@ -#!/usr/bin/python +#!/usr/bin/env python3 # -*- coding:utf-8 -*- """ Main script where are progressively added the steps for the FOC pipeline reduction. 
""" +from pathlib import Path +from sys import path as syspath + +syspath.append(str(Path(__file__).parent.parent)) + # Project libraries from copy import deepcopy from os import system @@ -25,7 +30,7 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop= # from lib.deconvolve import from_file_psf psf = "gaussian" # Can be user-defined as well # psf = from_file_psf(data_folder+psf_file) - psf_FWHM = 3.1 + psf_FWHM = 1.55 psf_scale = "px" psf_shape = None # (151, 151) iterations = 1 @@ -35,13 +40,13 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop= display_crop = False # Background estimation - error_sub_type = "freedman-diaconis" # sqrt, sturges, rice, scott, freedman-diaconis (default) or shape (example (51, 51)) - subtract_error = 1.0 - display_bkg = False + error_sub_type = "scott" # sqrt, sturges, rice, scott, freedman-diaconis (default) or shape (example (51, 51)) + subtract_error = 2.0 + display_bkg = True # Data binning - pxsize = 2 - pxscale = "px" # pixel, arcsec or full + pxsize = 0.05 + pxscale = "arcsec" # pixel, arcsec or full rebin_operation = "sum" # sum or average # Alignement @@ -54,17 +59,17 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop= # Smoothing smoothing_function = "combine" # gaussian_after, weighted_gaussian_after, gaussian, weighted_gaussian or combine - smoothing_FWHM = 2.0 # If None, no smoothing is done - smoothing_scale = "px" # pixel or arcsec + smoothing_FWHM = 0.075 # If None, no smoothing is done + smoothing_scale = "arcsec" # pixel or arcsec # Rotation rotate_North = True # Polarization map output - SNRp_cut = 3.0 # P measurments with SNR>3 - SNRi_cut = 1.0 # I measurments with SNR>30, which implies an uncertainty in P of 4.7%. + P_cut = 5 # if >=1.0 cut on the signal-to-noise else cut on the confidence level in Q, U + SNRi_cut = 5.0 # I measurments with SNR>30, which implies an uncertainty in P of 4.7%. flux_lim = None # lowest and highest flux displayed on plot, defaults to bkg and maximum in cut if None - scale_vec = 5 + scale_vec = 3 step_vec = 1 # plot all vectors in the array. 
if step_vec = 2, then every other vector will be plotted if step_vec = 0 then all vectors are displayed at full length # Pipeline start @@ -171,6 +176,7 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop= proj_plots.plot_obs( data_array, headers, + shifts=shifts, savename="_".join([figname, str(align_center)]), plots_folder=plots_folder, norm=LogNorm(vmin=data_array[data_array > 0.0].min() * headers[0]["photflam"], vmax=data_array[data_array > 0.0].max() * headers[0]["photflam"]), @@ -292,7 +298,7 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop= proj_plots.polarization_map( deepcopy(Stokes_hdul), data_mask, - SNRp_cut=SNRp_cut, + P_cut=P_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec, @@ -300,108 +306,31 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop= savename="_".join([figname]), plots_folder=plots_folder, ) - proj_plots.polarization_map( - deepcopy(Stokes_hdul), - data_mask, - SNRp_cut=SNRp_cut, - SNRi_cut=SNRi_cut, - flux_lim=flux_lim, - step_vec=step_vec, - scale_vec=scale_vec, - savename="_".join([figname, "I"]), - plots_folder=plots_folder, - display="Intensity", - ) - proj_plots.polarization_map( - deepcopy(Stokes_hdul), - data_mask, - SNRp_cut=SNRp_cut, - SNRi_cut=SNRi_cut, - flux_lim=flux_lim, - step_vec=step_vec, - scale_vec=scale_vec, - savename="_".join([figname, "P_flux"]), - plots_folder=plots_folder, - display="Pol_Flux", - ) - proj_plots.polarization_map( - deepcopy(Stokes_hdul), - data_mask, - SNRp_cut=SNRp_cut, - SNRi_cut=SNRi_cut, - flux_lim=flux_lim, - step_vec=step_vec, - scale_vec=scale_vec, - savename="_".join([figname, "P"]), - plots_folder=plots_folder, - display="Pol_deg", - ) - proj_plots.polarization_map( - deepcopy(Stokes_hdul), - data_mask, - SNRp_cut=SNRp_cut, - SNRi_cut=SNRi_cut, - flux_lim=flux_lim, - step_vec=step_vec, - scale_vec=scale_vec, - savename="_".join([figname, "PA"]), - plots_folder=plots_folder, - display="Pol_ang", - ) - proj_plots.polarization_map( - deepcopy(Stokes_hdul), - data_mask, - SNRp_cut=SNRp_cut, - SNRi_cut=SNRi_cut, - flux_lim=flux_lim, - step_vec=step_vec, - scale_vec=scale_vec, - savename="_".join([figname, "I_err"]), - plots_folder=plots_folder, - display="I_err", - ) - proj_plots.polarization_map( - deepcopy(Stokes_hdul), - data_mask, - SNRp_cut=SNRp_cut, - SNRi_cut=SNRi_cut, - flux_lim=flux_lim, - step_vec=step_vec, - scale_vec=scale_vec, - savename="_".join([figname, "P_err"]), - plots_folder=plots_folder, - display="Pol_deg_err", - ) - proj_plots.polarization_map( - deepcopy(Stokes_hdul), - data_mask, - SNRp_cut=SNRp_cut, - SNRi_cut=SNRi_cut, - flux_lim=flux_lim, - step_vec=step_vec, - scale_vec=scale_vec, - savename="_".join([figname, "SNRi"]), - plots_folder=plots_folder, - display="SNRi", - ) - proj_plots.polarization_map( - deepcopy(Stokes_hdul), - data_mask, - SNRp_cut=SNRp_cut, - SNRi_cut=SNRi_cut, - flux_lim=flux_lim, - step_vec=step_vec, - scale_vec=scale_vec, - savename="_".join([figname, "SNRp"]), - plots_folder=plots_folder, - display="SNRp", - ) + for figtype, figsuffix in zip( + ["Intensity", "Pol_flux", "Pol_deg", "Pol_ang", "I_err", "P_err", "SNRi", "SNRp", "confp"], + ["I", "P_flux", "P", "PA", "I_err", "P_err", "SNRi", "SNRp", "confP"], + ): + try: + proj_plots.polarization_map( + deepcopy(Stokes_hdul), + data_mask, + P_cut=P_cut, + SNRi_cut=SNRi_cut, + flux_lim=flux_lim, + step_vec=step_vec, + scale_vec=scale_vec, + savename="_".join([figname, figsuffix]), + plots_folder=plots_folder, + 
display=figtype, + ) + except ValueError: + pass elif not interactive: proj_plots.polarization_map( - deepcopy(Stokes_hdul), data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, savename=figname, plots_folder=plots_folder, display="integrate" + deepcopy(Stokes_hdul), data_mask, P_cut=P_cut, SNRi_cut=SNRi_cut, savename=figname, plots_folder=plots_folder, display="integrate" ) elif pxscale.lower() not in ["full", "integrate"]: - proj_plots.pol_map(Stokes_hdul, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim) + proj_plots.pol_map(Stokes_hdul, P_cut=P_cut, SNRi_cut=SNRi_cut, step_vec=step_vec, scale_vec=scale_vec, flux_lim=flux_lim) return outfiles diff --git a/package/__init__.py b/package/__init__.py index 094aa13..a8ca1bd 100644 --- a/package/__init__.py +++ b/package/__init__.py @@ -1,3 +1,2 @@ -from . import lib -from . import src +from .lib import * from . import FOC_reduction diff --git a/package/lib/deconvolve.py b/package/lib/deconvolve.py index f89eee5..d2afd64 100755 --- a/package/lib/deconvolve.py +++ b/package/lib/deconvolve.py @@ -286,11 +286,13 @@ def richardson_lucy(image, psf, iterations=20, clip=True, filter_epsilon=None): image = image.astype(float_type, copy=False) psf = psf.astype(float_type, copy=False) psf /= psf.sum() - im_deconv = image.copy() + im_deconv = np.full(image.shape, 0.5, dtype=float_type) psf_mirror = np.flip(psf) + eps = 1e-20 + for _ in range(iterations): - conv = convolve(im_deconv, psf, mode="same") + conv = convolve(im_deconv, psf, mode="same") + eps if filter_epsilon: relative_blur = np.where(conv < filter_epsilon, 0, image / conv) else: diff --git a/package/lib/fits.py b/package/lib/fits.py index 1506a29..6ea842a 100755 --- a/package/lib/fits.py +++ b/package/lib/fits.py @@ -141,6 +141,7 @@ def save_Stokes( header["TELESCOP"] = (header_stokes["TELESCOP"] if "TELESCOP" in list(header_stokes.keys()) else "HST", "telescope used to acquire data") header["INSTRUME"] = (header_stokes["INSTRUME"] if "INSTRUME" in list(header_stokes.keys()) else "FOC", "identifier for instrument used to acuire data") header["PHOTPLAM"] = (header_stokes["PHOTPLAM"], "Pivot Wavelength") + header["PHOTBW"] = (header_stokes["PHOTBW"], "RMS Bandwidth of the Filter and Detector") header["PHOTFLAM"] = (header_stokes["PHOTFLAM"], "Inverse Sensitivity in DN/sec/cm**2/Angst") header["EXPTIME"] = (header_stokes["EXPTIME"], "Total exposure time in sec") header["PROPOSID"] = (header_stokes["PROPOSID"], "PEP proposal identifier for observation") diff --git a/package/lib/plots.py b/package/lib/plots.py index 9cba169..8120809 100755 --- a/package/lib/plots.py +++ b/package/lib/plots.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/env python3 """ Library functions for displaying informations using matplotlib @@ -9,7 +9,7 @@ prototypes : - plot_Stokes(Stokes, savename, plots_folder) Plot the I/Q/U maps from the Stokes HDUList. - - polarization_map(Stokes, data_mask, rectangle, SNRp_cut, SNRi_cut, step_vec, savename, plots_folder, display) -> fig, ax + - polarization_map(Stokes, data_mask, rectangle, P_cut, SNRi_cut, step_vec, savename, plots_folder, display) -> fig, ax Plots polarization map of polarimetric parameters saved in an HDUList. class align_maps(map, other_map, **kwargs) @@ -36,7 +36,7 @@ prototypes : class aperture(img, cdelt, radius, fig, ax) Class to interactively simulate aperture integration. 
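# --- Illustrative sketch (not part of the patch) ---------------------------------
# The richardson_lucy() hunk above now starts the estimate from a flat 0.5 array
# instead of a copy of the data and adds a small eps to the forward convolution so
# that image / conv never divides by zero. A minimal standalone sketch of the
# resulting iteration, assuming `convolve` is scipy.signal.convolve as in the
# original module and that the closing multiplicative update is the standard
# Richardson-Lucy step left unchanged by the patch:
import numpy as np
from scipy.signal import convolve


def rl_sketch(image, psf, iterations=20, filter_epsilon=None):
    psf = psf / psf.sum()                                   # normalised kernel
    im_deconv = np.full(image.shape, 0.5, dtype=float)      # flat, positive start
    psf_mirror = np.flip(psf)
    eps = 1e-20
    for _ in range(iterations):
        conv = convolve(im_deconv, psf, mode="same") + eps  # guarded forward model
        if filter_epsilon:
            relative_blur = np.where(conv < filter_epsilon, 0, image / conv)
        else:
            relative_blur = image / conv
        im_deconv *= convolve(relative_blur, psf_mirror, mode="same")
    return im_deconv
# ----------------------------------------------------------------------------------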
- class pol_map(Stokes, SNRp_cut, SNRi_cut, selection) + class pol_map(Stokes, P_cut, SNRi_cut, selection) Class to interactively study polarization maps making use of the cropping and selecting tools. """ @@ -61,12 +61,12 @@ from mpl_toolkits.axes_grid1.anchored_artists import ( from scipy.ndimage import zoom as sc_zoom try: - from .utils import princ_angle, rot2D, sci_not + from .utils import PCconf, princ_angle, rot2D, sci_not except ImportError: - from utils import princ_angle, rot2D, sci_not + from utils import PCconf, princ_angle, rot2D, sci_not -def plot_obs(data_array, headers, rectangle=None, savename=None, plots_folder="", **kwargs): +def plot_obs(data_array, headers, rectangle=None, shifts=None, savename=None, plots_folder="", **kwargs): """ Plots raw observation imagery with some information on the instrument and filters. @@ -77,16 +77,14 @@ def plot_obs(data_array, headers, rectangle=None, savename=None, plots_folder="" single observation with multiple polarizers of an instrument headers : header list List of headers corresponding to the images in data_array - vmin : float, optional - Min pixel value that should be displayed. - Defaults to 0. - vmax : float, optional - Max pixel value that should be displayed. - Defaults to 6. rectangle : numpy.ndarray, optional Array of parameters for matplotlib.patches.Rectangle objects that will be displayed on each output image. If None, no rectangle displayed. Defaults to None. + shifts : numpy.ndarray, optional + Array of vector coordinates corresponding to images shifts with respect + to the others. If None, no shifts displayed. + Defaults to None. savename : str, optional Name of the figure the map should be saved to. If None, the map won't be saved (only displayed). @@ -99,7 +97,8 @@ def plot_obs(data_array, headers, rectangle=None, savename=None, plots_folder="" plt.rcParams.update({"font.size": 10}) nb_obs = np.max([np.sum([head["filtnam1"] == curr_pol for head in headers]) for curr_pol in ["POL0", "POL60", "POL120"]]) shape = np.array((3, nb_obs)) - fig, ax = plt.subplots(shape[0], shape[1], figsize=(3 * shape[1], 3 * shape[0]), dpi=200, layout="constrained", sharex=True, sharey=True) + fig, ax = plt.subplots(shape[0], shape[1], figsize=(3 * shape[1], 3 * shape[0]), layout="compressed", sharex=True, sharey=True) + used = np.zeros(shape, dtype=bool) r_pol = dict(pol0=0, pol60=1, pol120=2) c_pol = dict(pol0=0, pol60=0, pol120=0) for i, (data, head) in enumerate(zip(data_array, headers)): @@ -112,15 +111,17 @@ def plot_obs(data_array, headers, rectangle=None, savename=None, plots_folder="" c_pol[filt.lower()] += 1 if shape[1] != 1: ax_curr = ax[r_ax][c_ax] + used[r_ax][c_ax] = True else: ax_curr = ax[r_ax] + ax_curr[r_ax] = True # plots if "vmin" in kwargs.keys() or "vmax" in kwargs.keys(): vmin, vmax = kwargs["vmin"], kwargs["vmax"] del kwargs["vmin"], kwargs["vmax"] else: vmin, vmax = convert * data[data > 0.0].min() / 10.0, convert * data[data > 0.0].max() - for key, value in [["cmap", [["cmap", "gray"]]], ["norm", [["norm", LogNorm(vmin, vmax)]]]]: + for key, value in [["cmap", [["cmap", "inferno"]]], ["norm", [["norm", LogNorm(vmin, vmax)]]]]: try: _ = kwargs[key] except KeyError: @@ -129,26 +130,38 @@ def plot_obs(data_array, headers, rectangle=None, savename=None, plots_folder="" # im = ax[r_ax][c_ax].imshow(convert*data, origin='lower', **kwargs) data[data * convert < vmin * 10.0] = vmin * 10.0 / convert im = ax_curr.imshow(convert * data, origin="lower", **kwargs) + if shifts is not None: + x, y = np.array(data.shape[::-1]) / 
2.0 - shifts[i] + dx, dy = shifts[i] + ax_curr.arrow(x, y, dx, dy, length_includes_head=True, width=0.1, head_width=0.3, color="g") + ax_curr.plot([x, x], [0, data.shape[0] - 1], "--", lw=2, color="g", alpha=0.85) + ax_curr.plot([0, data.shape[1] - 1], [y, y], "--", lw=2, color="g", alpha=0.85) if rectangle is not None: x, y, width, height, angle, color = rectangle[i] ax_curr.add_patch(Rectangle((x, y), width, height, angle=angle, edgecolor=color, fill=False)) # position of centroid - ax_curr.plot([data.shape[1] / 2, data.shape[1] / 2], [0, data.shape[0] - 1], "--", lw=1, color="grey", alpha=0.5) - ax_curr.plot([0, data.shape[1] - 1], [data.shape[1] / 2, data.shape[1] / 2], "--", lw=1, color="grey", alpha=0.5) + ax_curr.plot([data.shape[1] / 2, data.shape[1] / 2], [0, data.shape[0] - 1], "--", lw=2, color="b", alpha=0.85) + ax_curr.plot([0, data.shape[1] - 1], [data.shape[0] / 2, data.shape[0] / 2], "--", lw=2, color="b", alpha=0.85) ax_curr.annotate( - instr + ":" + rootname, color="white", fontsize=5, xy=(0.01, 1.00), xycoords="axes fraction", verticalalignment="top", horizontalalignment="left" + instr + ":" + rootname, color="white", fontsize=10, xy=(0.01, 1.00), xycoords="axes fraction", verticalalignment="top", horizontalalignment="left" ) - ax_curr.annotate(filt, color="white", fontsize=10, xy=(0.01, 0.01), xycoords="axes fraction", verticalalignment="bottom", horizontalalignment="left") - ax_curr.annotate(exptime, color="white", fontsize=5, xy=(1.00, 0.01), xycoords="axes fraction", verticalalignment="bottom", horizontalalignment="right") + ax_curr.annotate(filt, color="white", fontsize=15, xy=(0.01, 0.01), xycoords="axes fraction", verticalalignment="bottom", horizontalalignment="left") + ax_curr.annotate( + exptime, color="white", fontsize=10, xy=(1.00, 0.01), xycoords="axes fraction", verticalalignment="bottom", horizontalalignment="right" + ) + unused = np.logical_not(used) + ii, jj = np.indices(shape) + for i, j in zip(ii[unused], jj[unused]): + fig.delaxes(ax[i][j]) # fig.subplots_adjust(hspace=0.01, wspace=0.01, right=1.02) - fig.colorbar(im, ax=ax, location="right", shrink=0.75, aspect=50, pad=0.025, label=r"Flux [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA^{-1}$]") + fig.colorbar(im, ax=ax, location="right", shrink=0.75, aspect=50, pad=0.025, label=r"$F_{\lambda}$ [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA^{-1}$]") if savename is not None: # fig.suptitle(savename) if savename[-4:] not in [".png", ".jpg", ".pdf"]: savename += ".pdf" - fig.savefig(path_join(plots_folder, savename), bbox_inches="tight") + fig.savefig(path_join(plots_folder, savename), bbox_inches="tight", dpi=150, facecolor="None") plt.show() return 0 @@ -183,9 +196,9 @@ def plot_Stokes(Stokes, savename=None, plots_folder=""): # Plot figure plt.rcParams.update({"font.size": 14}) - ratiox = max(int(stkI.shape[1]/stkI.shape[0]),1) - ratioy = max(int(stkI.shape[0]/stkI.shape[1]),1) - fig, (axI, axQ, axU) = plt.subplots(ncols=3, figsize=(15*ratiox, 6*ratioy), subplot_kw=dict(projection=wcs)) + ratiox = max(int(stkI.shape[1] / stkI.shape[0]), 1) + ratioy = max(int(stkI.shape[0] / stkI.shape[1]), 1) + fig, (axI, axQ, axU) = plt.subplots(ncols=3, figsize=(15 * ratiox, 6 * ratioy), subplot_kw=dict(projection=wcs)) fig.subplots_adjust(hspace=0, wspace=0.50, bottom=0.01, top=0.99, left=0.07, right=0.97) fig.suptitle("I, Q, U Stokes parameters") @@ -207,8 +220,7 @@ def plot_Stokes(Stokes, savename=None, plots_folder=""): savename += "_IQU.pdf" else: savename = savename[:-4] + "_IQU" + savename[-4:] - 
fig.savefig(path_join(plots_folder, savename), bbox_inches="tight") - plt.show() + fig.savefig(path_join(plots_folder, savename), bbox_inches="tight", dpi=150, facecolor="None") return 0 @@ -216,14 +228,17 @@ def polarization_map( Stokes, data_mask=None, rectangle=None, - SNRp_cut=3.0, - SNRi_cut=3.0, + P_cut=0.99, + SNRi_cut=1.0, flux_lim=None, step_vec=1, - scale_vec=2.0, + scale_vec=3.0, savename=None, plots_folder="", display="default", + fig=None, + ax=None, + **kwargs, ): """ Plots polarization map from Stokes HDUList. @@ -236,9 +251,9 @@ def polarization_map( Array of parameters for matplotlib.patches.Rectangle objects that will be displayed on each output image. If None, no rectangle displayed. Defaults to None. - SNRp_cut : float, optional + P_cut : float, optional Cut that should be applied to the signal-to-noise ratio on P. - Any SNR < SNRp_cut won't be displayed. + Any SNR < P_cut won't be displayed. Defaults to 3. SNRi_cut : float, optional Cut that should be applied to the signal-to-noise ratio on I. @@ -255,6 +270,7 @@ def polarization_map( scale_vec : float, optional Pixel length of displayed 100% polarization vector. If scale_vec = 2, a vector of 50% polarization will be 1 pixel wide. + If scale_vec = 0, all polarization vectors will be displayed at full length. Defaults to 2. savename : str, optional Name of the figure the map should be saved to. If None, the map won't @@ -276,15 +292,24 @@ def polarization_map( """ # Get data stkI = Stokes["I_stokes"].data.copy() + stkQ = Stokes["Q_stokes"].data.copy() + stkU = Stokes["U_stokes"].data.copy() stk_cov = Stokes["IQU_cov_matrix"].data.copy() pol = Stokes["Pol_deg_debiased"].data.copy() pol_err = Stokes["Pol_deg_err"].data.copy() pang = Stokes["Pol_ang"].data.copy() - try: - if data_mask is None: + if data_mask is None: + try: data_mask = Stokes["Data_mask"].data.astype(bool).copy() - except KeyError: - data_mask = np.ones(stkI.shape).astype(bool) + except KeyError: + data_mask = np.zeros(stkI.shape).astype(bool) + data_mask[stkI > 0.0] = True + + # Compute confidence level map + QN, UN, QN_ERR, UN_ERR = np.full((4, stkI.shape[0], stkI.shape[1]), np.nan) + for nflux, sflux in zip([QN, UN, QN_ERR, UN_ERR], [stkQ, stkU, np.sqrt(stk_cov[1, 1]), np.sqrt(stk_cov[2, 2])]): + nflux[stkI > 0.0] = sflux[stkI > 0.0] / stkI[stkI > 0.0] + confP = PCconf(QN, UN, QN_ERR, UN_ERR) for dataset in [stkI, pol, pol_err, pang]: dataset[np.logical_not(data_mask)] = np.nan @@ -292,9 +317,9 @@ def polarization_map( for j in range(3): stk_cov[i][j][np.logical_not(data_mask)] = np.nan + wcs = WCS(Stokes[0]).deepcopy() pivot_wav = Stokes[0].header["photplam"] convert_flux = Stokes[0].header["photflam"] - wcs = WCS(Stokes[0]).deepcopy() # Plot Stokes parameters map if display is None or display.lower() in ["default"]: @@ -302,15 +327,23 @@ def polarization_map( # Compute SNR and apply cuts poldata, pangdata = pol.copy(), pang.copy() - maskP = pol_err > 0 - SNRp = np.ones(pol.shape) * np.nan - SNRp[maskP] = pol[maskP] / pol_err[maskP] + SNRi = np.full(stkI.shape, np.nan) + SNRi[stk_cov[0, 0] > 0.0] = stkI[stk_cov[0, 0] > 0.0] / np.sqrt(stk_cov[0, 0][stk_cov[0, 0] > 0.0]) + maskI = np.zeros(stkI.shape, dtype=bool) + maskI[stk_cov[0, 0] > 0.0] = SNRi[stk_cov[0, 0] > 0.0] > SNRi_cut - maskI = stk_cov[0, 0] > 0 - SNRi = np.ones(stkI.shape) * np.nan - SNRi[maskI] = stkI[maskI] / np.sqrt(stk_cov[0, 0][maskI]) + SNRp = np.full(pol.shape, np.nan) + SNRp[pol_err > 0.0] = pol[pol_err > 0.0] / pol_err[pol_err > 0.0] + maskP = np.zeros(pol.shape, dtype=bool) - mask 
= (SNRp > SNRp_cut) * (SNRi > SNRi_cut) + if P_cut >= 1.0: + # MaskP on the signal-to-noise value + maskP[pol_err > 0.0] = SNRp[pol_err > 0.0] > P_cut + else: + # MaskP on the confidence value + maskP = confP > P_cut + + mask = np.logical_and(maskI, maskP) poldata[np.logical_not(mask)] = np.nan pangdata[np.logical_not(mask)] = np.nan @@ -324,10 +357,13 @@ def polarization_map( # Plot the map plt.rcParams.update({"font.size": 14}) plt.rcdefaults() - ratiox = max(int(stkI.shape[1]/(stkI.shape[0])),1) - ratioy = max(int((stkI.shape[0])/stkI.shape[1]),1) - fig, ax = plt.subplots(figsize=(6*ratiox, 6*ratioy), layout="compressed", subplot_kw=dict(projection=wcs)) - ax.set(aspect="equal", fc="k", ylim=[-stkI.shape[0]*0.10,stkI.shape[0]*1.15]) + if fig is None: + ratiox = max(int(stkI.shape[1] / (stkI.shape[0])), 1) + ratioy = max(int((stkI.shape[0]) / stkI.shape[1]), 1) + fig = plt.figure(figsize=(7 * ratiox, 7 * ratioy), layout="constrained") + if ax is None: + ax = fig.add_subplot(111, projection=wcs) + ax.set(aspect="equal", fc="k", ylim=[-stkI.shape[0] * 0.01, stkI.shape[0] * 1.01]) # fig.subplots_adjust(hspace=0, wspace=0, left=0.102, right=1.02) # ax.coords.grid(True, color='white', ls='dotted', alpha=0.5) @@ -336,7 +372,48 @@ def polarization_map( ax.coords[0].set_ticklabel_position("t") ax.set_ylabel("Declination (J2000)", labelpad=-1) - if display.lower() in ["intensity"]: + vmin, vmax = 0.0, stkI.max() * convert_flux + for key, value in [ + ["cmap", [["cmap", "inferno"]]], + ["width", [["width", 0.5]]], + ["linewidth", [["linewidth", 0.3]]], + ]: + try: + _ = kwargs[key] + except KeyError: + for key_i, val_i in value: + kwargs[key_i] = val_i + if kwargs["cmap"] in [ + "inferno", + "magma", + "Greys_r", + "binary_r", + "gist_yarg_r", + "gist_gray", + "gray", + "bone", + "pink", + "hot", + "afmhot", + "gist_heat", + "copper", + "gist_earth", + "gist_stern", + "gnuplot", + "gnuplot2", + "CMRmap", + "cubehelix", + "nipy_spectral", + "gist_ncar", + "viridis", + ]: + ax.set_facecolor("black") + font_color = "white" + else: + ax.set_facecolor("white") + font_color = "black" + + if display.lower() in ["i", "intensity"]: # If no display selected, show intensity map display = "i" if flux_lim is None: @@ -346,43 +423,46 @@ def polarization_map( vmin, vmax = 1.0 / 2.0 * np.median(np.sqrt(stk_cov[0, 0][stkI > 0.0]) * convert_flux), np.max(stkI[stkI > 0.0] * convert_flux) else: vmin, vmax = flux_lim - im = ax.imshow(stkI * convert_flux, norm=LogNorm(vmin, vmax), aspect="equal", cmap="inferno", alpha=1.0) + im = ax.imshow(stkI * convert_flux, norm=LogNorm(vmin, vmax), aspect="equal", cmap=kwargs["cmap"], alpha=1.0) fig.colorbar(im, ax=ax, aspect=30, shrink=0.75, pad=0.025, label=r"$F_{\lambda}$ [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA^{-1}$]") levelsI = np.array([0.8, 2.0, 5.0, 10.0, 20.0, 50.0]) / 100.0 * vmax - print("Total flux contour levels : ", levelsI) + print("Flux density contour levels : ", levelsI) ax.contour(stkI * convert_flux, levels=levelsI, colors="grey", linewidths=0.5) - elif display.lower() in ["pol_flux"]: + elif display.lower() in ["pf", "pol_flux"]: # Display polarization flux display = "pf" if flux_lim is None: if mask.sum() > 0.0: vmin, vmax = 1.0 / 2.0 * np.median(np.sqrt(stk_cov[0, 0][mask]) * convert_flux), np.max(stkI[stkI > 0.0] * convert_flux) + pfmax = (stkI[mask] * pol[mask] * convert_flux).max() else: vmin, vmax = 1.0 / 2.0 * np.median(np.sqrt(stk_cov[0, 0][stkI > 0.0]) * convert_flux), np.max(stkI[stkI > 0.0] * convert_flux) + pfmax = (stkI[stkI > 0.0] * pol[stkI > 
0.0] * convert_flux).max() else: vmin, vmax = flux_lim - im = ax.imshow(stkI * convert_flux * pol, norm=LogNorm(vmin, vmax), aspect="equal", cmap="inferno", alpha=1.0) + im = ax.imshow(stkI * convert_flux * pol, norm=LogNorm(vmin, vmax), aspect="equal", cmap=kwargs["cmap"], alpha=1.0) fig.colorbar(im, ax=ax, aspect=50, shrink=0.75, pad=0.025, label=r"$F_{\lambda} \cdot P$ [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA^{-1}$]") - levelsPf = np.linspace(vmax * 0.01, vmax * 0.99, 10) - print("Polarized flux contour levels : ", levelsPf) + # levelsPf = np.linspace(0.0175, 0.50, 5) * pfmax + levelsPf = np.array([1.73, 13.0, 33.0, 66.0]) / 100.0 * pfmax + print("Polarized flux density contour levels : ", levelsPf) ax.contour(stkI * convert_flux * pol, levels=levelsPf, colors="grey", linewidths=0.5) elif display.lower() in ["p", "pol", "pol_deg"]: # Display polarization degree map display = "p" vmin, vmax = 0.0, 100.0 - im = ax.imshow(pol * 100.0, vmin=vmin, vmax=vmax, aspect="equal", cmap="inferno", alpha=1.0) + im = ax.imshow(pol * 100.0, vmin=vmin, vmax=vmax, aspect="equal", cmap=kwargs["cmap"], alpha=1.0) fig.colorbar(im, ax=ax, aspect=50, shrink=0.75, pad=0.025, label=r"$P$ [%]") elif display.lower() in ["pa", "pang", "pol_ang"]: # Display polarization degree map display = "pa" vmin, vmax = 0.0, 180.0 - im = ax.imshow(princ_angle(pang), vmin=vmin, vmax=vmax, aspect="equal", cmap="inferno", alpha=1.0) + im = ax.imshow(princ_angle(pang), vmin=vmin, vmax=vmax, aspect="equal", cmap=kwargs["cmap"], alpha=1.0) fig.colorbar(im, ax=ax, aspect=50, shrink=0.75, pad=0.025, label=r"$\theta_P$ [°]") elif display.lower() in ["s_p", "pol_err", "pol_deg_err"]: # Display polarization degree error map display = "s_p" - if (SNRp > SNRp_cut).any(): - vmin, vmax = 0.0, np.max([pol_err[SNRp > SNRp_cut].max(), 1.0]) * 100.0 + if (SNRp > P_cut).any(): + vmin, vmax = 0.0, np.max([pol_err[SNRp > P_cut].max(), 1.0]) * 100.0 im = ax.imshow(pol_err * 100.0, vmin=vmin, vmax=vmax, aspect="equal", cmap="inferno_r", alpha=1.0) else: vmin, vmax = 0.0, 100.0 @@ -398,39 +478,48 @@ def polarization_map( ) im = ax.imshow(np.sqrt(stk_cov[0, 0]) * convert_flux, norm=LogNorm(vmin, vmax), aspect="equal", cmap="inferno_r", alpha=1.0) else: - im = ax.imshow(np.sqrt(stk_cov[0, 0]) * convert_flux, aspect="equal", cmap="inferno", alpha=1.0) + im = ax.imshow(np.sqrt(stk_cov[0, 0]) * convert_flux, aspect="equal", cmap=kwargs["cmap"], alpha=1.0) fig.colorbar(im, ax=ax, aspect=50, shrink=0.75, pad=0.025, label=r"$\sigma_I$ [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA^{-1}$]") - elif display.lower() in ["snr", "snri"]: + elif display.lower() in ["snri"]: # Display I_stokes signal-to-noise map display = "snri" vmin, vmax = 0.0, np.max(SNRi[np.isfinite(SNRi)]) if vmax * 0.99 > SNRi_cut: - im = ax.imshow(SNRi, vmin=vmin, vmax=vmax, aspect="equal", cmap="inferno", alpha=1.0) - levelsSNRi = np.linspace(SNRi_cut, vmax * 0.99, 5) + im = ax.imshow(SNRi, vmin=vmin, vmax=vmax, aspect="equal", cmap=kwargs["cmap"], alpha=1.0) + levelsSNRi = np.linspace(SNRi_cut, vmax * 0.99, 5).astype(int) print("SNRi contour levels : ", levelsSNRi) ax.contour(SNRi, levels=levelsSNRi, colors="grey", linewidths=0.5) else: - im = ax.imshow(SNRi, aspect="equal", cmap="inferno", alpha=1.0) + im = ax.imshow(SNRi, aspect="equal", cmap=kwargs["cmap"], alpha=1.0) fig.colorbar(im, ax=ax, aspect=50, shrink=0.75, pad=0.025, label=r"$I_{Stokes}/\sigma_{I}$") - elif display.lower() in ["snrp"]: + elif display.lower() in ["snr", "snrp"]: # Display polarization degree signal-to-noise map 
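# --- Illustrative sketch (not part of the patch) ---------------------------------
# New P_cut convention used throughout plots.py: a value >= 1 is read as a
# signal-to-noise threshold on P, a value < 1 as a threshold on the confidence map
# returned by utils.PCconf, which is built from the normalised Stokes fluxes Q/I
# and U/I and their errors. `compute_pol_mask` is a hypothetical helper written
# only to illustrate that convention; it is not part of the pipeline.
import numpy as np


def compute_pol_mask(pol, pol_err, confP, P_cut):
    maskP = np.zeros(pol.shape, dtype=bool)
    if P_cut >= 1.0:
        # Cut on the signal-to-noise ratio P / sigma_P
        valid = pol_err > 0.0
        maskP[valid] = pol[valid] / pol_err[valid] > P_cut
    else:
        # Cut on the confidence level of the (Q/I, U/I) measurement
        maskP = confP > P_cut
    return maskP
# ----------------------------------------------------------------------------------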
display = "snrp" vmin, vmax = 0.0, np.max(SNRp[np.isfinite(SNRp)]) - if vmax * 0.99 > SNRp_cut: - im = ax.imshow(SNRp, vmin=vmin, vmax=vmax, aspect="equal", cmap="inferno", alpha=1.0) - levelsSNRp = np.linspace(SNRp_cut, vmax * 0.99, 5) + if vmax * 0.99 > P_cut: + im = ax.imshow(SNRp, vmin=vmin, vmax=vmax, aspect="equal", cmap=kwargs["cmap"], alpha=1.0) + levelsSNRp = np.linspace(P_cut, vmax * 0.99, 5).astype(int) print("SNRp contour levels : ", levelsSNRp) ax.contour(SNRp, levels=levelsSNRp, colors="grey", linewidths=0.5) else: - im = ax.imshow(SNRp, aspect="equal", cmap="inferno", alpha=1.0) + im = ax.imshow(SNRp, aspect="equal", cmap=kwargs["cmap"], alpha=1.0) fig.colorbar(im, ax=ax, aspect=50, shrink=0.75, pad=0.025, label=r"$P/\sigma_{P}$") + elif display.lower() in ["conf", "confp"]: + # Display polarization degree signal-to-noise map + display = "confp" + vmin, vmax = 0.0, 1.0 + im = ax.imshow(confP, vmin=vmin, vmax=vmax, aspect="equal", cmap=kwargs["cmap"], alpha=1.0) + levelsconfp = np.array([0.500, 0.900, 0.990, 0.999]) + print("confp contour levels : ", levelsconfp) + ax.contour(confP, levels=levelsconfp, colors="grey", linewidths=0.5) + fig.colorbar(im, ax=ax, aspect=50, shrink=0.75, pad=0.025, label=r"$Conf_{P}$") else: # Defaults to intensity map if mask.sum() > 0.0: vmin, vmax = 1.0 * np.mean(np.sqrt(stk_cov[0, 0][mask]) * convert_flux), np.max(stkI[stkI > 0.0] * convert_flux) else: vmin, vmax = 1.0 * np.mean(np.sqrt(stk_cov[0, 0][stkI > 0.0]) * convert_flux), np.max(stkI[stkI > 0.0] * convert_flux) - im = ax.imshow(stkI * convert_flux, norm=LogNorm(vmin, vmax), aspect="equal", cmap="inferno", alpha=1.0) + im = ax.imshow(stkI * convert_flux, norm=LogNorm(vmin, vmax), aspect="equal", cmap=kwargs["cmap"], alpha=1.0) fig.colorbar(im, ax=ax, aspect=50, shrink=0.75, pad=0.025, label=r"$F_{\lambda}$ [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA$]") # Get integrated values from header @@ -444,27 +533,27 @@ def polarization_map( plt.rcParams.update({"font.size": 10}) px_size = wcs.wcs.get_cdelt()[0] * 3600.0 - px_sc = AnchoredSizeBar(ax.transData, 1.0 / px_size, "1 arcsec", 3, pad=0.25, sep=5, borderpad=0.25, frameon=False, size_vertical=0.005, color="w") + px_sc = AnchoredSizeBar(ax.transData, 1.0 / px_size, "1 arcsec", 3, pad=0.25, sep=5, borderpad=0.25, frameon=False, size_vertical=0.005, color=font_color) north_dir = AnchoredDirectionArrows( ax.transAxes, "E", "N", - length=-0.05, - fontsize=0.02, + length=-0.07, + fontsize=0.03, loc=1, - aspect_ratio=-(stkI.shape[1]/(stkI.shape[0]*1.25)), + aspect_ratio=-(stkI.shape[1] / (stkI.shape[0] * 1.25)), sep_y=0.01, sep_x=0.01, back_length=0.0, head_length=10.0, head_width=10.0, angle=-Stokes[0].header["orientat"], - text_props={"ec": "k", "fc": "w", "alpha": 1, "lw": 0.4}, - arrow_props={"ec": "k", "fc": "w", "alpha": 1, "lw": 1}, + text_props={"ec": "k", "fc": font_color, "alpha": 1, "lw": 0.4}, + arrow_props={"ec": "k", "fc": font_color, "alpha": 1, "lw": 1}, ) - if display.lower() in ["i", "s_i", "snri", "pf", "p", "pa", "s_p", "snrp"]: - if step_vec == 0: + if display.lower() in ["i", "s_i", "snri", "pf", "p", "pa", "s_p", "snrp", "confp"] and step_vec != 0: + if scale_vec == -1: poldata[np.isfinite(poldata)] = 1.0 / 2.0 step_vec = 1 scale_vec = 2.0 @@ -483,12 +572,14 @@ def polarization_map( headwidth=0.0, headlength=0.0, headaxislength=0.0, - width=0.5, - linewidth=0.75, + width=kwargs["width"], + linewidth=kwargs["linewidth"], color="w", edgecolor="k", ) - pol_sc = AnchoredSizeBar(ax.transData, scale_vec, r"$P$= 100 %", 4, 
pad=0.25, sep=5, borderpad=0.25, frameon=False, size_vertical=0.005, color="w") + pol_sc = AnchoredSizeBar( + ax.transData, scale_vec, r"$P$= 100 %", 4, pad=0.25, sep=5, borderpad=0.25, frameon=False, size_vertical=0.005, color=font_color + ) ax.add_artist(pol_sc) ax.add_artist(px_sc) @@ -534,9 +625,8 @@ def polarization_map( if savename is not None: if savename[-4:] not in [".png", ".jpg", ".pdf"]: savename += ".pdf" - fig.savefig(path_join(plots_folder, savename), bbox_inches="tight", dpi=200) + fig.savefig(path_join(plots_folder, savename), bbox_inches="tight", dpi=300, facecolor="None") - plt.show() return fig, ax @@ -604,7 +694,7 @@ class align_maps(object): except KeyError: for key_i, val_i in value: kwargs[key_i] = val_i - self.map_ax.imshow(self.map_data * self.map_convert, aspect="equal", **kwargs) + self.im = self.map_ax.imshow(self.map_data * self.map_convert, aspect="equal", **kwargs) if kwargs["cmap"] in [ "inferno", @@ -670,7 +760,7 @@ class align_maps(object): length=-0.08, fontsize=0.03, loc=1, - aspect_ratio=-(self.map_data.shape[1]/self.map_data.shape[0]), + aspect_ratio=-(self.map_ax.get_xbound()[1] - self.map_ax.get_xbound()[0]) / (self.map_ax.get_ybound()[1] - self.map_ax.get_ybound()[0]), sep_y=0.01, sep_x=0.01, angle=-self.map_header["orientat"], @@ -693,7 +783,7 @@ class align_maps(object): except KeyError: for key_i, val_i in value: other_kwargs[key_i] = val_i - self.other_ax.imshow(self.other_data * self.other_convert, aspect="equal", **other_kwargs) + self.other_im = self.other_ax.imshow(self.other_data * self.other_convert, aspect="equal", **other_kwargs) px_size2 = self.other_wcs.wcs.get_cdelt()[0] * 3600.0 px_sc2 = AnchoredSizeBar( @@ -722,13 +812,13 @@ class align_maps(object): ) if "ORIENTAT" in list(self.other_header.keys()): north_dir2 = AnchoredDirectionArrows( - self.map_ax.transAxes, + self.other_ax.transAxes, "E", "N", length=-0.08, fontsize=0.03, loc=1, - aspect_ratio=-(self.other_data.shape[1]/self.other_data.shape[0]), + aspect_ratio=-(self.other_ax.get_xbound()[1] - self.other_ax.get_xbound()[0]) / (self.other_ax.get_ybound()[1] - self.other_ax.get_ybound()[0]), sep_y=0.01, sep_x=0.01, angle=-self.other_header["orientat"], @@ -765,14 +855,14 @@ class align_maps(object): x = event.xdata y = event.ydata - self.cr_map.set(data=[x, y]) + self.cr_map.set(data=[[x], [y]]) self.fig_align.canvas.draw_idle() if (event.inaxes is not None) and (event.inaxes == self.other_ax): x = event.xdata y = event.ydata - self.cr_other.set(data=[x, y]) + self.cr_other.set(data=[[x], [y]]) self.fig_align.canvas.draw_idle() def reset_align(self, event): @@ -843,16 +933,23 @@ class overplot_radio(align_maps): Inherit from class align_maps in order to get the same WCS on both maps. 
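# --- Illustrative sketch (not part of the patch) ---------------------------------
# Convention behind step_vec / scale_vec in the quiver calls above: vectors are
# headless segments whose (U, V) components come from the degree P and angle
# theta_P (counted from North, hence the +90 deg offset), sub-sampled every
# `step_vec` pixels, with `scale_vec` the pixel length of a P = 100 % vector
# (the pipeline now skips drawing when step_vec = 0; the sketch assumes
# step_vec >= 1). `draw_pol_vectors` is written for illustration only and is not
# a drop-in replacement.
import numpy as np


def draw_pol_vectors(ax, pol, pang, step_vec=1, scale_vec=3.0):
    X, Y = np.meshgrid(np.arange(pol.shape[1]), np.arange(pol.shape[0]))
    U = pol * np.cos(np.pi / 2.0 + np.radians(pang))
    V = pol * np.sin(np.pi / 2.0 + np.radians(pang))
    return ax.quiver(
        X[::step_vec, ::step_vec], Y[::step_vec, ::step_vec],
        U[::step_vec, ::step_vec], V[::step_vec, ::step_vec],
        units="xy", angles="uv", scale=1.0 / scale_vec, scale_units="xy",
        pivot="mid", headwidth=0.0, headlength=0.0, headaxislength=0.0,
        color="w", edgecolor="k", width=0.5, linewidth=0.3,
    )
# ----------------------------------------------------------------------------------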
""" - def overplot(self, levels=None, SNRp_cut=3.0, SNRi_cut=3.0, scale_vec=2, savename=None, **kwargs): + def overplot(self, levels=None, P_cut=0.99, SNRi_cut=1.0, scale_vec=2, savename=None, **kwargs): self.Stokes_UV = self.map self.wcs_UV = self.map_wcs # Get Data obj = self.Stokes_UV[0].header["targname"] stkI = self.Stokes_UV["I_STOKES"].data + stkQ = self.Stokes_UV["Q_STOKES"].data + stkU = self.Stokes_UV["U_STOKES"].data stk_cov = self.Stokes_UV["IQU_COV_MATRIX"].data pol = deepcopy(self.Stokes_UV["POL_DEG_DEBIASED"].data) pol_err = self.Stokes_UV["POL_DEG_ERR"].data pang = self.Stokes_UV["POL_ANG"].data + # Compute confidence level map + QN, UN, QN_ERR, UN_ERR = np.full((4, stkI.shape[0], stkI.shape[1]), np.nan) + for nflux, sflux in zip([QN, UN, QN_ERR, UN_ERR], [stkQ, stkU, np.sqrt(stk_cov[1, 1]), np.sqrt(stk_cov[2, 2])]): + nflux[stkI > 0.0] = sflux[stkI > 0.0] / stkI[stkI > 0.0] + confP = PCconf(QN, UN, QN_ERR, UN_ERR) other_data = self.other_data self.other_convert = 1.0 @@ -864,12 +961,17 @@ class overplot_radio(align_maps): self.map_convert = self.Stokes_UV[0].header["photflam"] # Compute SNR and apply cuts - pol[pol == 0.0] = np.nan - SNRp = pol / pol_err - SNRp[np.isnan(SNRp)] = 0.0 - pol[SNRp < SNRp_cut] = np.nan - SNRi = stkI / np.sqrt(stk_cov[0, 0]) - SNRi[np.isnan(SNRi)] = 0.0 + maskP = np.zeros(pol.shape, dtype=bool) + if P_cut >= 1.0: + SNRp = np.zeros(pol.shape) + SNRp[pol_err > 0.0] = pol[pol_err > 0.0] / pol_err[pol_err > 0.0] + maskP = SNRp > P_cut + else: + maskP = confP > P_cut + pol[np.logical_not(maskP)] = np.nan + + SNRi = np.zeros(stkI.shape) + SNRi[stk_cov[0, 0] > 0.0] = stkI[stk_cov[0, 0] > 0.0] / np.sqrt(stk_cov[0, 0][stk_cov[0, 0] > 0.0]) pol[SNRi < SNRi_cut] = np.nan plt.rcParams.update({"font.size": 16}) @@ -992,7 +1094,7 @@ class overplot_radio(align_maps): length=-0.08, fontsize=0.03, loc=1, - aspect_ratio=-(stkI.shape[1]/stkI.shape[0]), + aspect_ratio=-(stkI.shape[1] / stkI.shape[0]), sep_y=0.01, sep_x=0.01, angle=-self.Stokes_UV[0].header["orientat"], @@ -1025,7 +1127,7 @@ class overplot_radio(align_maps): (0, 0), (0, 1), arrowstyle="-", fc="w", ec="k", lw=2 ) labels.append("{0:s} contour".format(self.other_observer)) - handles.append(Rectangle((0, 0), 1, 1, fill=False, lw=2, ec=other_cont.collections[0].get_edgecolor()[0])) + handles.append(Rectangle((0, 0), 1, 1, fill=False, lw=2, ec=other_cont.get_edgecolor()[0])) self.legend = self.ax_overplot.legend( handles=handles, labels=labels, bbox_to_anchor=(0.0, 1.02, 1.0, 0.102), loc="lower left", mode="expand", borderaxespad=0.0 ) @@ -1033,14 +1135,14 @@ class overplot_radio(align_maps): if savename is not None: if savename[-4:] not in [".png", ".jpg", ".pdf"]: savename += ".pdf" - self.fig_overplot.savefig(savename, bbox_inches="tight", dpi=200) + self.fig_overplot.savefig(savename, bbox_inches="tight", dpi=150, facecolor="None") self.fig_overplot.canvas.draw() - def plot(self, levels=None, SNRp_cut=3.0, SNRi_cut=3.0, savename=None, **kwargs) -> None: + def plot(self, levels=None, P_cut=0.99, SNRi_cut=1.0, savename=None, **kwargs) -> None: while not self.aligned: self.align() - self.overplot(levels=levels, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, savename=savename, **kwargs) + self.overplot(levels=levels, P_cut=P_cut, SNRi_cut=SNRi_cut, savename=savename, **kwargs) plt.show(block=True) @@ -1050,16 +1152,23 @@ class overplot_chandra(align_maps): Inherit from class align_maps in order to get the same WCS on both maps. 
""" - def overplot(self, levels=None, SNRp_cut=3.0, SNRi_cut=3.0, scale_vec=2, zoom=1, savename=None, **kwargs): + def overplot(self, levels=None, P_cut=0.99, SNRi_cut=1.0, scale_vec=2, zoom=1, savename=None, **kwargs): self.Stokes_UV = self.map self.wcs_UV = self.map_wcs # Get Data obj = self.Stokes_UV[0].header["targname"] stkI = self.Stokes_UV["I_STOKES"].data + stkQ = self.Stokes_UV["Q_STOKES"].data + stkU = self.Stokes_UV["U_STOKES"].data stk_cov = self.Stokes_UV["IQU_COV_MATRIX"].data pol = deepcopy(self.Stokes_UV["POL_DEG_DEBIASED"].data) pol_err = self.Stokes_UV["POL_DEG_ERR"].data pang = self.Stokes_UV["POL_ANG"].data + # Compute confidence level map + QN, UN, QN_ERR, UN_ERR = np.full((4, stkI.shape[0], stkI.shape[1]), np.nan) + for nflux, sflux in zip([QN, UN, QN_ERR, UN_ERR], [stkQ, stkU, np.sqrt(stk_cov[1, 1]), np.sqrt(stk_cov[2, 2])]): + nflux[stkI > 0.0] = sflux[stkI > 0.0] / stkI[stkI > 0.0] + confP = PCconf(QN, UN, QN_ERR, UN_ERR) other_data = deepcopy(self.other_data) other_wcs = self.other_wcs.deepcopy() @@ -1070,12 +1179,17 @@ class overplot_chandra(align_maps): self.other_unit = "counts" # Compute SNR and apply cuts - pol[pol == 0.0] = np.nan - SNRp = pol / pol_err - SNRp[np.isnan(SNRp)] = 0.0 - pol[SNRp < SNRp_cut] = np.nan - SNRi = stkI / np.sqrt(stk_cov[0, 0]) - SNRi[np.isnan(SNRi)] = 0.0 + maskP = np.zeros(pol.shape, dtype=bool) + if P_cut >= 1.0: + SNRp = np.zeros(pol.shape) + SNRp[pol_err > 0.0] = pol[pol_err > 0.0] / pol_err[pol_err > 0.0] + maskP = SNRp > P_cut + else: + maskP = confP > P_cut + pol[np.logical_not(maskP)] = np.nan + + SNRi = np.zeros(stkI.shape) + SNRi[stk_cov[0, 0] > 0.0] = stkI[stk_cov[0, 0] > 0.0] / np.sqrt(stk_cov[0, 0][stk_cov[0, 0] > 0.0]) pol[SNRi < SNRi_cut] = np.nan plt.rcParams.update({"font.size": 16}) @@ -1194,7 +1308,7 @@ class overplot_chandra(align_maps): length=-0.08, fontsize=0.03, loc=1, - aspect_ratio=-(stkI.shape[1]/stkI.shape[0]), + aspect_ratio=-(stkI.shape[1] / stkI.shape[0]), sep_y=0.01, sep_x=0.01, angle=-self.Stokes_UV[0].header["orientat"], @@ -1224,7 +1338,7 @@ class overplot_chandra(align_maps): (0, 0), (0, 1), arrowstyle="-", fc="w", ec="k", lw=2 ) labels.append("{0:s} contour in counts".format(self.other_observer)) - handles.append(Rectangle((0, 0), 1, 1, fill=False, lw=2, ec=other_cont.collections[0].get_edgecolor()[0])) + handles.append(Rectangle((0, 0), 1, 1, fill=False, lw=2, ec=other_cont.get_edgecolor()[0])) self.legend = self.ax_overplot.legend( handles=handles, labels=labels, bbox_to_anchor=(0.0, 1.02, 1.0, 0.102), loc="lower left", mode="expand", borderaxespad=0.0 ) @@ -1232,14 +1346,14 @@ class overplot_chandra(align_maps): if savename is not None: if savename[-4:] not in [".png", ".jpg", ".pdf"]: savename += ".pdf" - self.fig_overplot.savefig(savename, bbox_inches="tight", dpi=200) + self.fig_overplot.savefig(savename, bbox_inches="tight", dpi=150, facecolor="None") self.fig_overplot.canvas.draw() - def plot(self, levels=None, SNRp_cut=3.0, SNRi_cut=3.0, zoom=1, savename=None, **kwargs) -> None: + def plot(self, levels=None, P_cut=0.99, SNRi_cut=1.0, zoom=1, savename=None, **kwargs) -> None: while not self.aligned: self.align() - self.overplot(levels=levels, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, zoom=zoom, savename=savename, **kwargs) + self.overplot(levels=levels, P_cut=P_cut, SNRi_cut=SNRi_cut, zoom=zoom, savename=savename, **kwargs) plt.show(block=True) @@ -1249,44 +1363,57 @@ class overplot_pol(align_maps): Inherit from class align_maps in order to get the same WCS on both maps. 
""" - def overplot(self, levels=None, SNRp_cut=3.0, SNRi_cut=3.0, scale_vec=2.0, savename=None, **kwargs): + def overplot(self, levels=None, P_cut=0.99, SNRi_cut=1.0, step_vec=1, scale_vec=2.0, disptype="i", savename=None, **kwargs): self.Stokes_UV = self.map self.wcs_UV = self.map_wcs # Get Data obj = self.Stokes_UV[0].header["targname"] stkI = self.Stokes_UV["I_STOKES"].data + stkQ = self.Stokes_UV["Q_STOKES"].data + stkU = self.Stokes_UV["U_STOKES"].data stk_cov = self.Stokes_UV["IQU_COV_MATRIX"].data pol = deepcopy(self.Stokes_UV["POL_DEG_DEBIASED"].data) pol_err = self.Stokes_UV["POL_DEG_ERR"].data pang = self.Stokes_UV["POL_ANG"].data + # Compute confidence level map + QN, UN, QN_ERR, UN_ERR = np.full((4, stkI.shape[0], stkI.shape[1]), np.nan) + for nflux, sflux in zip([QN, UN, QN_ERR, UN_ERR], [stkQ, stkU, np.sqrt(stk_cov[1, 1]), np.sqrt(stk_cov[2, 2])]): + nflux[stkI > 0.0] = sflux[stkI > 0.0] / stkI[stkI > 0.0] + confP = PCconf(QN, UN, QN_ERR, UN_ERR) other_data = self.other_data # Compute SNR and apply cuts - pol[pol == 0.0] = np.nan - SNRp = pol / pol_err - SNRp[np.isnan(SNRp)] = 0.0 - pol[SNRp < SNRp_cut] = np.nan - SNRi = stkI / np.sqrt(stk_cov[0, 0]) - SNRi[np.isnan(SNRi)] = 0.0 + maskP = np.zeros(pol.shape, dtype=bool) + if P_cut >= 1.0: + SNRp = np.zeros(pol.shape) + SNRp[pol_err > 0.0] = pol[pol_err > 0.0] / pol_err[pol_err > 0.0] + maskP = SNRp > P_cut + else: + maskP = confP > P_cut + pol[np.logical_not(maskP)] = np.nan + + SNRi = np.zeros(stkI.shape) + SNRi[stk_cov[0, 0] > 0.0] = stkI[stk_cov[0, 0] > 0.0] / np.sqrt(stk_cov[0, 0][stk_cov[0, 0] > 0.0]) pol[SNRi < SNRi_cut] = np.nan plt.rcParams.update({"font.size": 16}) - self.fig_overplot, self.ax_overplot = plt.subplots(figsize=(11, 10), subplot_kw=dict(projection=self.other_wcs)) + ratiox = max(int(stkI.shape[1] / stkI.shape[0]), 1) + ratioy = max(int(stkI.shape[0] / stkI.shape[1]), 1) + self.px_scale = self.wcs_UV.wcs.get_cdelt()[0] / self.other_wcs.wcs.get_cdelt()[0] + self.fig_overplot, self.ax_overplot = plt.subplots(figsize=(10 * ratiox, 12 * ratioy), subplot_kw=dict(projection=self.other_wcs)) self.fig_overplot.subplots_adjust(hspace=0, wspace=0, bottom=0.1, left=0.1, top=0.80, right=1.02) self.ax_overplot.set_xlabel(label="Right Ascension (J2000)") self.ax_overplot.set_ylabel(label="Declination (J2000)", labelpad=-1) - self.fig_overplot.suptitle( - "{0:s} observation from {1:s} overplotted with polarization vectors and Stokes I contours from {2:s}".format( - obj, self.other_observer, self.map_observer - ), - wrap=True, - ) - # Display "other" intensity map vmin, vmax = other_data[other_data > 0.0].max() / 1e3 * self.other_convert, other_data[other_data > 0.0].max() * self.other_convert - for key, value in [["cmap", [["cmap", "inferno"]]], ["norm", [["vmin", vmin], ["vmax", vmax]]]]: + for key, value in [ + ["cmap", [["cmap", "inferno"]]], + ["norm", [["vmin", vmin], ["vmax", vmax]]], + ["width", [["width", 0.5 * self.px_scale]]], + ["linewidth", [["linewidth", 0.3 * self.px_scale]]], + ]: try: _ = kwargs[key] except KeyError: @@ -1321,66 +1448,93 @@ class overplot_pol(align_maps): else: self.ax_overplot.set_facecolor("white") font_color = "black" - self.im = self.ax_overplot.imshow(other_data * self.other_convert, alpha=1.0, label="{0:s} observation".format(self.other_observer), **kwargs) + self.im = self.ax_overplot.imshow( + other_data * self.other_convert, alpha=1.0, label="{0:s} observation".format(self.other_observer), cmap=kwargs["cmap"], norm=kwargs["norm"] + ) self.cbar = self.fig_overplot.colorbar( 
self.im, ax=self.ax_overplot, aspect=80, shrink=0.75, pad=0.025, label=r"$F_{{\lambda}}$ [{0:s}]".format(self.other_unit) ) # Display full size polarization vectors - if scale_vec is None: - self.scale_vec = 2.0 - pol[np.isfinite(pol)] = 1.0 / 2.0 - else: - self.scale_vec = scale_vec - step_vec = 1 - self.X, self.Y = np.meshgrid(np.arange(stkI.shape[1]), np.arange(stkI.shape[0])) - self.U, self.V = pol * np.cos(np.pi / 2.0 + pang * np.pi / 180.0), pol * np.sin(np.pi / 2.0 + pang * np.pi / 180.0) - self.Q = self.ax_overplot.quiver( - self.X[::step_vec, ::step_vec], - self.Y[::step_vec, ::step_vec], - self.U[::step_vec, ::step_vec], - self.V[::step_vec, ::step_vec], - units="xy", - angles="uv", - scale=1. / self.scale_vec, - scale_units="xy", - pivot="mid", - headwidth=0.0, - headlength=0.0, - headaxislength=0.0, - width=0.5, - linewidth=0.75, - color="white", - edgecolor="black", - transform=self.ax_overplot.get_transform(self.wcs_UV), - label="{0:s} polarization map".format(self.map_observer), - ) + vecstr = "" + if step_vec != 0: + vecstr = "polarization vectors " + if scale_vec is None: + self.scale_vec = 2.0 * self.px_scale + pol[np.isfinite(pol)] = 1.0 / 2.0 + else: + self.scale_vec = scale_vec * self.px_scale + self.X, self.Y = np.meshgrid(np.arange(stkI.shape[1]), np.arange(stkI.shape[0])) + self.U, self.V = pol * np.cos(np.pi / 2.0 + pang * np.pi / 180.0), pol * np.sin(np.pi / 2.0 + pang * np.pi / 180.0) + self.Q = self.ax_overplot.quiver( + self.X[::step_vec, ::step_vec], + self.Y[::step_vec, ::step_vec], + self.U[::step_vec, ::step_vec], + self.V[::step_vec, ::step_vec], + units="xy", + angles="uv", + scale=1.0 / self.scale_vec, + scale_units="xy", + pivot="mid", + headwidth=0.0, + headlength=0.0, + headaxislength=0.0, + width=kwargs["width"], + linewidth=kwargs["linewidth"], + color="white", + edgecolor="black", + transform=self.ax_overplot.get_transform(self.wcs_UV), + label="{0:s} polarization map".format(self.map_observer), + ) - # Display Stokes I as contours - if levels is None: - levels = np.array([2.0, 5.0, 10.0, 20.0, 90.0]) / 100.0 * np.max(stkI[stkI > 0.0]) * self.map_convert - cont_stkI = self.ax_overplot.contour( - stkI * self.map_convert, levels=levels, colors="grey", alpha=0.75, transform=self.ax_overplot.get_transform(self.wcs_UV) - ) - # self.ax_overplot.clabel(cont_stkI, inline=True, fontsize=5) + # Display Stokes as contours + disptypestr = "" + if disptype.lower() == "p": + disptypestr = "polarization degree" + if levels is None: + levels = np.array([2.0, 5.0, 10.0, 20.0, 90.0]) / 100.0 * np.max(pol[stkI > 0.0]) + cont_stk = self.ax_overplot.contour( + pol * 100.0, levels=levels * 100.0, colors="grey", alpha=0.75, transform=self.ax_overplot.get_transform(self.wcs_UV) + ) + if disptype.lower() == "pf": + disptypestr = "polarized flux" + if levels is None: + levels = np.array([2.0, 5.0, 10.0, 20.0, 90.0]) / 100.0 * np.max(stkI[stkI > 0.0] * pol[stkI > 0.0]) * self.map_convert + cont_stk = self.ax_overplot.contour( + stkI * pol * self.map_convert, levels=levels, colors="grey", alpha=0.75, transform=self.ax_overplot.get_transform(self.wcs_UV) + ) + if disptype.lower() == "snri": + disptypestr = "Stokes I signal-to-noise" + if levels is None: + levels = np.array([2.0, 5.0, 10.0, 20.0, 90.0]) / 100.0 * np.max(SNRi[stk_cov[0, 0] > 0.0]) + cont_stk = self.ax_overplot.contour(SNRi, levels=levels, colors="grey", alpha=0.75, transform=self.ax_overplot.get_transform(self.wcs_UV)) + else: # default to intensity contours + disptypestr = "Stokes I" + if levels is None: + 
levels = np.array([2.0, 5.0, 10.0, 20.0, 90.0]) / 100.0 * np.max(stkI[stkI > 0.0]) * self.map_convert + cont_stk = self.ax_overplot.contour( + stkI * self.map_convert, levels=levels, colors="grey", alpha=0.75, transform=self.ax_overplot.get_transform(self.wcs_UV) + ) + # self.ax_overplot.clabel(cont_stk, inline=False, colors="k", fontsize=7) # Display pixel scale and North direction - fontprops = fm.FontProperties(size=16) - px_size = self.other_wcs.wcs.get_cdelt()[0] * 3600.0 - px_sc = AnchoredSizeBar( - self.ax_overplot.transData, - 1.0 / px_size, - "1 arcsec", - 3, - pad=0.5, - sep=5, - borderpad=0.5, - frameon=False, - size_vertical=0.005, - color=font_color, - fontproperties=fontprops, - ) - self.ax_overplot.add_artist(px_sc) + if step_vec != 0: + fontprops = fm.FontProperties(size=16) + px_size = self.other_wcs.wcs.get_cdelt()[0] * 3600.0 + px_sc = AnchoredSizeBar( + self.ax_overplot.transData, + 1.0 / px_size, + "1 arcsec", + 3, + pad=0.5, + sep=5, + borderpad=0.5, + frameon=False, + size_vertical=0.005, + color=font_color, + fontproperties=fontprops, + ) + self.ax_overplot.add_artist(px_sc) north_dir = AnchoredDirectionArrows( self.ax_overplot.transAxes, "E", @@ -1388,7 +1542,8 @@ class overplot_pol(align_maps): length=-0.08, fontsize=0.03, loc=1, - aspect_ratio=-(stkI.shape[1]/stkI.shape[0]), + aspect_ratio=-(self.ax_overplot.get_xbound()[1] - self.ax_overplot.get_xbound()[0]) + / (self.ax_overplot.get_ybound()[1] - self.ax_overplot.get_ybound()[0]), sep_y=0.01, sep_x=0.01, angle=-self.Stokes_UV[0].header["orientat"], @@ -1396,20 +1551,21 @@ class overplot_pol(align_maps): arrow_props={"ec": "k", "fc": "w", "alpha": 1, "lw": 0.5}, ) self.ax_overplot.add_artist(north_dir) - pol_sc = AnchoredSizeBar( - self.ax_overplot.transData, - self.scale_vec, - r"$P$= 100%", - 4, - pad=0.5, - sep=5, - borderpad=0.5, - frameon=False, - size_vertical=0.005, - color=font_color, - fontproperties=fontprops, - ) - self.ax_overplot.add_artist(pol_sc) + if step_vec != 0: + pol_sc = AnchoredSizeBar( + self.ax_overplot.transData, + self.scale_vec, + r"$P$= 100%", + 4, + pad=0.5, + sep=5, + borderpad=0.5, + frameon=False, + size_vertical=0.005, + color=font_color, + fontproperties=fontprops, + ) + self.ax_overplot.add_artist(pol_sc) (self.cr_map,) = self.ax_overplot.plot(*(self.map_wcs.celestial.wcs.crpix - (1.0, 1.0)), "r+", transform=self.ax_overplot.get_transform(self.wcs_UV)) (self.cr_other,) = self.ax_overplot.plot(*(self.other_wcs.celestial.wcs.crpix - (1.0, 1.0)), "g+") @@ -1422,47 +1578,72 @@ class overplot_pol(align_maps): self.legend_title = r"{0:s} image".format(self.other_observer) handles, labels = self.ax_overplot.get_legend_handles_labels() - handles[np.argmax([li == "{0:s} polarization map".format(self.map_observer) for li in labels])] = FancyArrowPatch( - (0, 0), (0, 1), arrowstyle="-", fc="w", ec="k", lw=2 - ) - labels.append("{0:s} Stokes I contour".format(self.map_observer)) - handles.append(Rectangle((0, 0), 1, 1, fill=False, ec=cont_stkI.collections[0].get_edgecolor()[0])) + if step_vec != 0: + handles[np.argmax([li == "{0:s} polarization map".format(self.map_observer) for li in labels])] = FancyArrowPatch( + (0, 0), (0, 1), arrowstyle="-", fc="w", ec="k", lw=2 + ) + labels.append("{0:s} {1:s} contour".format(self.map_observer, disptypestr)) + handles.append(Rectangle((0, 0), 1, 1, fill=False, ec=cont_stk.get_edgecolor()[0])) self.legend = self.ax_overplot.legend( handles=handles, labels=labels, bbox_to_anchor=(0.0, 1.02, 1.0, 0.102), loc="lower left", mode="expand", 
borderaxespad=0.0 ) + self.fig_overplot.suptitle( + "{0:s} observation from {1:s} overplotted with {2:s} contours from {3:s}".format(obj, self.other_observer, vecstr + disptypestr, self.map_observer), + wrap=True, + ) + if savename is not None: if savename[-4:] not in [".png", ".jpg", ".pdf"]: savename += ".pdf" - self.fig_overplot.savefig(savename, bbox_inches="tight", dpi=200) + self.fig_overplot.savefig(savename, bbox_inches="tight", dpi=150, facecolor="None") self.fig_overplot.canvas.draw() - def plot(self, levels=None, SNRp_cut=3.0, SNRi_cut=3.0, scale_vec=2.0, savename=None, **kwargs) -> None: + def plot(self, levels=None, P_cut=0.99, SNRi_cut=1.0, step_vec=1, scale_vec=2.0, disptype="i", savename=None, **kwargs) -> None: while not self.aligned: self.align() - self.overplot(levels=levels, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, scale_vec=scale_vec, savename=savename, **kwargs) + self.overplot(levels=levels, P_cut=P_cut, SNRi_cut=SNRi_cut, step_vec=step_vec, scale_vec=scale_vec, disptype=disptype, savename=savename, **kwargs) plt.show(block=True) def add_vector(self, position="center", pol_deg=1.0, pol_ang=0.0, **kwargs): - if position == "center": + if isinstance(position, str) and position == "center": position = np.array(self.X.shape) / 2.0 if isinstance(position, SkyCoord): position = self.other_wcs.world_to_pixel(position) u, v = pol_deg * np.cos(np.radians(pol_ang) + np.pi / 2.0), pol_deg * np.sin(np.radians(pol_ang) + np.pi / 2.0) - for key, value in [["scale", [["scale", self.scale_vec]]], ["width", [["width", 0.1]]], ["color", [["color", "k"]]]]: + for key, value in [ + ["scale", [["scale", 1.0 / self.scale_vec]]], + ["width", [["width", 0.5 * self.px_scale]]], + ["linewidth", [["linewidth", 0.3 * self.px_scale]]], + ["color", [["color", "k"]]], + ["edgecolor", [["edgecolor", "w"]]], + ]: try: _ = kwargs[key] except KeyError: for key_i, val_i in value: kwargs[key_i] = val_i + handles, labels = self.legend.legend_handles, [l.get_text() for l in self.legend.texts] new_vec = self.ax_overplot.quiver( - *position, u, v, units="xy", angles="uv", scale_units="xy", pivot="mid", headwidth=0.0, headlength=0.0, headaxislength=0.0, **kwargs + *position, + u, + v, + units="xy", + angles="uv", + scale_units="xy", + pivot="mid", + headwidth=0.0, + headlength=0.0, + headaxislength=0.0, + **kwargs, ) + handles.append(FancyArrowPatch((0, 0), (0, 1), arrowstyle="-", fc=new_vec.get_fc(), ec=new_vec.get_ec())) + labels.append(new_vec.get_label()) self.legend.remove() self.legend = self.ax_overplot.legend( - title=self.legend_title, bbox_to_anchor=(0.0, 1.02, 1.0, 0.102), loc="lower left", mode="expand", borderaxespad=0.0 + title=self.legend_title, handles=handles, labels=labels, bbox_to_anchor=(0.0, 1.02, 1.0, 0.102), loc="lower left", mode="expand", borderaxespad=0.0 ) self.fig_overplot.canvas.draw() return new_vec @@ -1481,7 +1662,7 @@ class align_pol(object): self.kwargs = kwargs - def single_plot(self, curr_map, wcs, v_lim=None, ax_lim=None, SNRp_cut=3.0, SNRi_cut=3.0, savename=None, **kwargs): + def single_plot(self, curr_map, wcs, v_lim=None, ax_lim=None, P_cut=3.0, SNRi_cut=3.0, savename=None, **kwargs): # Get data stkI = curr_map["I_STOKES"].data stk_cov = curr_map["IQU_COV_MATRIX"].data @@ -1504,7 +1685,7 @@ class align_pol(object): SNRi = np.zeros(stkI.shape) SNRi[maskI] = stkI[maskI] / np.sqrt(stk_cov[0, 0][maskI]) - mask = (SNRp > SNRp_cut) * (SNRi > SNRi_cut) * (pol >= 0.0) + mask = (SNRp > P_cut) * (SNRi > SNRi_cut) * (pol >= 0.0) pol[mask] = np.nan # Plot the map @@ -1553,7 +1734,7 
@@ class align_pol(object): length=-0.08, fontsize=0.025, loc=1, - aspect_ratio=-(stkI.shape[1]/stkI.shape[0]), + aspect_ratio=-(stkI.shape[1] / stkI.shape[0]), sep_y=0.01, sep_x=0.01, back_length=0.0, @@ -1602,7 +1783,7 @@ class align_pol(object): if savename is not None: if savename[-4:] not in [".png", ".jpg", ".pdf"]: savename += ".pdf" - fig.savefig(savename, bbox_inches="tight", dpi=300) + fig.savefig(savename, bbox_inches="tight", dpi=150, facecolor="None") plt.show(block=True) return fig, ax @@ -1613,7 +1794,7 @@ class align_pol(object): self.wcs, self.wcs_other[i] = curr_align.align() self.aligned[i] = curr_align.aligned - def plot(self, SNRp_cut=3.0, SNRi_cut=3.0, savename=None, **kwargs): + def plot(self, P_cut=3.0, SNRi_cut=3.0, savename=None, **kwargs): while not self.aligned.all(): self.align() eps = 1e-35 @@ -1659,13 +1840,13 @@ class align_pol(object): ) v_lim = np.array([vmin, vmax]) - fig, ax = self.single_plot(self.ref_map, self.wcs, v_lim=v_lim, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, savename=savename + "_0", **kwargs) + fig, ax = self.single_plot(self.ref_map, self.wcs, v_lim=v_lim, P_cut=P_cut, SNRi_cut=SNRi_cut, savename=savename + "_0", **kwargs) x_lim, y_lim = ax.get_xlim(), ax.get_ylim() ax_lim = np.array([self.wcs.pixel_to_world(x_lim[i], y_lim[i]) for i in range(len(x_lim))]) for i, curr_map in enumerate(self.other_maps): self.single_plot( - curr_map, self.wcs_other[i], v_lim=v_lim, ax_lim=ax_lim, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, savename=savename + "_" + str(i + 1), **kwargs + curr_map, self.wcs_other[i], v_lim=v_lim, ax_lim=ax_lim, P_cut=P_cut, SNRi_cut=SNRi_cut, savename=savename + "_" + str(i + 1), **kwargs ) @@ -1712,6 +1893,7 @@ class crop_map(object): self.mask_alpha = 0.75 self.rect_selector = RectangleSelector(self.ax, self.onselect_crop, button=[1]) self.embedded = True + self.ax.set(xlabel="Right Ascension (J2000)", ylabel="Declination (J2000)") self.display(self.data, self.wcs, self.map_convert, **self.kwargs) self.extent = np.array([0.0, self.data.shape[0], 0.0, self.data.shape[1]]) @@ -2010,8 +2192,7 @@ class image_lasso_selector(object): self.mask = np.zeros(self.img.shape[:2], dtype=bool) self.mask[self.indices] = True if hasattr(self, "cont"): - for coll in self.cont.collections: - coll.remove() + self.cont.remove() self.cont = self.ax.contour(self.mask.astype(float), levels=[0.5], colors="white", linewidths=1) if not self.embedded: self.displayed.set_data(array) @@ -2123,11 +2304,7 @@ class slit(object): for p in self.pix: self.mask[tuple(p)] = (np.abs(np.dot(rot2D(-self.angle), p - self.rect.get_center()[::-1])) < (self.height / 2.0, self.width / 2.0)).all() if hasattr(self, "cont"): - for coll in self.cont.collections: - try: - coll.remove() - except AttributeError: - return + self.cont.remove() self.cont = self.ax.contour(self.mask.astype(float), levels=[0.5], colors="white", linewidths=1) if not self.embedded: self.displayed.set_data(array) @@ -2226,11 +2403,7 @@ class aperture(object): x0, y0 = self.circ.center self.mask = np.sqrt((xx - x0) ** 2 + (yy - y0) ** 2) < self.radius if hasattr(self, "cont"): - for coll in self.cont.collections: - try: - coll.remove() - except AttributeError: - return + self.cont.remove() self.cont = self.ax.contour(self.mask.astype(float), levels=[0.5], colors="white", linewidths=1) if not self.embedded: self.displayed.set_data(array) @@ -2244,21 +2417,22 @@ class pol_map(object): Class to interactively study polarization maps. 
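# --- Note on the contour clean-up change (not part of the patch) -----------------
# image_lasso_selector, slit and aperture all replace the loop over
# `self.cont.collections` with a single `self.cont.remove()`: since Matplotlib 3.8
# a ContourSet is one artist and its `.collections` attribute is deprecated.
# Minimal sketch of the redraw pattern, with `redraw_selection_contour` a
# hypothetical helper for illustration:
def redraw_selection_contour(ax, mask, previous=None):
    """Drop the old selection outline (if any) and draw the new one."""
    if previous is not None:
        previous.remove()  # one call removes the whole ContourSet (Matplotlib >= 3.8)
    return ax.contour(mask.astype(float), levels=[0.5], colors="white", linewidths=1)
# ----------------------------------------------------------------------------------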
""" - def __init__(self, Stokes, SNRp_cut=3.0, SNRi_cut=3.0, step_vec=1, scale_vec=3.0, flux_lim=None, selection=None, pa_err=False): + def __init__(self, Stokes, P_cut=0.99, SNRi_cut=1.0, step_vec=1, scale_vec=3.0, flux_lim=None, selection=None, pa_err=False): if isinstance(Stokes, str): Stokes = fits.open(Stokes) self.Stokes = deepcopy(Stokes) - self.SNRp_cut = SNRp_cut + self.P_cut = P_cut self.SNRi_cut = SNRi_cut self.flux_lim = flux_lim self.SNRi = deepcopy(self.SNRi_cut) - self.SNRp = deepcopy(self.SNRp_cut) + self.SNRp = deepcopy(self.P_cut) self.region = None self.data = None self.display_selection = selection self.step_vec = step_vec self.scale_vec = scale_vec self.pa_err = pa_err + self.conf = PCconf(self.QN, self.UN, self.QN_ERR, self.UN_ERR) # Get data self.targ = self.Stokes[0].header["targname"] @@ -2278,18 +2452,22 @@ class pol_map(object): # Display integrated values in ROI self.pol_int() - # Set axes for sliders (SNRp_cut, SNRi_cut) - ax_I_cut = self.fig.add_axes([0.120, 0.080, 0.230, 0.01]) - ax_P_cut = self.fig.add_axes([0.120, 0.055, 0.230, 0.01]) - ax_vec_sc = self.fig.add_axes([0.240, 0.030, 0.110, 0.01]) - ax_snr_reset = self.fig.add_axes([0.080, 0.020, 0.05, 0.02]) + # Set axes for sliders (P_cut, SNRi_cut) + ax_I_cut = self.fig.add_axes([0.120, 0.080, 0.220, 0.01]) + self.ax_P_cut = self.fig.add_axes([0.120, 0.055, 0.220, 0.01]) + ax_vec_sc = self.fig.add_axes([0.260, 0.030, 0.090, 0.01]) + ax_snr_reset = self.fig.add_axes([0.060, 0.020, 0.05, 0.02]) + ax_snr_conf = self.fig.add_axes([0.115, 0.020, 0.05, 0.02]) SNRi_max = np.max(self.I[self.IQU_cov[0, 0] > 0.0] / np.sqrt(self.IQU_cov[0, 0][self.IQU_cov[0, 0] > 0.0])) SNRp_max = np.max(self.P[self.s_P > 0.0] / self.s_P[self.s_P > 0.0]) s_I_cut = Slider(ax_I_cut, r"$SNR^{I}_{cut}$", 1.0, int(SNRi_max * 0.95), valstep=1, valinit=self.SNRi_cut) - s_P_cut = Slider(ax_P_cut, r"$SNR^{P}_{cut}$", 1.0, int(SNRp_max * 0.95), valstep=1, valinit=self.SNRp_cut) - s_vec_sc = Slider(ax_vec_sc, r"Vectors scale", 0.0, 10.0, valstep=1, valinit=self.scale_vec) + self.s_P_cut = Slider(self.ax_P_cut, r"$Conf^{P}_{cut}$", 0.50, 1.0, valstep=[0.500, 0.900, 0.990, 0.999], valinit=self.P_cut if P_cut <= 1.0 else 0.99) + s_vec_sc = Slider(ax_vec_sc, r"Vec scale", 0.0, 10.0, valstep=1, valinit=self.scale_vec) b_snr_reset = Button(ax_snr_reset, "Reset") b_snr_reset.label.set_fontsize(8) + self.snr_conf = 1 + b_snr_conf = Button(ax_snr_conf, "Conf") + b_snr_conf.label.set_fontsize(8) def update_snri(val): self.SNRi = val @@ -2311,13 +2489,34 @@ class pol_map(object): def reset_snr(event): s_I_cut.reset() - s_P_cut.reset() + self.s_P_cut.reset() s_vec_sc.reset() + def toggle_snr_conf(event=None): + self.ax_P_cut.remove() + self.ax_P_cut = self.fig.add_axes([0.120, 0.055, 0.220, 0.01]) + if self.snr_conf: + self.snr_conf = 0 + b_snr_conf.label.set_text("Conf") + self.s_P_cut = Slider(self.ax_P_cut, r"$SNR^{P}_{cut}$", 1.0, int(SNRp_max * 0.95), valstep=1, valinit=self.P_cut if P_cut >= 1.0 else 3) + else: + self.snr_conf = 1 + b_snr_conf.label.set_text("SNR") + self.s_P_cut = Slider( + self.ax_P_cut, r"$Conf^{P}_{cut}$", 0.50, 1.0, valstep=[0.500, 0.900, 0.990, 0.999], valinit=self.P_cut if P_cut <= 1.0 else 0.99 + ) + self.s_P_cut.on_changed(update_snrp) + update_snrp(self.s_P_cut.val) + self.fig.canvas.draw_idle() + s_I_cut.on_changed(update_snri) - s_P_cut.on_changed(update_snrp) + self.s_P_cut.on_changed(update_snrp) s_vec_sc.on_changed(update_vecsc) b_snr_reset.on_clicked(reset_snr) + b_snr_conf.on_clicked(toggle_snr_conf) + + if self.P_cut 
>= 1.0: + toggle_snr_conf() # Set axe for ROI selection ax_select = self.fig.add_axes([0.375, 0.070, 0.05, 0.02]) @@ -2335,8 +2534,7 @@ class pol_map(object): self.selected = False self.region = deepcopy(self.select_instance.mask.astype(bool)) self.select_instance.displayed.remove() - for coll in self.select_instance.cont.collections: - coll.remove() + self.select_instance.cont.remove() self.select_instance.lasso.set_active(False) self.set_data_mask(deepcopy(self.region)) self.pol_int() @@ -2379,8 +2577,7 @@ class pol_map(object): self.select_instance.update_mask() self.region = deepcopy(self.select_instance.mask.astype(bool)) self.select_instance.displayed.remove() - for coll in self.select_instance.cont.collections[:]: - coll.remove() + self.select_instance.cont.remove() self.select_instance.circ.set_visible(False) self.set_data_mask(deepcopy(self.region)) self.pol_int() @@ -2437,8 +2634,7 @@ class pol_map(object): self.select_instance.update_mask() self.region = deepcopy(self.select_instance.mask.astype(bool)) self.select_instance.displayed.remove() - for coll in self.select_instance.cont.collections[:]: - coll.remove() + self.select_instance.cont.remove() self.select_instance.rect.set_visible(False) self.set_data_mask(deepcopy(self.region)) self.pol_int() @@ -2589,7 +2785,7 @@ class pol_map(object): save_fig.suptitle(r"{0:s} with $SNR_{{p}} \geq$ {1:d} and $SNR_{{I}} \geq$ {2:d}".format(self.targ, int(self.SNRp), int(self.SNRi))) if expression[-4:] not in [".png", ".jpg", ".pdf"]: expression += ".pdf" - save_fig.savefig(expression, bbox_inches="tight", dpi=200) + save_fig.savefig(expression, bbox_inches="tight", dpi=150, facecolor="None") plt.close(save_fig) text_save.set_val("") ax_snr_reset.set(visible=True) @@ -2715,14 +2911,42 @@ class pol_map(object): def I(self): return self.Stokes["I_STOKES"].data + @property + def I_ERR(self): + return np.sqrt(self.Stokes["IQU_COV_MATRIX"].data[0, 0]) + @property def Q(self): return self.Stokes["Q_STOKES"].data + @property + def QN(self): + return self.Q / np.where(self.I > 0, self.I, np.nan) + + @property + def Q_ERR(self): + return np.sqrt(self.Stokes["IQU_COV_MATRIX"].data[1, 1]) + + @property + def QN_ERR(self): + return self.Q_ERR / np.where(self.I > 0, self.I, np.nan) + @property def U(self): return self.Stokes["U_STOKES"].data + @property + def UN(self): + return self.U / np.where(self.I > 0, self.I, np.nan) + + @property + def U_ERR(self): + return np.sqrt(self.Stokes["IQU_COV_MATRIX"].data[2, 2]) + + @property + def UN_ERR(self): + return self.U_ERR / np.where(self.I > 0, self.I, np.nan) + @property def IQU_cov(self): return self.Stokes["IQU_COV_MATRIX"].data @@ -2754,8 +2978,11 @@ class pol_map(object): def cut(self): s_I = np.sqrt(self.IQU_cov[0, 0]) SNRp_mask, SNRi_mask = np.zeros(self.P.shape).astype(bool), np.zeros(self.I.shape).astype(bool) - SNRp_mask[self.s_P > 0.0] = self.P[self.s_P > 0.0] / self.s_P[self.s_P > 0.0] > self.SNRp SNRi_mask[s_I > 0.0] = self.I[s_I > 0.0] / s_I[s_I > 0.0] > self.SNRi + if self.SNRp >= 1.0: + SNRp_mask[self.s_P > 0.0] = self.P[self.s_P > 0.0] / self.s_P[self.s_P > 0.0] > self.SNRp + else: + SNRp_mask = self.conf > self.SNRp return np.logical_and(SNRi_mask, SNRp_mask) def ax_cosmetics(self, ax=None): @@ -2807,7 +3034,7 @@ class pol_map(object): length=-0.05, fontsize=0.02, loc=1, - aspect_ratio=-(self.I.shape[1]/self.I.shape[0]), + aspect_ratio=-(self.I.shape[1] / self.I.shape[0]), sep_y=0.01, sep_x=0.01, back_length=0.0, @@ -3045,8 +3272,10 @@ class pol_map(object): fig.canvas.draw_idle() def 
pol_int(self, fig=None, ax=None): + str_conf = "" if self.region is None: s_I = np.sqrt(self.IQU_cov[0, 0]) + N_pix = self.I.size I_reg = self.I.sum() I_reg_err = np.sqrt(np.sum(s_I**2)) P_reg = self.Stokes[0].header["P_int"] @@ -3061,6 +3290,7 @@ class pol_map(object): s_IU = self.IQU_cov[0, 2] s_QU = self.IQU_cov[1, 2] + N_cut = self.cut.sum() I_cut = self.I[self.cut].sum() Q_cut = self.Q[self.cut].sum() U_cut = self.U[self.cut].sum() @@ -3096,6 +3326,7 @@ class pol_map(object): s_IU = self.IQU_cov[0, 2] s_QU = self.IQU_cov[1, 2] + N_pix = self.region.sum() I_reg = self.I[self.region].sum() Q_reg = self.Q[self.region].sum() U_reg = self.U[self.region].sum() @@ -3106,6 +3337,10 @@ class pol_map(object): IU_reg_err = np.sqrt(np.sum(s_IU[self.region] ** 2)) QU_reg_err = np.sqrt(np.sum(s_QU[self.region] ** 2)) + conf = PCconf(QN=Q_reg / I_reg, QN_ERR=Q_reg_err / I_reg, UN=U_reg / I_reg, UN_ERR=U_reg_err / I_reg) + if 1.0 - conf > 1e-3: + str_conf = "\n" + r"Confidence: {0:.2f} %".format(conf * 100.0) + with np.errstate(divide="ignore", invalid="ignore"): P_reg = np.sqrt(Q_reg**2 + U_reg**2) / I_reg P_reg_err = ( @@ -3124,6 +3359,7 @@ class pol_map(object): ) new_cut = np.logical_and(self.region, self.cut) + N_cut = new_cut.sum() I_cut = self.I[new_cut].sum() Q_cut = self.Q[new_cut].sum() U_cut = self.U[new_cut].sum() @@ -3152,11 +3388,7 @@ class pol_map(object): ) if hasattr(self, "cont"): - for coll in self.cont.collections: - try: - coll.remove() - except AttributeError: - del coll + self.cont.remove() del self.cont if fig is None: fig = self.fig @@ -3172,9 +3404,19 @@ class pol_map(object): + r"$P^{{int}}$ = {0:.1f} $\pm$ {1:.1f} %".format(P_reg * 100.0, np.ceil(P_reg_err * 1000.0) / 10.0) + "\n" + r"$\theta_{{P}}^{{int}}$ = {0:.1f} $\pm$ {1:.1f} °".format(PA_reg, np.ceil(PA_reg_err * 10.0) / 10.0) + + str_conf ) self.str_cut = "" - # self.str_cut = "\n"+r"$F_{{\lambda}}^{{cut}}$({0:.0f} $\AA$) = {1} $ergs \cdot cm^{{-2}} \cdot s^{{-1}} \cdot \AA^{{-1}}$".format(self.pivot_wav, sci_not(I_cut*self.map_convert, I_cut_err*self.map_convert, 2))+"\n"+r"$P^{{cut}}$ = {0:.1f} $\pm$ {1:.1f} %".format(P_cut*100., np.ceil(P_cut_err*1000.)/10.)+"\n"+r"$\theta_{{P}}^{{cut}}$ = {0:.1f} $\pm$ {1:.1f} °".format(PA_cut, np.ceil(PA_cut_err*10.)/10.) + # self.str_cut = ( + # "\n" + # + r"$F_{{\lambda}}^{{cut}}$({0:.0f} $\AA$) = {1} $ergs \cdot cm^{{-2}} \cdot s^{{-1}} \cdot \AA^{{-1}}$".format( + # self.pivot_wav, sci_not(I_cut * self.map_convert, I_cut_err * self.map_convert, 2) + # ) + # + "\n" + # + r"$P^{{cut}}$ = {0:.1f} $\pm$ {1:.1f} %".format(P_cut * 100.0, np.ceil(P_cut_err * 1000.0) / 10.0) + # + "\n" + # + r"$\theta_{{P}}^{{cut}}$ = {0:.1f} $\pm$ {1:.1f} °".format(PA_cut, np.ceil(PA_cut_err * 10.0) / 10.0) + # ) self.an_int = ax.annotate( self.str_int + self.str_cut, color="white", @@ -3198,9 +3440,19 @@ class pol_map(object): + r"$P^{{int}}$ = {0:.1f} $\pm$ {1:.1f} %".format(P_reg * 100.0, np.ceil(P_reg_err * 1000.0) / 10.0) + "\n" + r"$\theta_{{P}}^{{int}}$ = {0:.1f} $\pm$ {1:.1f} °".format(PA_reg, np.ceil(PA_reg_err * 10.0) / 10.0) + + str_conf ) str_cut = "" - # str_cut = "\n"+r"$F_{{\lambda}}^{{cut}}$({0:.0f} $\AA$) = {1} $ergs \cdot cm^{{-2}} \cdot s^{{-1}} \cdot \AA^{{-1}}$".format(self.pivot_wav, sci_not(I_cut*self.map_convert, I_cut_err*self.map_convert, 2))+"\n"+r"$P^{{cut}}$ = {0:.1f} $\pm$ {1:.1f} %".format(P_cut*100., np.ceil(P_cut_err*1000.)/10.)+"\n"+r"$\theta_{{P}}^{{cut}}$ = {0:.1f} $\pm$ {1:.1f} °".format(PA_cut, np.ceil(PA_cut_err*10.)/10.) 
+ # str_cut = ( + # "\n" + # + r"$F_{{\lambda}}^{{cut}}$({0:.0f} $\AA$) = {1} $ergs \cdot cm^{{-2}} \cdot s^{{-1}} \cdot \AA^{{-1}}$".format( + # self.pivot_wav, sci_not(I_cut * self.map_convert, I_cut_err * self.map_convert, 2) + # ) + # + "\n" + # + r"$P^{{cut}}$ = {0:.1f} $\pm$ {1:.1f} %".format(P_cut * 100.0, np.ceil(P_cut_err * 1000.0) / 10.0) + # + "\n" + # + r"$\theta_{{P}}^{{cut}}$ = {0:.1f} $\pm$ {1:.1f} °".format(PA_cut, np.ceil(PA_cut_err * 10.0) / 10.0) + # ) ax.annotate( str_int + str_cut, color="white", @@ -3221,133 +3473,170 @@ if __name__ == "__main__": parser = argparse.ArgumentParser(description="Interactively plot the pipeline products") parser.add_argument("-f", "--file", metavar="path", required=False, help="The full or relative path to the data product", type=str, default=None) - parser.add_argument( - "-p", "--snrp", metavar="snrp_cut", required=False, help="The cut in signal-to-noise for the polarization degree", type=float, default=3.0 - ) + parser.add_argument("-p", "--pcut", metavar="p_cut", required=False, help="The cut in signal-to-noise for the polarization degree", type=float, default=3.0) parser.add_argument("-i", "--snri", metavar="snri_cut", required=False, help="The cut in signal-to-noise for the intensity", type=float, default=3.0) parser.add_argument( "-st", "--step-vec", metavar="step_vec", required=False, help="Quantity of vectors to be shown, 1 is all, 2 is every other, etc.", type=int, default=1 ) parser.add_argument( - "-sc", "--scale-vec", metavar="scale_vec", required=False, help="Size of the 100% polarization vector in pixel units", type=float, default=3.0 + "-sc", "--scale-vec", metavar="scale_vec", required=False, help="Size of the 100%% polarization vector in pixel units", type=float, default=3.0 ) parser.add_argument("-pa", "--pang-err", action="store_true", required=False, help="Whether the polarization angle uncertainties should be displayed") parser.add_argument("-l", "--lim", metavar="flux_lim", nargs=2, required=False, help="Limits for the intensity map", type=float, default=None) - parser.add_argument("-pdf", "--static-pdf", metavar="static_pdf", required=False, help="Whether the analysis tool or the static pdfs should be outputed", default=None) + parser.add_argument( + "-pdf", "--static-pdf", metavar="static_pdf", required=False, help="Whether the analysis tool or the static pdfs should be outputed", default=None + ) + parser.add_argument("-t", "--type", metavar="type", required=False, help="Type of plot to be be outputed", default=None) args = parser.parse_args() if args.file is not None: Stokes_UV = fits.open(args.file, mode="readonly") if args.static_pdf is not None: - polarization_map( - Stokes_UV, - Stokes_UV["DATA_MASK"].data.astype(bool), - SNRp_cut=args.snrp, - SNRi_cut=args.snri, - flux_lim=args.lim, - step_vec=args.step_vec, - scale_vec=args.scale_vec, - savename="_".join([Stokes_UV[0].header["FILENAME"]]), - plots_folder=args.static_pdf, - ) - polarization_map( - Stokes_UV, - Stokes_UV["DATA_MASK"].data.astype(bool), - SNRp_cut=args.snrp, - SNRi_cut=args.snri, - flux_lim=args.lim, - step_vec=args.step_vec, - scale_vec=args.scale_vec, - savename="_".join([Stokes_UV[0].header["FILENAME"], "I"]), - plots_folder=args.static_pdf, - display="Intensity", - ) - polarization_map( - Stokes_UV, - Stokes_UV["DATA_MASK"].data.astype(bool), - SNRp_cut=args.snrp, - SNRi_cut=args.snri, - flux_lim=args.lim, - step_vec=args.step_vec, - scale_vec=args.scale_vec, - savename="_".join([Stokes_UV[0].header["FILENAME"], "P_flux"]), - 
plots_folder=args.static_pdf, - display="Pol_Flux", - ) - polarization_map( - Stokes_UV, - Stokes_UV["DATA_MASK"].data.astype(bool), - SNRp_cut=args.snrp, - SNRi_cut=args.snri, - flux_lim=args.lim, - step_vec=args.step_vec, - scale_vec=args.scale_vec, - savename="_".join([Stokes_UV[0].header["FILENAME"], "P"]), - plots_folder=args.static_pdf, - display="Pol_deg", - ) - polarization_map( - Stokes_UV, - Stokes_UV["DATA_MASK"].data.astype(bool), - SNRp_cut=args.snrp, - SNRi_cut=args.snri, - flux_lim=args.lim, - step_vec=args.step_vec, - scale_vec=args.scale_vec, - savename="_".join([Stokes_UV[0].header["FILENAME"], "PA"]), - plots_folder=args.static_pdf, - display="Pol_ang", - ) - polarization_map( - Stokes_UV, - Stokes_UV["DATA_MASK"].data.astype(bool), - SNRp_cut=args.snrp, - SNRi_cut=args.snri, - flux_lim=args.lim, - step_vec=args.step_vec, - scale_vec=args.scale_vec, - savename="_".join([Stokes_UV[0].header["FILENAME"], "I_err"]), - plots_folder=args.static_pdf, - display="I_err", - ) - polarization_map( - Stokes_UV, - Stokes_UV["DATA_MASK"].data.astype(bool), - SNRp_cut=args.snrp, - SNRi_cut=args.snri, - flux_lim=args.lim, - step_vec=args.step_vec, - scale_vec=args.scale_vec, - savename="_".join([Stokes_UV[0].header["FILENAME"], "P_err"]), - plots_folder=args.static_pdf, - display="Pol_deg_err", - ) - polarization_map( - Stokes_UV, - Stokes_UV["DATA_MASK"].data.astype(bool), - SNRp_cut=args.snrp, - SNRi_cut=args.snri, - flux_lim=args.lim, - step_vec=args.step_vec, - scale_vec=args.scale_vec, - savename="_".join([Stokes_UV[0].header["FILENAME"], "SNRi"]), - plots_folder=args.static_pdf, - display="SNRi", - ) - polarization_map( - Stokes_UV, - Stokes_UV["DATA_MASK"].data.astype(bool), - SNRp_cut=args.snrp, - SNRi_cut=args.snri, - flux_lim=args.lim, - step_vec=args.step_vec, - scale_vec=args.scale_vec, - savename="_".join([Stokes_UV[0].header["FILENAME"], "SNRp"]), - plots_folder=args.static_pdf, - display="SNRp", - ) + if args.type is not None: + polarization_map( + Stokes_UV, + Stokes_UV["DATA_MASK"].data.astype(bool), + P_cut=args.pcut, + SNRi_cut=args.snri, + flux_lim=args.lim, + step_vec=args.step_vec, + scale_vec=args.scale_vec, + savename="_".join([Stokes_UV[0].header["FILENAME"], args.type]), + plots_folder=args.static_pdf, + display=args.type, + ) + else: + polarization_map( + Stokes_UV, + Stokes_UV["DATA_MASK"].data.astype(bool), + P_cut=args.pcut, + SNRi_cut=args.snri, + flux_lim=args.lim, + step_vec=args.step_vec, + scale_vec=args.scale_vec, + savename="_".join([Stokes_UV[0].header["FILENAME"]]), + plots_folder=args.static_pdf, + ) + polarization_map( + Stokes_UV, + Stokes_UV["DATA_MASK"].data.astype(bool), + P_cut=args.pcut, + SNRi_cut=args.snri, + flux_lim=args.lim, + step_vec=args.step_vec, + scale_vec=args.scale_vec, + savename="_".join([Stokes_UV[0].header["FILENAME"], "I"]), + plots_folder=args.static_pdf, + display="Intensity", + ) + polarization_map( + Stokes_UV, + Stokes_UV["DATA_MASK"].data.astype(bool), + P_cut=args.pcut, + SNRi_cut=args.snri, + flux_lim=args.lim, + step_vec=args.step_vec, + scale_vec=args.scale_vec, + savename="_".join([Stokes_UV[0].header["FILENAME"], "P_flux"]), + plots_folder=args.static_pdf, + display="Pol_Flux", + ) + polarization_map( + Stokes_UV, + Stokes_UV["DATA_MASK"].data.astype(bool), + P_cut=args.pcut, + SNRi_cut=args.snri, + flux_lim=args.lim, + step_vec=args.step_vec, + scale_vec=args.scale_vec, + savename="_".join([Stokes_UV[0].header["FILENAME"], "P"]), + plots_folder=args.static_pdf, + display="Pol_deg", + ) + polarization_map( + 
Stokes_UV, + Stokes_UV["DATA_MASK"].data.astype(bool), + P_cut=args.pcut, + SNRi_cut=args.snri, + flux_lim=args.lim, + step_vec=args.step_vec, + scale_vec=args.scale_vec, + savename="_".join([Stokes_UV[0].header["FILENAME"], "PA"]), + plots_folder=args.static_pdf, + display="Pol_ang", + ) + polarization_map( + Stokes_UV, + Stokes_UV["DATA_MASK"].data.astype(bool), + P_cut=args.pcut, + SNRi_cut=args.snri, + flux_lim=args.lim, + step_vec=args.step_vec, + scale_vec=args.scale_vec, + savename="_".join([Stokes_UV[0].header["FILENAME"], "I_err"]), + plots_folder=args.static_pdf, + display="I_err", + ) + polarization_map( + Stokes_UV, + Stokes_UV["DATA_MASK"].data.astype(bool), + P_cut=args.pcut, + SNRi_cut=args.snri, + flux_lim=args.lim, + step_vec=args.step_vec, + scale_vec=args.scale_vec, + savename="_".join([Stokes_UV[0].header["FILENAME"], "P_err"]), + plots_folder=args.static_pdf, + display="Pol_deg_err", + ) + polarization_map( + Stokes_UV, + Stokes_UV["DATA_MASK"].data.astype(bool), + P_cut=args.pcut, + SNRi_cut=args.snri, + flux_lim=args.lim, + step_vec=args.step_vec, + scale_vec=args.scale_vec, + savename="_".join([Stokes_UV[0].header["FILENAME"], "SNRi"]), + plots_folder=args.static_pdf, + display="SNRi", + ) + polarization_map( + Stokes_UV, + Stokes_UV["DATA_MASK"].data.astype(bool), + P_cut=args.pcut if args.pcut >= 1.0 else 3.0, + SNRi_cut=args.snri, + flux_lim=args.lim, + step_vec=args.step_vec, + scale_vec=args.scale_vec, + savename="_".join([Stokes_UV[0].header["FILENAME"], "SNRp"]), + plots_folder=args.static_pdf, + display="SNRp", + ) + polarization_map( + Stokes_UV, + Stokes_UV["DATA_MASK"].data.astype(bool), + P_cut=args.pcut if args.pcut < 1.0 else 0.99, + SNRi_cut=args.snri, + flux_lim=args.lim, + step_vec=args.step_vec, + scale_vec=args.scale_vec, + savename="_".join([Stokes_UV[0].header["FILENAME"], "confP"]), + plots_folder=args.static_pdf, + display="confp", + ) else: - pol_map(Stokes_UV, SNRp_cut=args.snrp, SNRi_cut=args.snri, step_vec=args.step_vec, scale_vec=args.scale_vec, flux_lim=args.lim, pa_err=args.pang_err) - + pol_map( + Stokes_UV, + P_cut=args.pcut, + SNRi_cut=args.snri, + step_vec=args.step_vec, + scale_vec=args.scale_vec, + flux_lim=args.lim, + pa_err=args.pang_err, + selection=args.type, + ) else: - print("python3 plots.py -f -p -i -st -sc -l -pa --pdf ") + print( + "python3 plots.py -f -p -i -st -sc -l -pa --pdf " + ) diff --git a/package/lib/query.py b/package/lib/query.py index ba99d49..76033cd 100755 --- a/package/lib/query.py +++ b/package/lib/query.py @@ -1,4 +1,4 @@ -#!/usr/bin/python3 +#!/usr/bin/env python3 # -*- coding:utf-8 -*- """ Library function to query and download datatsets from MAST api. 
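Note on the reworked plots.py command line above: a minimal usage sketch, assuming the reduced Stokes product below exists and that plots.py is importable as lib.plots (as in the src scripts); the path and cut values are illustrative only. A P_cut below 1.0 is applied as a (Q, U) confidence-level cut, a value of 1.0 or above as a signal-to-noise cut.

# Equivalent CLI call (illustrative): python3 plots.py -f <reduced_Stokes.fits> -p 0.99 -i 5 -st 1 -sc 3
from astropy.io import fits
from lib.plots import pol_map

Stokes_UV = fits.open("./data/NGC1068/5144/NGC1068_FOC_b0.05arcsec_c0.07arcsec_crop.fits")  # illustrative product path
pol_map(Stokes_UV, P_cut=0.99, SNRi_cut=5.0, step_vec=1, scale_vec=3.0)  # opens the interactive polarization map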
diff --git a/package/lib/reduction.py b/package/lib/reduction.py index 2b6f736..613e0d7 100755 --- a/package/lib/reduction.py +++ b/package/lib/reduction.py @@ -217,9 +217,9 @@ def bin_ndarray(ndarray, new_shape, operation="sum"): elif operation.lower() in ["mean", "average", "avg"]: ndarray = ndarray.mean(-1 * (i + 1)) else: - row_comp = np.mat(get_row_compressor(ndarray.shape[0], new_shape[0], operation)) - col_comp = np.mat(get_column_compressor(ndarray.shape[1], new_shape[1], operation)) - ndarray = np.array(row_comp * np.mat(ndarray) * col_comp) + row_comp = np.asmatrix(get_row_compressor(ndarray.shape[0], new_shape[0], operation)) + col_comp = np.asmatrix(get_column_compressor(ndarray.shape[1], new_shape[1], operation)) + ndarray = np.array(row_comp * np.asmatrix(ndarray) * col_comp) return ndarray @@ -416,12 +416,12 @@ def deconvolve_array(data_array, headers, psf="gaussian", FWHM=1.0, scale="px", FWHM /= pxsize[0].min() # Define Point-Spread-Function kernel - if psf.lower() in ["gauss", "gaussian"]: + if isinstance(psf, np.ndarray) and (len(psf.shape) == 2): + kernel = psf + elif psf.lower() in ["gauss", "gaussian"]: if shape is None: shape = np.min(data_array[0].shape) - 2, np.min(data_array[0].shape) - 2 kernel = gaussian_psf(FWHM=FWHM, shape=shape) - elif isinstance(psf, np.ndarray) and (len(psf.shape) == 2): - kernel = psf else: raise ValueError("{} is not a valid value for 'psf'".format(psf)) @@ -676,6 +676,7 @@ def rebin_array(data_array, error_array, headers, pxsize=2, scale="px", operatio nw.wcs.crpix /= Dxy nw.array_shape = new_shape new_header["NAXIS1"], new_header["NAXIS2"] = nw.array_shape + new_header["PHOTFLAM"] = header["PHOTFLAM"] * (Dxy[0] * Dxy[1]) for key, val in nw.to_header().items(): new_header.set(key, val) new_header["SAMPLING"] = (str(pxsize) + scale, "Resampling performed during reduction") diff --git a/package/lib/utils.py b/package/lib/utils.py index bf4128c..7d38335 100755 --- a/package/lib/utils.py +++ b/package/lib/utils.py @@ -4,6 +4,14 @@ import numpy as np def rot2D(ang): """ Return the 2D rotation matrix of given angle in degrees + ---------- + Inputs: + ang : float + Angle in degrees + ---------- + Returns: + rot_mat : numpy.ndarray + 2D matrix of shape (2,2) to perform vector rotation at angle "ang". """ alpha = np.pi * ang / 180 return np.array([[np.cos(alpha), np.sin(alpha)], [-np.sin(alpha), np.cos(alpha)]]) @@ -13,6 +21,14 @@ def princ_angle(ang): """ Return the principal angle in the 0° to 180° quadrant as PA is always defined at p/m 180°. + ---------- + Inputs: + ang : float, numpy.ndarray + Angle in degrees. Can be an array of angles. + ---------- + Returns: + princ_ang : float, numpy.ndarray + Principal angle in the 0°-180° quadrant in the same shape as input. """ if not isinstance(ang, np.ndarray): A = np.array([ang]) @@ -28,9 +44,100 @@ def princ_angle(ang): return A[0] +def PCconf(QN, UN, QN_ERR, UN_ERR): + """ + Compute the confidence level for 2 parameters polarisation degree and + polarisation angle from the PCUBE analysis. + ---------- + Inputs: + QN : float, numpy.ndarray + Normalized Q Stokes flux. + UN : float, numpy.ndarray + Normalized U Stokes flux. + QN_ERR : float, numpy.ndarray + Normalized error on Q Stokes flux. + UN_ERR : float, numpy.ndarray + Normalized error on U Stokes flux. + ---------- + Returns: + conf : numpy.ndarray + 2D matrix of same shape as input containing the confidence on the polarization + computation between 0 and 1 for 2 parameters of interest (Q and U Stokes fluxes). 
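+    ----------
+    Example (illustrative values, not from an actual observation):
+    conf = PCconf(np.array([0.02]), np.array([0.01]), np.array([0.01]), np.array([0.01]))
+    gives chi2 = (0.02/0.01)**2 + (0.01/0.01)**2 = 5, hence a confidence of 1 - exp(-2.5), roughly 0.92.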
+ """ + mask = np.logical_and(QN_ERR > 0.0, UN_ERR > 0.0) + conf = np.full(QN.shape, -1.0) + chi2 = QN**2 / QN_ERR**2 + UN**2 / UN_ERR**2 + conf[mask] = 1.0 - np.exp(-0.5 * chi2[mask]) + return conf + + +def CenterConf(mask, PA, sPA): + """ + Compute the confidence map for the position of the center of emission. + ---------- + Inputs: + mask : bool, numpy.ndarray + Mask of the polarization vectors from which the center of emission should be drawn. + PA : float, numpy.ndarray + 2D matrix containing the computed polarization angle. + sPA : float, numpy.ndarray + 2D matrix containing the total uncertainties on the polarization angle. + ---------- + Returns: + conf : numpy.ndarray + 2D matrix of same shape as input containing the confidence on the polarization + computation between 0 and 1 for 2 parameters of interest (Q and U Stokes fluxes). + """ + chi2 = np.full(PA.shape, np.nan) + conf = np.full(PA.shape, -1.0) + yy, xx = np.indices(PA.shape) + + def ideal(c): + itheta = np.full(PA.shape, np.nan) + itheta[(xx + 0.5) != c[0]] = np.degrees(np.arctan((yy[(xx + 0.5) != c[0]] + 0.5 - c[1]) / (xx[(xx + 0.5) != c[0]] + 0.5 - c[0]))) + itheta[(xx + 0.5) == c[0]] = PA[(xx + 0.5) == c[0]] + return princ_angle(itheta) + + def chisq(c): + return np.sum((princ_angle(PA[mask]) - ideal((c[0], c[1]))[mask]) ** 2 / sPA[mask] ** 2) / np.sum(mask) + + for x, y in zip(xx[np.isfinite(PA)], yy[np.isfinite(PA)]): + chi2[y, x] = chisq((x, y)) + + from scipy.optimize import minimize + from scipy.special import gammainc + + conf[np.isfinite(PA)] = 1.0 - gammainc(0.5, 0.5 * chi2[np.isfinite(PA)]) + c0 = np.unravel_index(np.argmax(conf), conf.shape)[::-1] + result = minimize(chisq, c0, bounds=[(0, PA.shape[1]), (0.0, PA.shape[0])]) + if result.success: + print("Center of emission found") + else: + print("Center of emission not found", result) + return conf, result.x + + def sci_not(v, err, rnd=1, out=str): """ - Return the scientifque error notation as a string. + Return the scientific error notation as a string. + ---------- + Inputs: + v : float + Value to be transformed into scientific notation. + err : float + Error on the value to be transformed into scientific notation. + rnd : int + Number of significant numbers for the scientific notation. + Default to 1. + out : str or other + Format in which the notation should be returned. "str" means the notation + is returned as a single string, "other" means it is returned as a list of "str". + Default to str. + ---------- + Returns: + conf : numpy.ndarray + 2D matrix of same shape as input containing the confidence on the polarization + computation between 0 and 1 for 2 parameters of interest (Q and U Stokes fluxes). """ power = -int(("%E" % v)[-3:]) + 1 output = [r"({0}".format(round(v * 10**power, rnd)), round(v * 10**power, rnd)] @@ -46,10 +153,21 @@ def sci_not(v, err, rnd=1, out=str): else: return *output[1:], -power + def wcs_PA(PC21, PC22): """ Return the position angle in degrees to the North direction of a wcs from the values of coefficient of its transformation matrix. + ---------- + Inputs: + PC21 : float + Value of the WCS matric PC[1,0] + PC22 : float + Value of the WCS matric PC[1,1] + ---------- + Returns: + orient : float + Angle in degrees between the North direction and the Up direction of the WCS. 
""" if (abs(PC21) > abs(PC22)) and (PC21 >= 0): orient = -np.arccos(PC22) * 180.0 / np.pi diff --git a/package/overplot_IC5063.py b/package/overplot_IC5063.py deleted file mode 100755 index 6a4fb1e..0000000 --- a/package/overplot_IC5063.py +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/python3 -import numpy as np -from astropy.io import fits -from lib.plots import overplot_pol, overplot_radio -from matplotlib.colors import LogNorm - -Stokes_UV = fits.open("./data/IC5063/5918/IC5063_FOC_b0.10arcsec_c0.20arcsec.fits") -Stokes_18GHz = fits.open("./data/IC5063/radio/IC5063_18GHz.fits") -Stokes_24GHz = fits.open("./data/IC5063/radio/IC5063_24GHz.fits") -Stokes_103GHz = fits.open("./data/IC5063/radio/IC5063_103GHz.fits") -Stokes_229GHz = fits.open("./data/IC5063/radio/IC5063_229GHz.fits") -Stokes_357GHz = fits.open("./data/IC5063/radio/IC5063_357GHz.fits") -# Stokes_S2 = fits.open("./data/IC5063/POLARIZATION_COMPARISON/S2_rot_crop.fits") -Stokes_IR = fits.open("./data/IC5063/IR/u2e65g01t_c0f_rot.fits") - -# levelsMorganti = np.array([1.,2.,3.,8.,16.,32.,64.,128.]) -levelsMorganti = np.logspace(-0.1249, 1.97, 7) / 100.0 - -levels18GHz = levelsMorganti * Stokes_18GHz[0].data.max() -A = overplot_radio(Stokes_UV, Stokes_18GHz) -A.plot(levels=levels18GHz, SNRp_cut=2.0, SNRi_cut=10.0, savename="./plots/IC5063/18GHz_overplot.pdf", vec_scale=None) - -levels24GHz = levelsMorganti * Stokes_24GHz[0].data.max() -B = overplot_radio(Stokes_UV, Stokes_24GHz) -B.plot(levels=levels24GHz, SNRp_cut=2.0, SNRi_cut=10.0, savename="./plots/IC5063/24GHz_overplot.pdf", vec_scale=None) - -levels103GHz = levelsMorganti * Stokes_103GHz[0].data.max() -C = overplot_radio(Stokes_UV, Stokes_103GHz) -C.plot(levels=levels103GHz, SNRp_cut=2.0, SNRi_cut=10.0, savename="./plots/IC5063/103GHz_overplot.pdf", vec_scale=None) - -levels229GHz = levelsMorganti * Stokes_229GHz[0].data.max() -D = overplot_radio(Stokes_UV, Stokes_229GHz) -D.plot(levels=levels229GHz, SNRp_cut=2.0, SNRi_cut=10.0, savename="./plots/IC5063/229GHz_overplot.pdf", vec_scale=None) - -levels357GHz = levelsMorganti * Stokes_357GHz[0].data.max() -E = overplot_radio(Stokes_UV, Stokes_357GHz) -E.plot(levels=levels357GHz, SNRp_cut=2.0, SNRi_cut=10.0, savename="./plots/IC5063/357GHz_overplot.pdf", vec_scale=None) - -# F = overplot_pol(Stokes_UV, Stokes_S2) -# F.plot(SNRp_cut=3.0, SNRi_cut=80.0, savename='./plots/IC5063/S2_overplot.pdf', norm=LogNorm(vmin=5e-20,vmax=5e-18)) - -G = overplot_pol(Stokes_UV, Stokes_IR, cmap="inferno") -G.plot( - SNRp_cut=2.0, - SNRi_cut=10.0, - savename="./plots/IC5063/IR_overplot.pdf", - vec_scale=None, - norm=LogNorm(Stokes_IR[0].data.max() * Stokes_IR[0].header["photflam"] / 1e3, Stokes_IR[0].data.max() * Stokes_IR[0].header["photflam"]), - cmap="inferno_r", -) diff --git a/package/overplot_MRK463E.py b/package/overplot_MRK463E.py deleted file mode 100755 index 5c3411d..0000000 --- a/package/overplot_MRK463E.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/python3 -import numpy as np -from astropy.io import fits -from lib.plots import overplot_chandra, overplot_pol -from matplotlib.colors import LogNorm - -Stokes_UV = fits.open("./data/MRK463E/5960/MRK463E_FOC_b0.05arcsec_c0.10arcsec.fits") -Stokes_IR = fits.open("./data/MRK463E/WFPC2/IR_rot_crop.fits") -Stokes_Xr = fits.open("./data/MRK463E/Chandra/X_ray_crop.fits") - -levels = np.geomspace(1.0, 99.0, 7) - -A = overplot_chandra(Stokes_UV, Stokes_Xr, norm=LogNorm()) -A.plot(levels=levels, SNRp_cut=3.0, SNRi_cut=3.0, vec_scale=5, zoom=1, savename="./plots/MRK463E/Chandra_overplot.pdf") 
-A.write_to(path1="./data/MRK463E/FOC_data_Chandra.fits", path2="./data/MRK463E/Chandra_data.fits", suffix="aligned") - -levels = np.array([0.8, 2, 5, 10, 20, 50]) / 100.0 * Stokes_UV[0].header["photflam"] -B = overplot_pol(Stokes_UV, Stokes_IR, norm=LogNorm()) -B.plot(levels=levels, SNRp_cut=3.0, SNRi_cut=3.0, vec_scale=5, norm=LogNorm(8.5e-18, 2.5e-15), savename="./plots/MRK463E/IR_overplot.pdf") -B.write_to(path1="./data/MRK463E/FOC_data_WFPC.fits", path2="./data/MRK463E/WFPC_data.fits", suffix="aligned") diff --git a/package/Combine.py b/package/src/Combine.py similarity index 96% rename from package/Combine.py rename to package/src/Combine.py index b3871f1..d5b19bb 100755 --- a/package/Combine.py +++ b/package/src/Combine.py @@ -1,6 +1,9 @@ -#!/usr/bin/python +#!/usr/bin/env python3 # -*- coding:utf-8 -*- -# Project libraries +from pathlib import Path +from sys import path as syspath + +syspath.append(str(Path(__file__).parent.parent)) import numpy as np @@ -170,7 +173,7 @@ def main(infiles, target=None, output_dir="./data/"): # Reduction parameters kwargs = {} # Polarization map output - kwargs["SNRp_cut"] = 3.0 + kwargs["P_cut"] = 0.99 kwargs["SNRi_cut"] = 1.0 kwargs["flux_lim"] = 1e-19, 3e-17 kwargs["scale_vec"] = 5 @@ -183,9 +186,7 @@ def main(infiles, target=None, output_dir="./data/"): new_infiles = [] for i, group in enumerate(grouped_infiles): - new_infiles.append( - FOC_reduction(target=target + "-" + str(i + 1), infiles=["/".join([data_folder, file]) for file in group], interactive=True)[0] - ) + new_infiles.append(FOC_reduction(target=target + "-" + str(i + 1), infiles=["/".join([data_folder, file]) for file in group], interactive=True)[0]) infiles = new_infiles diff --git a/package/src/__init__.py b/package/src/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/package/src/analysis.py b/package/src/analysis.py deleted file mode 100755 index 815eaa3..0000000 --- a/package/src/analysis.py +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/python -from getopt import error as get_error -from getopt import getopt -from sys import argv - -arglist = argv[1:] -options = "hf:p:i:l:" -long_options = ["help", "fits=", "snrp=", "snri=", "lim="] - -fits_path = None -SNRp_cut, SNRi_cut = 3, 3 -flux_lim = None -out_txt = None - -try: - arg, val = getopt(arglist, options, long_options) - - for curr_arg, curr_val in arg: - if curr_arg in ("-h", "--help"): - print("python3 analysis.py -f -p -i -l ") - elif curr_arg in ("-f", "--fits"): - fits_path = str(curr_val) - elif curr_arg in ("-p", "--snrp"): - SNRp_cut = int(curr_val) - elif curr_arg in ("-i", "--snri"): - SNRi_cut = int(curr_val) - elif curr_arg in ("-l", "--lim"): - flux_lim = list("".join(curr_val).split(",")) -except get_error as err: - print(str(err)) - -if fits_path is not None: - from astropy.io import fits - from lib.plots import pol_map - - Stokes_UV = fits.open(fits_path) - p = pol_map(Stokes_UV, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim) - -else: - print("python3 analysis.py -f -p -i -l ") diff --git a/package/src/comparison_Kishimoto.py b/package/src/comparison_Kishimoto.py deleted file mode 100755 index 75b7073..0000000 --- a/package/src/comparison_Kishimoto.py +++ /dev/null @@ -1,214 +0,0 @@ -#!/usr/bin/python -from src.lib.background import gauss, bin_centers -from src.lib.deconvolve import zeropad -from src.lib.reduction import align_data -from src.lib.plots import princ_angle -from matplotlib.colors import LogNorm -from os.path import join as path_join -from astropy.io import fits -from 
astropy.wcs import WCS -from scipy.ndimage import shift -from scipy.optimize import curve_fit -import numpy as np -import matplotlib.pyplot as plt - -root_dir = path_join('/home/t.barnouin/Documents/Thesis/HST') -root_dir_K = path_join(root_dir, 'Kishimoto', 'output') -root_dir_S = path_join(root_dir, 'FOC_Reduction', 'output') -root_dir_data_S = path_join(root_dir, 'FOC_Reduction', 'data', 'NGC1068', '5144') -root_dir_plot_S = path_join(root_dir, 'FOC_Reduction', 'plots', 'NGC1068', '5144', 'notaligned') -filename_S = "NGC1068_FOC_b10.00pixel_not_aligned.fits" -plt.rcParams.update({'font.size': 15}) - -SNRi_cut = 30. -SNRp_cut = 3. - -data_K = {} -data_S = {} -for d, i in zip(['I', 'Q', 'U', 'P', 'PA', 'sI', 'sQ', 'sU', 'sP', 'sPA'], [0, 1, 2, 5, 8, (3, 0, 0), (3, 1, 1), (3, 2, 2), 6, 9]): - data_K[d] = np.loadtxt(path_join(root_dir_K, d+'.txt')) - with fits.open(path_join(root_dir_data_S, filename_S)) as f: - if not type(i) is int: - data_S[d] = np.sqrt(f[i[0]].data[i[1], i[2]]) - else: - data_S[d] = f[i].data - if i == 0: - header = f[i].header -wcs = WCS(header) -convert_flux = header['photflam'] - -bkg_S = np.median(data_S['I'])/3 -bkg_K = np.median(data_K['I'])/3 - -# zeropad data to get same size of array -shape = data_S['I'].shape -for d in data_K: - data_K[d] = zeropad(data_K[d], shape) - -# shift array to get same information in same pixel -data_arr, error_ar, heads, data_msk, shifts, shifts_err = align_data(np.array([data_S['I'], data_K['I']]), [header, header], error_array=np.array( - [data_S['sI'], data_K['sI']]), background=np.array([bkg_S, bkg_K]), upsample_factor=10., ref_center='center', return_shifts=True) -for d in data_K: - data_K[d] = shift(data_K[d], shifts[1], order=1, cval=0.) - -# compute pol components from shifted array -for d in [data_S, data_K]: - for i in d: - d[i][np.isnan(d[i])] = 0. - d['P'] = np.where(np.logical_and(np.isfinite(d['I']), d['I'] > 0.), np.sqrt(d['Q']**2+d['U']**2)/d['I'], 0.) - d['sP'] = np.where(np.logical_and(np.isfinite(d['I']), d['I'] > 0.), np.sqrt((d['Q']**2*d['sQ']**2+d['U']**2*d['sU']**2) / - (d['Q']**2+d['U']**2)+((d['Q']/d['I'])**2+(d['U']/d['I'])**2)*d['sI']**2)/d['I'], 0.) - d['d_P'] = np.where(np.logical_and(np.isfinite(d['P']), np.isfinite(d['sP'])), np.sqrt(d['P']**2-d['sP']**2), 0.) - d['PA'] = 0.5*np.arctan2(d['U'], d['Q'])+np.pi - d['SNRp'] = np.zeros(d['d_P'].shape) - d['SNRp'][d['sP'] > 0.] = d['d_P'][d['sP'] > 0.]/d['sP'][d['sP'] > 0.] - d['SNRi'] = np.zeros(d['I'].shape) - d['SNRi'][d['sI'] > 0.] = d['I'][d['sI'] > 0.]/d['sI'][d['sI'] > 0.] - d['mask'] = np.logical_and(d['SNRi'] > SNRi_cut, d['SNRp'] > SNRp_cut) -data_S['mask'], data_K['mask'] = np.logical_and(data_S['mask'], data_K['mask']), np.logical_and(data_S['mask'], data_K['mask']) - - -# -# Compute histogram of measured polarization in cut -# -bins = int(data_S['mask'].sum()/5) -bin_size = 1./bins -mod_p = np.linspace(0., 1., 300) -for d in [data_S, data_K]: - d['hist'], d['bin_edges'] = np.histogram(d['d_P'][d['mask']], bins=bins, range=(0., 1.)) - d['binning'] = bin_centers(d['bin_edges']) - peak, bins_fwhm = d['binning'][np.argmax(d['hist'])], d['binning'][d['hist'] > d['hist'].max()/2.] 
- fwhm = bins_fwhm[1]-bins_fwhm[0] - p0 = [d['hist'].max(), peak, fwhm] - try: - popt, pcov = curve_fit(gauss, d['binning'], d['hist'], p0=p0) - except RuntimeError: - popt = p0 - d['hist_chi2'] = np.sum((d['hist']-gauss(d['binning'], *popt))**2)/d['hist'].size - d['hist_popt'] = popt - -fig_p, ax_p = plt.subplots(num="Polarization degree histogram", figsize=(10, 6), constrained_layout=True) -ax_p.errorbar(data_S['binning'], data_S['hist'], xerr=bin_size/2., fmt='b.', ecolor='b', label='P through this pipeline') -ax_p.plot(mod_p, gauss(mod_p, *data_S['hist_popt']), 'b--', label='mean = {1:.2f}, stdev = {2:.2f}'.format(*data_S['hist_popt'])) -ax_p.errorbar(data_K['binning'], data_K['hist'], xerr=bin_size/2., fmt='r.', ecolor='r', label="P through Kishimoto's pipeline") -ax_p.plot(mod_p, gauss(mod_p, *data_K['hist_popt']), 'r--', label='mean = {1:.2f}, stdev = {2:.2f}'.format(*data_K['hist_popt'])) -ax_p.set(xlabel="Polarization degree", ylabel="Counts", title="Histogram of polarization degree computed in the cut for both pipelines.") -ax_p.legend() -fig_p.savefig(path_join(root_dir_plot_S, "NGC1068_K_pol_deg.png"), bbox_inches="tight", dpi=300) - -# -# Compute angular difference between the maps in cut -# -dtheta = np.where(data_S['mask'], 0.5*np.arctan((np.sin(2*data_S['PA'])*np.cos(2*data_K['PA'])-np.cos(2*data_S['PA']) * - np.cos(2*data_K['PA']))/(np.cos(2*data_S['PA'])*np.cos(2*data_K['PA'])+np.cos(2*data_S['PA'])*np.sin(2*data_K['PA']))), np.nan) -fig_pa = plt.figure(num="Polarization degree alignement") -ax_pa = fig_pa.add_subplot(111, projection=wcs) -cbar_ax_pa = fig_pa.add_axes([0.88, 0.12, 0.01, 0.75]) -ax_pa.set_title(r"Degree of alignement $\zeta$ of the polarization angles from the 2 pipelines in the cut") -im_pa = ax_pa.imshow(np.cos(2*dtheta), vmin=-1., vmax=1., origin='lower', cmap='bwr', label=r"$\zeta$ between this pipeline and Kishimoto's") -cbar_pa = plt.colorbar(im_pa, cax=cbar_ax_pa, label=r"$\zeta = \cos\left( 2 \cdot \delta\theta_P \right)$") -ax_pa.coords[0].set_axislabel('Right Ascension (J2000)') -ax_pa.coords[1].set_axislabel('Declination (J2000)') -fig_pa.savefig(path_join(root_dir_plot_S, "NGC1068_K_pol_ang.png"), bbox_inches="tight", dpi=300) - -# -# Compute power uncertainty difference between the maps in cut -# -eta = np.where(data_S['mask'], np.abs(data_K['d_P']-data_S['d_P'])/np.sqrt(data_S['sP']**2+data_K['sP']**2)/2., np.nan) -fig_dif_p = plt.figure(num="Polarization power difference ratio") -ax_dif_p = fig_dif_p.add_subplot(111, projection=wcs) -cbar_ax_dif_p = fig_dif_p.add_axes([0.88, 0.12, 0.01, 0.75]) -ax_dif_p.set_title(r"Degree of difference $\eta$ of the polarization from the 2 pipelines in the cut") -im_dif_p = ax_dif_p.imshow(eta, vmin=0., vmax=2., origin='lower', cmap='bwr_r', label=r"$\eta$ between this pipeline and Kishimoto's") -cbar_dif_p = plt.colorbar(im_dif_p, cax=cbar_ax_dif_p, label=r"$\eta = \frac{2 \left|P^K-P^S\right|}{\sqrt{{\sigma^K_P}^2+{\sigma^S_P}^2}}$") -ax_dif_p.coords[0].set_axislabel('Right Ascension (J2000)') -ax_dif_p.coords[1].set_axislabel('Declination (J2000)') -fig_dif_p.savefig(path_join(root_dir_plot_S, "NGC1068_K_pol_diff.png"), bbox_inches="tight", dpi=300) - -# -# Compute angle uncertainty difference between the maps in cut -# -eta = np.where(data_S['mask'], np.abs(data_K['PA']-data_S['PA'])/np.sqrt(data_S['sPA']**2+data_K['sPA']**2)/2., np.nan) -fig_dif_pa = plt.figure(num="Polarization angle difference ratio") -ax_dif_pa = fig_dif_pa.add_subplot(111, projection=wcs) -cbar_ax_dif_pa = fig_dif_pa.add_axes([0.88, 
0.12, 0.01, 0.75]) -ax_dif_pa.set_title(r"Degree of difference $\eta$ of the polarization from the 2 pipelines in the cut") -im_dif_pa = ax_dif_pa.imshow(eta, vmin=0., vmax=2., origin='lower', cmap='bwr_r', label=r"$\eta$ between this pipeline and Kishimoto's") -cbar_dif_pa = plt.colorbar(im_dif_pa, cax=cbar_ax_dif_pa, - label=r"$\eta = \frac{2 \left|\theta_P^K-\theta_P^S\right|}{\sqrt{{\sigma^K_{\theta_P}}^2+{\sigma^S_{\theta_P}}^2}}$") -ax_dif_pa.coords[0].set_axislabel('Right Ascension (J2000)') -ax_dif_pa.coords[1].set_axislabel('Declination (J2000)') -fig_dif_pa.savefig(path_join(root_dir_plot_S, "NGC1068_K_polang_diff.png"), bbox_inches="tight", dpi=300) - -# display both polarization maps to check consistency -# plt.rcParams.update({'font.size': 15}) -fig = plt.figure(num="Polarization maps comparison", figsize=(10, 10)) -ax = fig.add_subplot(111, projection=wcs) -fig.subplots_adjust(right=0.85) -cbar_ax = fig.add_axes([0.88, 0.12, 0.01, 0.75]) - -for d in [data_S, data_K]: - d['X'], d['Y'] = np.meshgrid(np.arange(d['I'].shape[1]), np.arange(d['I'].shape[0])) - d['xy_U'], d['xy_V'] = np.where(d['mask'], d['d_P']*np.cos(np.pi/2.+d['PA']), np.nan), np.where(d['mask'], d['d_P']*np.sin(np.pi/2.+d['PA']), np.nan) - -im0 = ax.imshow(data_S['I']*convert_flux, norm=LogNorm(data_S['I'][data_S['I'] > 0].min()*convert_flux, data_S['I'] - [data_S['I'] > 0].max()*convert_flux), origin='lower', cmap='gray', label=r"$I_{STOKES}$ through this pipeline") -quiv0 = ax.quiver(data_S['X'], data_S['Y'], data_S['xy_U'], data_S['xy_V'], units='xy', angles='uv', scale=0.5, scale_units='xy', - pivot='mid', headwidth=0., headlength=0., headaxislength=0., width=0.2, color='b', alpha=0.75, label="PA through this pipeline") -quiv1 = ax.quiver(data_K['X'], data_K['Y'], data_K['xy_U'], data_K['xy_V'], units='xy', angles='uv', scale=0.5, scale_units='xy', - pivot='mid', headwidth=0., headlength=0., headaxislength=0., width=0.1, color='r', alpha=0.75, label="PA through Kishimoto's pipeline") - -ax.set_title(r"$SNR_P \geq$ "+str(SNRi_cut)+r"$\; & \; SNR_I \geq $"+str(SNRp_cut)) -# ax.coords.grid(True, color='white', ls='dotted', alpha=0.5) -ax.coords[0].set_axislabel('Right Ascension (J2000)') -ax.coords[0].set_axislabel_position('b') -ax.coords[0].set_ticklabel_position('b') -ax.coords[1].set_axislabel('Declination (J2000)') -ax.coords[1].set_axislabel_position('l') -ax.coords[1].set_ticklabel_position('l') -# ax.axis('equal') - -cbar = plt.colorbar(im0, cax=cbar_ax, label=r"$F_{\lambda}$ [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA^{-1}$]") -ax.legend(loc='upper right') -fig.savefig(path_join(root_dir_plot_S, "NGC1068_K_comparison.png"), bbox_inches="tight", dpi=300) - -# compute integrated polarization parameters on a specific cut -for d in [data_S, data_K]: - d['I_dil'] = np.sum(d['I'][d['mask']]) - d['sI_dil'] = np.sqrt(np.sum(d['sI'][d['mask']]**2)) - d['Q_dil'] = np.sum(d['Q'][d['mask']]) - d['sQ_dil'] = np.sqrt(np.sum(d['sQ'][d['mask']]**2)) - d['U_dil'] = np.sum(d['U'][d['mask']]) - d['sU_dil'] = np.sqrt(np.sum(d['sU'][d['mask']]**2)) - - d['P_dil'] = np.sqrt(d['Q_dil']**2+d['U_dil']**2)/d['I_dil'] - d['sP_dil'] = np.sqrt((d['Q_dil']**2*d['sQ_dil']**2+d['U_dil']**2*d['sU_dil']**2)/(d['Q_dil']**2+d['U_dil']**2) + - ((d['Q_dil']/d['I_dil'])**2+(d['U_dil']/d['I_dil'])**2)*d['sI_dil']**2)/d['I_dil'] - d['d_P_dil'] = np.sqrt(d['P_dil']**2-d['sP_dil']**2) - d['PA_dil'] = princ_angle((90./np.pi)*np.arctan2(d['U_dil'], d['Q_dil'])) - d['sPA_dil'] = 
princ_angle((90./(np.pi*(d['Q_dil']**2+d['U_dil']**2)))*np.sqrt(d['Q_dil']**2*d['sU_dil']**2+d['U_dil']**2*d['sU_dil']**2)) -print('From this pipeline :\n', "P = {0:.2f} ± {1:.2f} %\n".format( - data_S['d_P_dil']*100., data_S['sP_dil']*100.), "PA = {0:.2f} ± {1:.2f} °".format(data_S['PA_dil'], data_S['sPA_dil'])) -print("From Kishimoto's pipeline :\n", "P = {0:.2f} ± {1:.2f} %\n".format( - data_K['d_P_dil']*100., data_K['sP_dil']*100.), "PA = {0:.2f} ± {1:.2f} °".format(data_K['PA_dil'], data_K['sPA_dil'])) - -# compare different types of error -print("This pipeline : average sI/I={0:.2f} ; sQ/Q={1:.2f} ; sU/U={2:.2f} ; sP/P={3:.2f}".format(np.mean(data_S['sI'][data_S['mask']]/data_S['I'][data_S['mask']]), np.mean( - data_S['sQ'][data_S['mask']]/data_S['Q'][data_S['mask']]), np.mean(data_S['sU'][data_S['mask']]/data_S['U'][data_S['mask']]), np.mean(data_S['sP'][data_S['mask']]/data_S['P'][data_S['mask']]))) -print("Kishimoto's pipeline : average sI/I={0:.2f} ; sQ/Q={1:.2f} ; sU/U={2:.2f} ; sP/P={3:.2f}".format(np.mean(data_K['sI'][data_S['mask']]/data_K['I'][data_S['mask']]), np.mean( - data_K['sQ'][data_S['mask']]/data_K['Q'][data_S['mask']]), np.mean(data_K['sU'][data_S['mask']]/data_K['U'][data_S['mask']]), np.mean(data_K['sP'][data_S['mask']]/data_K['P'][data_S['mask']]))) -for d, i in zip(['I', 'Q', 'U', 'P', 'PA', 'sI', 'sQ', 'sU', 'sP', 'sPA'], [0, 1, 2, 5, 8, (3, 0, 0), (3, 1, 1), (3, 2, 2), 6, 9]): - data_K[d] = np.loadtxt(path_join(root_dir_K, d+'.txt')) - with fits.open(path_join(root_dir_data_S, filename_S)) as f: - if not type(i) is int: - data_S[d] = np.sqrt(f[i[0]].data[i[1], i[2]]) - else: - data_S[d] = f[i].data - if i == 0: - header = f[i].header - -# from Kishimoto's pipeline : IQU_dir, IQU_shift, IQU_stat, IQU_trans -# from my pipeline : raw_bg, raw_flat, raw_psf, raw_shift, raw_wav, IQU_dir -# but errors from my pipeline are propagated all along, how to compare then ? 
- -plt.show() diff --git a/package/src/emission_center.py b/package/src/emission_center.py new file mode 100755 index 0000000..9c05fde --- /dev/null +++ b/package/src/emission_center.py @@ -0,0 +1,81 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +from pathlib import Path +from sys import path as syspath + +syspath.append(str(Path(__file__).parent.parent)) + + +def main(infile, target=None, output_dir=None): + from os.path import join as pathjoin + + import numpy as np + from astropy.io.fits import open as fits_open + from astropy.wcs import WCS + from lib.plots import polarization_map + from lib.utils import CenterConf, PCconf + from matplotlib.patches import Rectangle + from matplotlib.pyplot import figure, show + + output = [] + + Stokes = fits_open(infile) + stkI = Stokes["I_STOKES"].data + QN, UN, QN_ERR, UN_ERR = np.full((4, stkI.shape[0], stkI.shape[1]), np.nan) + for sflux, nflux in zip( + [Stokes["Q_STOKES"].data, Stokes["U_STOKES"].data, np.sqrt(Stokes["IQU_COV_MATRIX"].data[1, 1]), np.sqrt(Stokes["IQU_COV_MATRIX"].data[2, 2])], + [QN, UN, QN_ERR, UN_ERR], + ): + nflux[stkI > 0.0] = sflux[stkI > 0.0] / stkI[stkI > 0.0] + Stokesconf = PCconf(QN, UN, QN_ERR, UN_ERR) + Stokesmask = Stokes["DATA_MASK"].data.astype(bool) + Stokessnr = np.zeros(Stokesmask.shape) + Stokessnr[Stokes["POL_DEG_ERR"].data > 0.0] = ( + Stokes["POL_DEG_DEBIASED"].data[Stokes["POL_DEG_ERR"].data > 0.0] / Stokes["POL_DEG_ERR"].data[Stokes["POL_DEG_ERR"].data > 0.0] + ) + + Stokescentconf, Stokescenter = CenterConf(Stokesconf > 0.99, Stokes["POL_ANG"].data, Stokes["POL_ANG_ERR"].data) + Stokespos = WCS(Stokes[0].header).pixel_to_world(*Stokescenter) + + if target is None: + target = Stokes[0].header["TARGNAME"] + + fig = figure(figsize=(8, 8.5), layout="constrained") + fig, ax = polarization_map(Stokes, P_cut=0.99, step_vec=1, scale_vec=3, display="pf", fig=fig, width=0.33, linewidth=0.5) + + ax.plot(*Stokescenter, marker="+", color="k", lw=3) + ax.plot(*Stokescenter, marker="+", color="gray", lw=1.5, label="Best confidence for center: {0}".format(Stokespos.to_string("hmsdms"))) + ax.contour(Stokescentconf, [0.01], colors="k", linewidths=3) + confcentcont = ax.contour(Stokescentconf, [0.01], colors="gray") + # confcont = ax.contour(Stokesconf, [0.9905], colors="r") + # snr3cont = ax.contour(Stokessnr, [3.0], colors="b", linestyles="dashed") + # snr4cont = ax.contour(Stokessnr, [4.0], colors="b") + handles, labels = ax.get_legend_handles_labels() + labels.append(r"Center $Conf_{99\%}$ contour") + handles.append(Rectangle((0, 0), 1, 1, fill=False, ec=confcentcont.get_edgecolor()[0])) + # labels.append(r"Polarization $Conf_{99\%}$ contour") + # handles.append(Rectangle((0, 0), 1, 1, fill=False, ec=confcont.get_edgecolor()[0])) + # labels.append(r"$SNR_P \geq$ 3 contour") + # handles.append(Rectangle((0, 0), 1, 1, fill=False, ls="--", ec=snr3cont.get_edgecolor()[0])) + # labels.append(r"$SNR_P \geq$ 4 contour") + # handles.append(Rectangle((0, 0), 1, 1, fill=False, ec=snr4cont.get_edgecolor()[0])) + ax.legend(handles=handles, labels=labels, bbox_to_anchor=(0.0, -0.02, 1.0, 0.01), loc="upper left", mode="expand", borderaxespad=0.0) + + if output_dir is not None: + filename = pathjoin(output_dir, "%s_center.pdf" % target) + fig.savefig(filename, dpi=300, facecolor="None") + output.append(filename) + show() + return output + + +if __name__ == "__main__": + import argparse + + parser = argparse.ArgumentParser(description="Look for the center of emission for a given reduced observation") + parser.add_argument("-t", 
"--target", metavar="targetname", required=False, help="the name of the target", type=str, default=None) + parser.add_argument("-f", "--file", metavar="path", required=False, help="The full or relative path to the data product", type=str, default=None) + parser.add_argument("-o", "--output_dir", metavar="directory_path", required=False, help="output directory path for the plots", type=str, default="./data") + args = parser.parse_args() + exitcode = main(infile=args.file, target=args.target, output_dir=args.output_dir) + print("Written to: ", exitcode) diff --git a/package/src/get_cdelt.py b/package/src/get_cdelt.py index b7054c6..7c06713 100755 --- a/package/src/get_cdelt.py +++ b/package/src/get_cdelt.py @@ -1,4 +1,9 @@ -#!/usr/bin/python +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +from pathlib import Path +from sys import path as syspath + +syspath.append(str(Path(__file__).parent.parent)) def main(infiles=None): @@ -13,7 +18,7 @@ def main(infiles=None): from numpy.linalg import eig if infiles is None: - print('Usage: "python get_cdelt.py -f infiles"') + print('Usage: "env python3 get_cdelt.py -f infiles"') return 1 prod = [["/".join(filepath.split("/")[:-1]), filepath.split("/")[-1]] for filepath in infiles] data_folder = prod[0][0] diff --git a/package/src/overplot_IC5063.py b/package/src/overplot_IC5063.py new file mode 100755 index 0000000..bea2fbe --- /dev/null +++ b/package/src/overplot_IC5063.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +from pathlib import Path +from sys import path as syspath + +syspath.append(str(Path(__file__).parent.parent)) + +import numpy as np +from astropy.io import fits +from lib.plots import overplot_pol, overplot_radio +from matplotlib.colors import LogNorm + +Stokes_UV = fits.open("./data/IC5063/5918/IC5063_FOC_b0.10arcsec_c0.20arcsec.fits") +# Stokes_18GHz = fits.open("./data/IC5063/radio/IC5063_18GHz.fits") +# Stokes_24GHz = fits.open("./data/IC5063/radio/IC5063_24GHz.fits") +# Stokes_103GHz = fits.open("./data/IC5063/radio/IC5063_103GHz.fits") +# Stokes_229GHz = fits.open("./data/IC5063/radio/IC5063_229GHz.fits") +# Stokes_357GHz = fits.open("./data/IC5063/radio/IC5063_357GHz.fits") +# Stokes_S2 = fits.open("./data/IC5063/POLARIZATION_COMPARISON/S2_rot_crop.fits") +Stokes_IR = fits.open("./data/IC5063/IR/u2e65g01t_c0f_rot.fits") + +# levelsMorganti = np.array([1.,2.,3.,8.,16.,32.,64.,128.]) +# levelsMorganti = np.logspace(-0.1249, 1.97, 7) / 100.0 + +# levels18GHz = levelsMorganti * Stokes_18GHz[0].data.max() +# A = overplot_radio(Stokes_UV, Stokes_18GHz) +# A.plot(levels=levels18GHz, P_cut=2.0, SNRi_cut=10.0, savename="./plots/IC5063/18GHz_overplot.pdf", scale_vec=None) + +# levels24GHz = levelsMorganti * Stokes_24GHz[0].data.max() +# B = overplot_radio(Stokes_UV, Stokes_24GHz) +# B.plot(levels=levels24GHz, P_cut=2.0, SNRi_cut=10.0, savename="./plots/IC5063/24GHz_overplot.pdf", scale_vec=None) + +# levels103GHz = levelsMorganti * Stokes_103GHz[0].data.max() +# C = overplot_radio(Stokes_UV, Stokes_103GHz) +# C.plot(levels=levels103GHz, P_cut=2.0, SNRi_cut=10.0, savename="./plots/IC5063/103GHz_overplot.pdf", scale_vec=None) + +# levels229GHz = levelsMorganti * Stokes_229GHz[0].data.max() +# D = overplot_radio(Stokes_UV, Stokes_229GHz) +# D.plot(levels=levels229GHz, P_cut=2.0, SNRi_cut=10.0, savename="./plots/IC5063/229GHz_overplot.pdf", scale_vec=None) + +# levels357GHz = levelsMorganti * Stokes_357GHz[0].data.max() +# E = overplot_radio(Stokes_UV, Stokes_357GHz) +# E.plot(levels=levels357GHz, P_cut=2.0, SNRi_cut=10.0, 
savename="./plots/IC5063/357GHz_overplot.pdf", scale_vec=None) + +# F = overplot_pol(Stokes_UV, Stokes_S2) +# F.plot(P_cut=3.0, SNRi_cut=80.0, savename='./plots/IC5063/S2_overplot.pdf', norm=LogNorm(vmin=5e-20,vmax=5e-18)) + +G = overplot_pol(Stokes_UV, Stokes_IR, cmap="inferno") +G.plot( + P_cut=0.99, + SNRi_cut=1.0, + savename="./plots/IC5063/IR_overplot.pdf", + scale_vec=None, + norm=LogNorm(Stokes_IR[0].data.max() * Stokes_IR[0].header["photflam"] / 1e3, Stokes_IR[0].data.max() * Stokes_IR[0].header["photflam"]), + cmap="inferno", +) diff --git a/package/src/overplot_MRK463E.py b/package/src/overplot_MRK463E.py new file mode 100755 index 0000000..505e39d --- /dev/null +++ b/package/src/overplot_MRK463E.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +from pathlib import Path +from sys import path as syspath + +syspath.append(str(Path(__file__).parent.parent)) + +import numpy as np +from astropy.io import fits +from lib.plots import overplot_chandra, overplot_pol +from matplotlib.colors import LogNorm + +Stokes_UV = fits.open("./data/MRK463E/5960/MRK463E_FOC_b0.05arcsec_c0.10arcsec.fits") +# Stokes_IR = fits.open("./data/MRK463E/WFPC2/IR_rot_crop.fits") +# Stokes_Xr = fits.open("./data/MRK463E/Chandra/X_ray_crop.fits") +Radio = fits.open("./data/MRK463E/EMERLIN/Voorwerpjes_1356+1822_1356+1822_uniform-image.fits") + +# levels = np.geomspace(1.0, 99.0, 7) + +# A = overplot_chandra(Stokes_UV, Stokes_Xr, norm=LogNorm()) +# A.plot(levels=levels, P_cut=0.99, SNRi_cut=1.0, scale_vec=5, zoom=1, savename="./plots/MRK463E/Chandra_overplot.pdf") +# A.write_to(path1="./data/MRK463E/FOC_data_Chandra.fits", path2="./data/MRK463E/Chandra_data.fits", suffix="aligned") + +# levels = np.array([0.8, 2, 5, 10, 20, 50]) / 100.0 * Stokes_UV[0].header["photflam"] +# B = overplot_pol(Stokes_UV, Stokes_IR, norm=LogNorm()) +# B.plot(levels=levels, P_cut=0.99, SNRi_cut=1.0, scale_vec=5, norm=LogNorm(8.5e-18, 2.5e-15), savename="./plots/MRK463E/IR_overplot.pdf") +# B.write_to(path1="./data/MRK463E/FOC_data_WFPC.fits", path2="./data/MRK463E/WFPC_data.fits", suffix="aligned") + +# levels = np.array([0.8, 2, 5, 10, 20, 50]) / 100.0 * Stokes_UV[0].header["photflam"] +levels = np.array([5, 10, 20, 50]) +C = overplot_pol(Stokes_UV, Radio, norm=LogNorm()) +C.other_im.set(norm=LogNorm(1e-4, 2e-2)) +C.plot( + levels=levels, + P_cut=0.99, + SNRi_cut=1.0, + step_vec=0, + scale_vec=3, + norm=LogNorm(1e-4, 2e-2), + cmap="inferno_r", + width=0.5, + linewidth=0.5, + disptype="snri", + savename="./plots/MRK463E/EMERLIN_snri_overplot.pdf", +) +C.write_to(path1="./data/MRK463E/FOC_data_EMERLIN.fits", path2="./data/MRK463E/EMERLIN_data.fits", suffix="aligned") diff --git a/package/src/overplot_NGC1068.py b/package/src/overplot_NGC1068.py new file mode 100755 index 0000000..8642458 --- /dev/null +++ b/package/src/overplot_NGC1068.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +from pathlib import Path +from sys import path as syspath + +syspath.append(str(Path(__file__).parent.parent)) + +import numpy as np +from astropy.io import fits +from lib.plots import overplot_pol, plt +from matplotlib.colors import LogNorm + +Stokes_UV = fits.open("./data/NGC1068/5144/NGC1068_FOC_b0.05arcsec_c0.07arcsec_crop.fits") +Radio = fits.open("./data/NGC1068/MERLIN-VLA/Combined_crop.fits") + +levels = np.logspace(-0.5, 1.99, 7) / 100.0 * Stokes_UV[0].data.max() * Stokes_UV[0].header["photflam"] +A = overplot_pol(Stokes_UV, Radio, norm=LogNorm()) +A.plot( + levels=levels, + P_cut=0.99, + SNRi_cut=1.0, + 
scale_vec=3,
+    step_vec=1,
+    norm=LogNorm(5e-5, 1e-1),
+    cmap="inferno_r",
+    width=0.8,
+    linewidth=1.2,
+)
+A.add_vector(
+    A.other_wcs.celestial.wcs.crpix - (1.0, 1.0),
+    pol_deg=0.124,
+    pol_ang=100.7,
+    width=2.0,
+    linewidth=1.0,
+    scale=1.0 / (A.px_scale * 6.0),
+    edgecolor="w",
+    color="b",
+    label=r"IXPE torus: P = $12.4 (\pm 3.6)$%, PA = $100.7 (\pm 8.3)$°",
+)
+A.fig_overplot.savefig("./plots/NGC1068/NGC1068_radio_overplot_full.pdf", dpi=300, bbox_inches="tight")
+plt.show()
+A.write_to(path1="./data/NGC1068/FOC_Radio.fits", path2="./data/NGC1068/Radio_FOC.fits", suffix="aligned")
diff --git a/package/src/readfos.py b/package/src/readfos.py
new file mode 100755
index 0000000..8a32cd1
--- /dev/null
+++ b/package/src/readfos.py
@@ -0,0 +1,426 @@
+#!/usr/bin/env python3
+import matplotlib.pyplot as plt
+import numpy as np
+from astropy.io.fits import getheader, getdata, hdu
+from os.path import join as join_path, exists as path_exists
+from os import system
+from copy import deepcopy
+
+# consecutive spectra are made up of the sum of all previous ACCUMs, so the S/N increases along the sequence
+# _c0f.fits - calibrated vacuum wavelength
+# _c1f.fits - calibrated fluxes (ergs sec^-1 cm^-2 Angs^-1)
+# _c2f.fits - statistical errors (no sky, bkg subtraction, flatfield or sensitivity error)
+# _c3f.fits - for SPECTROPOLARIMETRY mode contains I, Q, U, V, linear and circular polarization and polarization position angle
+# _c4f.fits - object+sky count rate spectrum (corrected for overscanning, noise rejection, lost signal)
+# _c5f.fits - flat-fielded object count rate spectrum (corrected for paired pulses, detector background, flatfield structure, GIM effects)
+# _c6f.fits - flat-fielded sky count rate spectrum (corrected for paired pulses, detector background, flatfield structure, GIM effects)
+# _c7f.fits - background count rate spectrum (scaled background subtracted from c4 products)
+# _c8f.fits - flat-fielded and sky-subtracted object count rate spectrum
+
+
+def princ_angle(ang):
+    """
+    Return the principal angle in the 0° to 180° range, since the polarization
+    angle is only defined modulo 180°.
+    ----------
+    Inputs:
+    ang : float, numpy.ndarray
+        Angle in degrees. Can be an array of angles.
+    ----------
+    Returns:
+    princ_ang : float, numpy.ndarray
+        Principal angle in the 0°-180° range, in the same shape as the input.
+    """
+    if not isinstance(ang, np.ndarray):
+        A = np.array([ang])
+    else:
+        A = np.array(ang)
+    while np.any(A < 0.0):
+        A[A < 0.0] = A[A < 0.0] + 360.0
+    while np.any(A >= 180.0):
+        A[A >= 180.0] = A[A >= 180.0] - 180.0
+    if type(ang) is type(A):
+        return A
+    else:
+        return A[0]
+
+
+class specpol(object):
+    """
+    Class object for studying spectropolarimetry.
+    """
+
+    def __init__(self, other=None):
+        if isinstance(other, specpol):
+            # Copy constructor
+            self.wav = deepcopy(other.wav)
+            self.wav_err = deepcopy(other.wav_err)
+            self.I = deepcopy(other.I)
+            self.Q = deepcopy(other.Q)
+            self.U = deepcopy(other.U)
+            self.V = deepcopy(other.V)
+            self.IQU_cov = deepcopy(other.IQU_cov)
+        else:
+            # Initialise to zero
+            if isinstance(other, int):
+                self.zero(other)
+            else:
+                self.zero()
+
+    def zero(self, n=1):
+        """
+        Set all values to zero.
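+        ----------
+        Inputs:
+        n : int, optional
+            Number of spectral bins: all wavelength and Stokes arrays are allocated
+            with length n along the spectral axis and filled with zeros.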
+ """ + self.wav = np.zeros(n) + self.wav_err = np.zeros((n, 2)) + self.I = np.zeros(n) + self.Q = np.zeros(n) + self.U = np.zeros(n) + self.V = np.zeros(n) + self.IQU_cov = np.zeros((4, 4, n)) + + @property + def I_err(self): + return np.sqrt(self.IQU_cov[0][0]) + + @property + def Q_err(self): + return np.sqrt(self.IQU_cov[1][1]) + + @property + def U_err(self): + return np.sqrt(self.IQU_cov[2][2]) + + @property + def V_err(self): + return np.sqrt(self.IQU_cov[3][3]) + + @property + def QN(self): + np.seterr(divide="ignore", invalid="ignore") + return self.Q / np.where(self.I > 0, self.I, np.nan) + + @property + def QN_err(self): + np.seterr(divide="ignore", invalid="ignore") + return self.Q_err / np.where(self.I > 0, self.I, np.nan) + + @property + def UN(self): + np.seterr(divide="ignore", invalid="ignore") + return self.U / np.where(self.I > 0, self.I, np.nan) + + @property + def UN_err(self): + np.seterr(divide="ignore", invalid="ignore") + return self.U_err / np.where(self.I > 0, self.I, np.nan) + + @property + def VN(self): + np.seterr(divide="ignore", invalid="ignore") + return self.V / np.where(self.I > 0, self.I, np.nan) + + @property + def VN_err(self): + np.seterr(divide="ignore", invalid="ignore") + return self.V_err / np.where(self.I > 0, self.I, np.nan) + + @property + def PF(self): + np.seterr(divide="ignore", invalid="ignore") + return np.sqrt(self.Q**2 + self.U**2 + self.V**2) + + @property + def PF_err(self): + np.seterr(divide="ignore", invalid="ignore") + return np.sqrt(self.Q**2 * self.Q_err**2 + self.U**2 * self.U_err**2 + self.V**2 * self.V_err**2) / np.where(self.PF > 0, self.PF, np.nan) + + @property + def P(self): + np.seterr(divide="ignore", invalid="ignore") + return self.PF / np.where(self.I > 0, self.I, np.nan) + + @property + def P_err(self): + np.seterr(divide="ignore", invalid="ignore") + return np.where(self.I > 0, np.sqrt(self.PF_err**2 + (self.PF / self.I) ** 2 * self.I_err**2) / self.I, np.nan) + + @property + def PA(self): + return princ_angle((90.0 / np.pi) * np.arctan2(self.U, self.Q)) + + @property + def PA_err(self): + return princ_angle((90.0 / np.pi) * np.sqrt(self.U**2 * self.Q_err**2 + self.Q**2 * self.U_err**2) / np.where(self.PF > 0, self.PF**2, np.nan)) + + def rotate(self, angle): + alpha = np.pi / 180.0 * angle + mrot = np.array( + [ + [1.0, 0.0, 0.0, 0.0], + [0.0, np.cos(2.0 * alpha), np.sin(2.0 * alpha), 0.0], + [0.0, -np.sin(2.0 * alpha), np.cos(2.0 * alpha), 0.0], + [0.0, 0.0, 0.0, 1.0], + ] + ) + self.I, self.Q, self.U, self.V = np.dot(mrot, np.array([self.I, self.Q, self.U, self.V])) + self.IQU_cov = np.dot(mrot, np.dot(self.IQU_cov.T, mrot.T).T) + + def bin_size(self, size): + """ + Rebin spectra to selected bin size in Angstrom. 
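+        Wavelengths are averaged within each bin while the Stokes fluxes (I, Q, U, V)
+        are summed; the variances are summed and the off-diagonal covariance terms
+        are combined in quadrature.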
+        """
+        bin_edges = np.arange(np.floor(self.wav.min()), np.ceil(self.wav.max()), size)
+        in_bin = np.digitize(self.wav, bin_edges) - 1
+        spec_b = specpol(bin_edges.shape[0] - 1)
+        for i in range(bin_edges.shape[0] - 1):
+            spec_b.wav[i] = np.mean(self.wav[in_bin == i])
+            spec_b.wav_err[i] = (spec_b.wav[i] - bin_edges[i], bin_edges[i + 1] - spec_b.wav[i])
+
+            spec_b.I[i] = np.sum(self.I[in_bin == i])
+            spec_b.Q[i] = np.sum(self.Q[in_bin == i])
+            spec_b.U[i] = np.sum(self.U[in_bin == i])
+            spec_b.V[i] = np.sum(self.V[in_bin == i])
+            for m in range(4):
+                spec_b.IQU_cov[m][m][i] = np.sum(self.IQU_cov[m][m][in_bin == i])
+                for n in [k for k in range(4) if k != m]:
+                    spec_b.IQU_cov[m][n][i] = np.sqrt(np.sum(self.IQU_cov[m][n][in_bin == i] ** 2))
+        return spec_b
+
+    def dump_txt(self, filename, output_dir=""):
+        """
+        Dump current spectra to a text file.
+        """
+        data_dump = np.array([self.wav, self.I, self.Q, self.U, self.V, self.P, self.PA]).T
+        np.savetxt(join_path(output_dir, filename + ".txt"), data_dump)
+        return join_path(output_dir, filename)
+
+    def plot(self, fig=None, ax=None, savename=None, plots_folder=""):
+        """
+        Display current spectra.
+        """
+        if fig is None:
+            if ax is None:
+                self.fig, self.ax = plt.subplots(1, 2, sharex=True, figsize=(20, 5), layout="constrained")
+            else:
+                self.ax = ax
+        else:
+            if ax is None:
+                self.fig = fig
+                self.ax = self.fig.add_subplot(111)
+            else:
+                self.fig = fig
+                self.ax = ax
+        if isinstance(self.ax, np.ndarray):
+            ax1, ax2 = self.ax[:2]
+        else:
+            ax1 = self.ax
+
+        # Display flux and polarized flux on first ax
+        ax1.set_xlabel(r"Wavelength [$\AA$]")
+        ax1.errorbar(self.wav, self.I, xerr=self.wav_err.T, yerr=self.I_err, color="k", fmt=".", label="I")
+        ax1.errorbar(self.wav, self.PF, xerr=self.wav_err.T, yerr=self.PF_err, color="b", fmt=".", label="PF")
+        ax1.set_ylabel(r"F$_\lambda$ [erg s$^{-1}$ cm$^{-2} \AA^{-1}$]")
+        ax1.legend(ncols=2, loc=1)
+
+        if isinstance(self.ax, np.ndarray):
+            # When two axes are available, display P and PA on the second
+            ax2.set_xlabel(r"Wavelength [$\AA$]")
+            ax2.errorbar(self.wav, self.P, xerr=self.wav_err.T, yerr=self.P_err, color="b", fmt=".", label="P")
+            ax2.set_ylim([0.0, 1.0])
+            ax2.set_ylabel(r"P", color="b")
+            ax2.tick_params(axis="y", color="b", labelcolor="b")
+            ax22 = ax2.twinx()
+            ax22.errorbar(self.wav, self.PA, xerr=self.wav_err.T, yerr=self.PA_err, color="r", fmt=".", label="PA [°]")
+            ax22.set_ylim([0.0, 180.0])
+            ax22.set_ylabel(r"PA", color="r")
+            ax22.tick_params(axis="y", color="r", labelcolor="r")
+            h2, l2 = ax2.get_legend_handles_labels()
+            h22, l22 = ax22.get_legend_handles_labels()
+            ax2.legend(h2 + h22, l2 + l22, ncols=2, loc=1)
+
+        if hasattr(self, "fig") and savename is not None:
+            self.fig.savefig(join_path(plots_folder, savename + ".pdf"), dpi=300, bbox_inches="tight")
+            return self.fig, self.ax, join_path(plots_folder, savename + ".pdf")
+        elif hasattr(self, "fig"):
+            return self.fig, self.ax
+        else:
+            return self.ax
+
+
+class FOSspecpol(specpol):
+    """
+    Class object for studying FOS SPECTROPOLARIMETRY mode spectra.
+    """
+
+    def __init__(self, stokes, data_folder=""):
+        """
+        Initialise object from a FITS filename (or rootname) or an HDUList.
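+        ----------
+        Inputs:
+        stokes : str, astropy.io.fits.HDUList
+            Rootname or filename of the FOS observation; the matching _c0f.fits
+            (wavelengths) and _c3f.fits (Stokes) products are read from data_folder.
+            An already opened _c3f HDUList can be passed instead.
+        data_folder : str, optional
+            Path to the folder containing the calibrated FOS products.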
+        """
+        if isinstance(stokes, str):
+            self.file_root = stokes.split("_")[0]
+            self.hd = getheader(join_path(data_folder, self.file_root + "_c0f.fits"))
+            wav = getdata(join_path(data_folder, self.file_root + "_c0f.fits"))
+            stokes = getdata(join_path(data_folder, self.file_root + "_c3f.fits"))
+        elif isinstance(stokes, hdu.hdulist.HDUList):
+            # Assume the header and data sit in the primary HDU, consistent with getheader/getdata above
+            self.hd = stokes[0].header
+            self.file_root = self.hd["FILENAME"].split("_")[0]
+            wav = getdata(join_path(data_folder, self.file_root + "_c0f.fits"))
+            stokes = stokes[0].data
+        else:
+            raise ValueError("Input must be a path to a FITS file or an HDUList")
+
+        self.wav = np.concatenate((wav[0:2, :], wav[0].reshape(1, wav.shape[1]), wav[0].reshape(1, wav.shape[1])), axis=0)
+        self.wav_err = np.zeros((self.wav.shape[0], self.wav.shape[1], 2))
+
+        self.IQU_cov = np.zeros((4, 4, self.wav.shape[0], self.wav.shape[1]))
+
+        self.I = stokes[0::14]
+        self.IQU_cov[0, 0] = stokes[4::14] ** 2
+        self.Q = stokes[1::14]
+        self.IQU_cov[1, 1] = stokes[5::14] ** 2
+        self.U = stokes[2::14]
+        self.IQU_cov[2, 2] = stokes[6::14] ** 2
+        self.V = stokes[3::14]
+        self.IQU_cov[3, 3] = stokes[7::14] ** 2
+
+        self.subspec = {}
+        for i, name in enumerate(["PASS1", "PASS2", "PASS12", "PASS12corr"]):
+            spec = specpol(self.wav[i].shape[0])
+            spec.wav, spec.wav_err, spec.I, spec.Q, spec.U, spec.V = self.wav[i], self.wav_err[i], self.I[i], self.Q[i], self.U[i], self.V[i]
+            spec.IQU_cov = self.IQU_cov[:, :, i, :]
+            spec.rotate((i != 3) * self.hd["PA_APER"])
+            self.subspec[name] = spec
+
+        self.P_fos = stokes[8::14]
+        self.P_fos_err = stokes[11::14]
+        self.PC_fos = stokes[9::14]
+        self.PC_fos_err = stokes[12::14]
+        self.PA_fos = princ_angle(
+            np.degrees(stokes[10::14]) + np.concatenate((np.ones((3, stokes.shape[1])), np.zeros((1, stokes.shape[1]))), axis=0) * self.hd["PA_APER"]
+        )
+        self.PA_fos_err = princ_angle(np.degrees(stokes[13::14]))
+
+    def __del__(self):
+        if hasattr(self, "ax"):
+            del self.ax
+        if hasattr(self, "fig"):
+            del self.fig
+
+    def dump_txt(self, filename, spec_list=None, output_dir=""):
+        """
+        Dump current spectra to a text file.
+        """
+        outfiles = []
+        if spec_list is None:
+            spec_list = self.subspec
+        for name in ["PASS1", "PASS2", "PASS12", "PASS12corr"]:
+            outfiles.append(spec_list[name].dump_txt(filename="_".join([filename, name]), output_dir=output_dir))
+        return outfiles
+
+    def plot(self, spec_list=None, savename=None, plots_folder="", fos=False):
+        """
+        Display current spectra in a single figure.
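+        With fos=True, the right-hand column shows the polarization degree and angle
+        delivered by the FOS pipeline (P_fos, PA_fos); otherwise each row shows the
+        recomputed spectra (flux and polarized flux on the left, P and PA on the right).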
+ """ + outfiles = [] + if hasattr(self, "ax"): + del self.ax + if hasattr(self, "fig"): + del self.fig + if spec_list is None: + spec_list = self.subspec + self.fig, self.ax = plt.subplots(4, 2, sharex=True, sharey="col", figsize=(20, 10), layout="constrained") + for i, (name, title) in enumerate( + [("PASS1", "Pass Direction 1"), ("PASS2", "Pass Direction 2"), ("PASS12", "Pass Direction 1&2"), ("PASS12corr", "Pass Direction 1&2 corrected")] + ): + self.ax[i][0].set_title(title) + if fos: + self.ax[i][0] = spec_list[name].plot(ax=self.ax[i][0]) + self.ax[i][1].set_xlabel(r"Wavelength [$\AA$]") + self.ax[i][1].errorbar(self.wav[i], self.P_fos[i], xerr=self.wav_err[i].T, yerr=self.P_fos_err[i], color="b", fmt=".", label="P_fos") + self.ax[i][1].set_ylim([0.0, 1.0]) + self.ax[i][1].set_ylabel(r"P", color="b") + self.ax[i][1].tick_params(axis="y", color="b", labelcolor="b") + ax22 = self.ax[i][1].twinx() + ax22.errorbar(self.wav[i], self.PA_fos[i], xerr=self.wav_err[i].T, yerr=self.PA_fos_err[i], color="r", fmt=".", label="PA_fos [°]") + ax22.set_ylim([0.0, 180.0]) + ax22.set_ylabel(r"PA", color="r") + ax22.tick_params(axis="y", color="r", labelcolor="r") + h2, l2 = self.ax[i][1].get_legend_handles_labels() + h22, l22 = ax22.get_legend_handles_labels() + self.ax[i][1].legend(h2 + h22, l2 + l22, ncols=2, loc=1) + else: + self.ax[i] = spec_list[name].plot(ax=self.ax[i]) + self.ax[0][0].set_ylim(ymin=0.0) + + self.fig.suptitle("_".join([self.hd["TARGNAME"], str(self.hd["PROPOSID"]), self.hd["FILENAME"], self.hd["APER_ID"]])) + if savename is not None: + self.fig.savefig(join_path(plots_folder, savename + ".pdf"), dpi=300, bbox_inches="tight") + outfiles.append(join_path(plots_folder, savename + ".pdf")) + return outfiles + + def bin_size(self, size): + """ + Rebin spectra to selected bin size in Angstrom. + """ + key = "{0:.2f}bin".format(size) + if key not in self.subspec.keys(): + self.subspec[key] = dict([]) + for name in ["PASS1", "PASS2", "PASS12", "PASS12corr"]: + self.subspec[key][name] = self.subspec[name].bin_size(size) + return self.subspec[key] + + +def main(infiles, bin_size=None, output_dir=None): + outfiles = [] + if infiles is not None: + prod = np.array([["/".join(filepath.split("/")[:-1]), filepath.split("/")[-1]] for filepath in infiles], dtype=str) + obs_dir = "/".join(infiles[0].split("/")[:-1]) + if not path_exists(obs_dir): + system("mkdir -p {0:s} {1:s}".format(obs_dir, obs_dir.replace("data", "plots"))) + else: + print("Must input files to process.") + return 1 + data_folder = prod[0][0] + if output_dir is None: + output_dir = data_folder + try: + plots_folder = data_folder.replace("data", "plots") + except ValueError: + plots_folder = "." 
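+    # Plots are written to a folder mirroring the data tree ("data" is swapped for "plots" in the input path)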
+    if not path_exists(plots_folder):
+        system("mkdir -p {0:s}".format(plots_folder))
+    infiles = [p[1] for p in prod]
+
+    roots = np.unique([file.split("_")[0] for file in infiles])
+    for file_root in roots:
+        print(file_root)
+        spec = FOSspecpol(file_root, data_folder)
+        filename = "_".join([spec.hd["TARGNAME"], "FOS", str(spec.hd["PROPOSID"]), spec.file_root, spec.hd["APER_ID"]])
+        if bin_size is not None:
+            key = "{0:.2f}bin".format(bin_size)
+            spec.bin_size(bin_size)
+            outfiles += spec.dump_txt("_".join([filename, key]), spec_list=spec.subspec[key], output_dir=output_dir)
+            outfiles += spec.plot(savename="_".join([filename, key]), spec_list=spec.subspec[key], plots_folder=plots_folder)
+        else:
+            outfiles += spec.dump_txt(filename, output_dir=output_dir)
+            outfiles += spec.plot(savename=filename, plots_folder=plots_folder)
+        del spec
+    plt.show()
+
+    return outfiles
+
+
+if __name__ == "__main__":
+    import argparse
+
+    parser = argparse.ArgumentParser(description="Display and dump FOS Spectropolarimetry")
+    parser.add_argument("-f", "--files", metavar="path", required=False, nargs="*", help="the full or relative path to the data products", default=None)
+    parser.add_argument("-b", "--bin", metavar="bin_size", required=False, help="the bin size in Angstrom used to rebin the spectra", type=float, default=None)
+    parser.add_argument(
+        "-o", "--output_dir", metavar="directory_path", required=False, help="output directory path for the data products", type=str, default=None
+    )
+    args = parser.parse_args()
+    exitcode = main(infiles=args.files, bin_size=args.bin, output_dir=args.output_dir)
+    print("Written to: ", exitcode)
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..0f2c0ad
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,7 @@
+# Requirements for FOC_Reduction
+
+numpy
+scipy
+astropy
+astroquery # To directly query the MAST archives
+matplotlib >= 3.8 # 3.8 introduced deprecations affecting ContourSet
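Illustrative usage of the new FOS reader (a sketch, not part of the patch: the target folder and observation rootname below are placeholders, the scripts are assumed to be run from the repository root with the requirements installed, and matching _c0f.fits files must sit alongside the _c3f.fits products):

    # Command line: read all FOS spectropolarimetry products in a folder and rebin to 10 Angstrom
    python package/src/readfos.py -f ./data/SOME_TARGET/fos/*_c3f.fits -b 10

    # Programmatic use of the FOSspecpol class (run from package/src or with it on the path)
    from readfos import FOSspecpol
    spec = FOSspecpol("rootname_c3f.fits", data_folder="./data/SOME_TARGET/fos")  # placeholder rootname and folder
    binned = spec.bin_size(10.0)  # dict of rebinned specpol objects keyed "PASS1", "PASS2", "PASS12", "PASS12corr"
    spec.plot(spec_list=binned, savename="SOME_TARGET_FOS_10.00bin", plots_folder="./plots/SOME_TARGET")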