From 62aef1b1c4de1d6e6289bfd5a5ca1f3538b27bac Mon Sep 17 00:00:00 2001
From: sugar_jo <140659696+sugar-joh@users.noreply.github.com>
Date: Mon, 15 Jul 2024 19:39:21 +0800
Subject: [PATCH] add optimal_binning to plotting

---
 package/FOC_reduction.py  | 377 +++++++++++++++++++++++---------------
 package/lib/background.py |   6 +-
 package/lib/plots.py      | 105 ++++++-----
 package/lib/reduction.py  |   4 +-
 4 files changed, 291 insertions(+), 201 deletions(-)

diff --git a/package/FOC_reduction.py b/package/FOC_reduction.py
index 177fb06..ea0c60e 100755
--- a/package/FOC_reduction.py
+++ b/package/FOC_reduction.py
@@ -5,18 +5,24 @@ Main script where are progressively added the steps for the FOC pipeline reduction
 """
 # Project libraries
-import numpy as np
+
 from copy import deepcopy
+import os
 from os import system
 from os.path import exists as path_exists
+
+from matplotlib.colors import LogNorm
+import numpy as np
+
+from lib.background import subtract_bkg
 import lib.fits as proj_fits  # Functions to handle fits files
 import lib.reduction as proj_red  # Functions used in reduction pipeline
 import lib.plots as proj_plots  # Functions for plotting data
 from lib.utils import sci_not, princ_angle
-from matplotlib.colors import LogNorm
 
-def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop=False, interactive=False):
+
+def main(target=None, proposal_id=None, data_dir=None, infiles=None, output_dir="./data", crop=False, interactive=False):
     # Reduction parameters
     # Deconvolution
     deconvolve = False
@@ -36,7 +42,7 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop=
     # Background estimation
     error_sub_type = 'freedman-diaconis'  # sqrt, sturges, rice, scott, freedman-diaconis (default) or shape (example (51, 51))
     subtract_error = 0.01
-    display_bkg = True
+    display_bkg = False
 
     # Data binning
     rebin = True
@@ -46,7 +52,7 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop=
     # Alignment
     align_center = 'center'  # If None will not align the images
-    display_align = True
+    display_align = False
 
     display_data = False
 
     # Transmittance correction
@@ -75,39 +81,45 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop=
     # 3. Use the same alignment as the routine
     # 4. Skip the rebinning step
     # 5. Calculate the Stokes parameters without smoothing
-    #
-    optimal_binning = False
+    optimal_binning = True
    optimize = False
-    options = {'optimize': optimize, 'optimal_binning': optimal_binning}

     # Pipeline start
     # Step 1:
     # Get data from fits files and translate to flux in erg/cm²/s/Angstrom.
- if infiles is not None: - prod = np.array([["/".join(filepath.split('/')[:-1]), filepath.split('/')[-1]] for filepath in infiles], dtype=str) - obs_dir = "/".join(infiles[0].split("/")[:-1]) - if not path_exists(obs_dir): - system("mkdir -p {0:s} {1:s}".format(obs_dir, obs_dir.replace("data", "plots"))) - if target is None: - target = input("Target name:\n>") + + if data_dir is None: + if infiles is not None: + prod = np.array([["/".join(filepath.split('/')[:-1]), filepath.split('/')[-1]] for filepath in infiles], dtype=str) + obs_dir = "/".join(infiles[0].split("/")[:-1]) + if not path_exists(obs_dir): + system("mkdir -p {0:s} {1:s}".format(obs_dir, obs_dir.replace("data", "plots"))) + if target is None: + target = input("Target name:\n>") + else: + from lib.query import retrieve_products + target, products = retrieve_products(target, proposal_id, output_dir=output_dir) + prod = products.pop() + for prods in products: + main(target=target, infiles=["/".join(pr) for pr in prods], output_dir=output_dir, crop=crop, interactive=interactive) + data_folder = prod[0][0] + + infiles = [p[1] for p in prod] + data_array, headers = proj_fits.get_obs_data(infiles, data_folder=data_folder, compute_flux=True) + else: - from lib.query import retrieve_products - target, products = retrieve_products(target, proposal_id, output_dir=output_dir) - prod = products.pop() - for prods in products: - main(target=target, infiles=["/".join(pr) for pr in prods], output_dir=output_dir, crop=crop, interactive=interactive) - data_folder = prod[0][0] + infiles = [f for f in os.listdir(data_dir) if f.endswith('.fits') and f.startswith('x')] + data_folder = data_dir + if target is None: + target = input("Target name:\n>") + + data_array, headers = proj_fits.get_obs_data(infiles, data_folder=data_folder, compute_flux=True) try: plots_folder = data_folder.replace("data", "plots") except ValueError: plots_folder = "." if not path_exists(plots_folder): system("mkdir -p {0:s} ".format(plots_folder)) - infiles = [p[1] for p in prod] - data_array, headers = proj_fits.get_obs_data(infiles, data_folder=data_folder, compute_flux=True) - - if optimal_binning: - _data_array, _headers = deepcopy(data_array), deepcopy(headers) figname = "_".join([target, "FOC"]) figtype = "" @@ -123,137 +135,207 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop= figtype += "_deconv" if align_center is None: figtype += "_not_aligned" - - # Crop data to remove outside blank margins. 
-    data_array, error_array, headers = proj_red.crop_array(data_array, headers, step=5, null_val=0.,
-                                                           inside=True, display=display_crop, savename=figname, plots_folder=plots_folder)
-    data_mask = np.ones(data_array[0].shape, dtype=bool)

     if optimal_binning:
+        options = {'optimize': optimize, 'optimal_binning': True}
+
+        # Step 1: Load the data again and preserve the full images
+        _data_array, _headers = deepcopy(data_array), deepcopy(headers)  # Preserve full images
         _data_mask = np.ones(_data_array[0].shape, dtype=bool)
+
+        # Step 2: Skip the cropping step but reuse the routine's error and background estimation (it is not yet clear why cropping here would be wrong)
+        data_array, error_array, headers = proj_red.crop_array(data_array, headers, step=5, null_val=0., inside=True,
+                                                               display=display_crop, savename=figname, plots_folder=plots_folder)
+        data_mask = np.ones(data_array[0].shape, dtype=bool)
+
+        background = None
+        _, _, _, background, error_bkg = proj_red.get_error(data_array, headers, error_array, data_mask=data_mask, sub_type=error_sub_type, subtract_error=subtract_error, display=display_bkg, savename="_".join([figname, "errors"]), plots_folder=plots_folder, return_background=True)
+
+        # _background plays the same role as background, but for the optimal-binning (full-frame) copy of the data
+        _background = None
+        _, _error_array, _, _, _ = proj_red.get_error(_data_array, _headers, error_array=None, data_mask=_data_mask, sub_type=error_sub_type, subtract_error=False, display=display_bkg, savename="_".join([figname, "errors"]), plots_folder=plots_folder, return_background=True)
+        _error_bkg = np.ones_like(_data_array) * error_bkg[:, 0, 0, np.newaxis, np.newaxis]
+        _data_array, _error_array, _background, _ = subtract_bkg(_data_array, _error_array, _data_mask, background, _error_bkg)
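A quick aside on the `_error_bkg` line in the new block above: `get_error` returns a background uncertainty that is constant within each exposure, and the indexing `[:, 0, 0, np.newaxis, np.newaxis]` re-expands that per-frame scalar onto the uncropped frames by plain NumPy broadcasting. A minimal sketch of the same pattern (the shapes are illustrative assumptions, not the pipeline's real dimensions):

import numpy as np

n_img, ny, nx = 4, 8, 8                                   # hypothetical stack of 4 frames
error_bkg = np.full((n_img, ny, nx), 0.05)                # per-pixel error maps, constant within each frame

# keep one scalar per frame, then re-expand it to a full-frame map
per_frame = error_bkg[:, 0, 0, np.newaxis, np.newaxis]    # shape (4, 1, 1)
full_maps = np.ones((n_img, ny, nx)) * per_frame          # broadcast back to (4, 8, 8)
assert np.allclose(full_maps, error_bkg)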
-    # Deconvolve data using Richardson-Lucy iterative algorithm with a gaussian PSF of given FWHM.
-    if deconvolve:
-        data_array = proj_red.deconvolve_array(data_array, headers, psf=psf, FWHM=psf_FWHM, scale=psf_scale, shape=psf_shape, iterations=iterations, algo=algo)
-
-    # Estimate error from data background, estimated from sub-image of desired sub_shape.
-    background = None
-    data_array, error_array, headers, background = proj_red.get_error(data_array, headers, error_array, data_mask=data_mask, sub_type=error_sub_type, subtract_error=subtract_error, display=display_bkg, savename="_".join([figname, "errors"]), plots_folder=plots_folder, return_background=True)
-
-    # if optimal_binning:
-    #     _data_array, _error_array, _background = proj_red.subtract_bkg(_data_array, error_array, background)  # _background is the same as background, but for the optimal binning to clarify
-
-    # Align and rescale images with oversampling.
-    data_array, error_array, headers, data_mask, shifts, error_shifts = proj_red.align_data(
-        data_array, headers, error_array=error_array, background=background, upsample_factor=10, ref_center=align_center, return_shifts=True)
-
-    # if optimal_binning:
-    #     _data_array, _error_array, _headers, _data_mask, _shifts, _error_shifts = proj_red.align_data(
-    #         _data_array, _headers, error_array=_error_array, background=background, upsample_factor=10, ref_center=align_center, return_shifts=True)
-
-    if display_align:
+        # Step 3: Align and rescale images with oversampling (cropping has to be disabled in the align_data function).
+        _data_array, _error_array, _headers, _, shifts, error_shifts = proj_red.align_data(_data_array, _headers, error_array=_error_array, background=_background,
+                                                                                           upsample_factor=10, ref_center=align_center, return_shifts=True)
+        print("Image shifts: {} \nShifts uncertainty: {}".format(shifts, error_shifts))
-        proj_plots.plot_obs(data_array, headers, savename="_".join([figname, str(align_center)]), plots_folder=plots_folder, norm=LogNorm(
-            vmin=data_array[data_array > 0.].min()*headers[0]['photflam'], vmax=data_array[data_array > 0.].max()*headers[0]['photflam']))
-
-    # Rebin data to desired pixel size.
-    if rebin:
-        data_array, error_array, headers, Dxy, data_mask = proj_red.rebin_array(
-            data_array, error_array, headers, pxsize=pxsize, scale=px_scale, operation=rebin_operation, data_mask=data_mask)
-
-    # Rotate data to have North up
-    if rotate_data:
-        data_mask = np.ones(data_array.shape[1:]).astype(bool)
-        alpha = headers[0]['orientat']
-        data_array, error_array, data_mask, headers = proj_red.rotate_data(data_array, error_array, data_mask, headers, -alpha)
-
-    # Plot array for checking output
-    if display_data and px_scale.lower() not in ['full', 'integrate']:
-        proj_plots.plot_obs(data_array, headers, savename="_".join([figname, "rebin"]), plots_folder=plots_folder, norm=LogNorm(
-            vmin=data_array[data_array > 0.].min()*headers[0]['photflam'], vmax=data_array[data_array > 0.].max()*headers[0]['photflam']))
-
-    background = np.array([np.array(bkg).reshape(1, 1) for bkg in background])
-    background_error = np.array([np.array(np.sqrt((bkg-background[np.array([h['filtnam1'] == head['filtnam1'] for h in headers], dtype=bool)].mean())
-                                                  ** 2/np.sum([h['filtnam1'] == head['filtnam1'] for h in headers]))).reshape(1, 1) for bkg, head in zip(background, headers)])
-
-    # Step 2:
-    # Compute Stokes I, Q, U with smoothed polarized images
-    # SMOOTHING DISCUSSION :
-    # FWHM of FOC have been estimated at about 0.03" across 1500-5000 Angstrom band, which is about 2 detector pixels wide
-    # see Jedrzejewski, R.; Nota, A.; Hack, W. J., A Comparison Between FOC and WFPC2
-    # Bibcode : 1995chst.conf...10J
-    I_stokes, Q_stokes, U_stokes, Stokes_cov = proj_red.compute_Stokes(
-        data_array, error_array, data_mask, headers, FWHM=smoothing_FWHM, scale=smoothing_scale, smoothing=smoothing_function, transmitcorr=transmitcorr)
-    I_bkg, Q_bkg, U_bkg, S_cov_bkg = proj_red.compute_Stokes(background, background_error, np.array(True).reshape(
-        1, 1), headers, FWHM=None, scale=smoothing_scale, smoothing=smoothing_function, transmitcorr=False)
+        _data_mask = np.ones(_data_array[0].shape, dtype=bool)
+
+        # Step 4: Compute Stokes I, Q, U
+        _background = np.array([np.array(bkg).reshape(1, 1) for bkg in _background])
+        _background_error = np.array([np.array(np.sqrt((bkg-_background[np.array([h['filtnam1'] == head['filtnam1'] for h in _headers], dtype=bool)].mean())
+                                                       ** 2/np.sum([h['filtnam1'] == head['filtnam1'] for h in _headers]))).reshape(1, 1) for bkg, head in zip(_background, _headers)])
+
+        _I_stokes, _Q_stokes, _U_stokes, _Stokes_cov = proj_red.compute_Stokes(_data_array, _error_array, _data_mask, _headers,
+                                                                               FWHM=None, scale=smoothing_scale, smoothing=smoothing_function, transmitcorr=transmitcorr)
+        _I_bkg, _Q_bkg, _U_bkg, _S_cov_bkg = proj_red.compute_Stokes(_background, _background_error, np.array(True).reshape(1, 1), _headers,
+                                                                     FWHM=None, scale=smoothing_scale, smoothing=smoothing_function, transmitcorr=False)
+
+        # Step 5: Compute polarimetric parameters (polarization degree and angle).
+        _P, _debiased_P, _s_P, _s_P_P, _PA, _s_PA, _s_PA_P = proj_red.compute_pol(_I_stokes, _Q_stokes, _U_stokes, _Stokes_cov, _headers)
+        _P_bkg, _debiased_P_bkg, _s_P_bkg, _s_P_P_bkg, _PA_bkg, _s_PA_bkg, _s_PA_P_bkg = proj_red.compute_pol(_I_bkg, _Q_bkg, _U_bkg, _S_cov_bkg, _headers)
+
+        # Step 6: Save image to FITS.
+        figname = "_".join([figname, figtype]) if figtype != "" else figname
+        _Stokes_test = proj_fits.save_Stokes(_I_stokes, _Q_stokes, _U_stokes, _Stokes_cov, _P, _debiased_P, _s_P, _s_P_P, _PA, _s_PA, _s_PA_P,
+                                             _headers, _data_mask, figname, data_folder=data_folder, return_hdul=True)
+
+        # Step 7: Print the integrated and background values.
+        _data_mask = _Stokes_test['data_mask'].data.astype(bool)
+        print(_data_array.shape, _data_mask.shape)
+        print("F_int({0:.0f} Angs) = ({1} ± {2})e{3} ergs.cm^-2.s^-1.Angs^-1".format(_headers[0]['photplam'], *sci_not(
+            _Stokes_test[0].data[_data_mask].sum()*_headers[0]['photflam'], np.sqrt(_Stokes_test[3].data[0, 0][_data_mask].sum())*_headers[0]['photflam'], 2, out=int)))
+        print("P_int = {0:.1f} ± {1:.1f} %".format(_headers[0]['p_int']*100., np.ceil(_headers[0]['p_int_err']*1000.)/10.))
+        print("PA_int = {0:.1f} ± {1:.1f} °".format(princ_angle(_headers[0]['pa_int']), princ_angle(np.ceil(_headers[0]['pa_int_err']*10.)/10.)))
+        # Background values
+        print("F_bkg({0:.0f} Angs) = ({1} ± {2})e{3} ergs.cm^-2.s^-1.Angs^-1".format(_headers[0]['photplam'], *sci_not(
+            _I_bkg[0, 0]*_headers[0]['photflam'], np.sqrt(_S_cov_bkg[0, 0][0, 0])*_headers[0]['photflam'], 2, out=int)))
+        print("P_bkg = {0:.1f} ± {1:.1f} %".format(_debiased_P_bkg[0, 0]*100., np.ceil(_s_P_bkg[0, 0]*1000.)/10.))
+        print("PA_bkg = {0:.1f} ± {1:.1f} °".format(princ_angle(_PA_bkg[0, 0]), princ_angle(np.ceil(_s_PA_bkg[0, 0]*10.)/10.)))
+        # Plot polarization map (Background is either total Flux, Polarization degree or Polarization degree error).
+ if px_scale.lower() not in ['full', 'integrate'] and not interactive: + proj_plots.polarization_map(deepcopy(_Stokes_test), _data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, + step_vec=step_vec, vec_scale=vec_scale, savename="_".join([figname]), plots_folder=plots_folder, **options) + proj_plots.polarization_map(deepcopy(_Stokes_test), _data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec, + vec_scale=vec_scale, savename="_".join([figname, "I"]), plots_folder=plots_folder, display='Intensity', **options) + proj_plots.polarization_map(deepcopy(_Stokes_test), _data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec, + vec_scale=vec_scale, savename="_".join([figname, "P_flux"]), plots_folder=plots_folder, display='Pol_Flux', **options) + proj_plots.polarization_map(deepcopy(_Stokes_test), _data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec, + vec_scale=vec_scale, savename="_".join([figname, "P"]), plots_folder=plots_folder, display='Pol_deg', **options) + proj_plots.polarization_map(deepcopy(_Stokes_test), _data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec, + vec_scale=vec_scale, savename="_".join([figname, "PA"]), plots_folder=plots_folder, display='Pol_ang', **options) + proj_plots.polarization_map(deepcopy(_Stokes_test), _data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec, + vec_scale=vec_scale, savename="_".join([figname, "I_err"]), plots_folder=plots_folder, display='I_err', **options) + proj_plots.polarization_map(deepcopy(_Stokes_test), _data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec, + vec_scale=vec_scale, savename="_".join([figname, "P_err"]), plots_folder=plots_folder, display='Pol_deg_err', **options) + proj_plots.polarization_map(deepcopy(_Stokes_test), _data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec, + vec_scale=vec_scale, savename="_".join([figname, "SNRi"]), plots_folder=plots_folder, display='SNRi', **options) + proj_plots.polarization_map(deepcopy(_Stokes_test), _data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec, + vec_scale=vec_scale, savename="_".join([figname, "SNRp"]), plots_folder=plots_folder, display='SNRp', **options) + elif not interactive: + proj_plots.polarization_map(deepcopy(_Stokes_test), _data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, + savename=figname, plots_folder=plots_folder, display='integrate', **options) + elif px_scale.lower() not in ['full', 'integrate']: + proj_plots.pol_map(_Stokes_test, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim) - # if optimal_binning: - # _I_stokes, _Q_stokes, _U_stokes, _Stokes_cov = proj_red.compute_Stokes( - # _data_array, _error_array, _data_mask, _headers, FWHM=None, scale=smoothing_scale, smoothing=smoothing_function, transmitcorr=transmitcorr) - # _I_bkg, _Q_bkg, _U_bkg, _S_cov_bkg = proj_red.compute_Stokes(_background, background_error, np.array(True).reshape( - # 1, 1), _headers, FWHM=None, scale=smoothing_scale, smoothing=smoothing_function, transmitcorr=False) + else: + options = {'optimize': optimize, 'optimal_binning': False} + # Crop data to remove outside blank margins. 
+ data_array, error_array, headers = proj_red.crop_array(data_array, headers, step=5, null_val=0., inside=True, + display=display_crop, savename=figname, plots_folder=plots_folder) + data_mask = np.ones(data_array[0].shape, dtype=bool) - # Step 3: - # Rotate images to have North up - if rotate_stokes: - I_stokes, Q_stokes, U_stokes, Stokes_cov, data_mask, headers = proj_red.rotate_Stokes( - I_stokes, Q_stokes, U_stokes, Stokes_cov, data_mask, headers, SNRi_cut=None) - I_bkg, Q_bkg, U_bkg, S_cov_bkg, _, _ = proj_red.rotate_Stokes(I_bkg, Q_bkg, U_bkg, S_cov_bkg, np.array(True).reshape(1, 1), headers, SNRi_cut=None) + # Deconvolve data using Richardson-Lucy iterative algorithm with a gaussian PSF of given FWHM. + if deconvolve: + data_array = proj_red.deconvolve_array(data_array, headers, psf=psf, FWHM=psf_FWHM, scale=psf_scale, shape=psf_shape, iterations=iterations, algo=algo) - # Compute polarimetric parameters (polarization degree and angle). - P, debiased_P, s_P, s_P_P, PA, s_PA, s_PA_P = proj_red.compute_pol(I_stokes, Q_stokes, U_stokes, Stokes_cov, headers) - P_bkg, debiased_P_bkg, s_P_bkg, s_P_P_bkg, PA_bkg, s_PA_bkg, s_PA_P_bkg = proj_red.compute_pol(I_bkg, Q_bkg, U_bkg, S_cov_bkg, headers) + # Estimate error from data background, estimated from sub-image of desired sub_shape. + background = None + data_array, error_array, headers, background, error_bkg = proj_red.get_error(data_array, headers, error_array, data_mask=data_mask, sub_type=error_sub_type, subtract_error=subtract_error, display=display_bkg, savename="_".join([figname, "errors"]), plots_folder=plots_folder, return_background=True) - # Step 4: - # Save image to FITS. - figname = "_".join([figname, figtype]) if figtype != "" else figname - Stokes_test = proj_fits.save_Stokes(I_stokes, Q_stokes, U_stokes, Stokes_cov, P, debiased_P, s_P, s_P_P, PA, s_PA, s_PA_P, - headers, data_mask, figname, data_folder=data_folder, return_hdul=True) + # Align and rescale images with oversampling. + data_array, error_array, headers, data_mask, shifts, error_shifts = proj_red.align_data( + data_array, headers, error_array=error_array, background=background, upsample_factor=10, ref_center=align_center, return_shifts=True) + + if display_align: + print("Image shifts: {} \nShifts uncertainty: {}".format(shifts, error_shifts)) + proj_plots.plot_obs(data_array, headers, savename="_".join([figname, str(align_center)]), plots_folder=plots_folder, norm=LogNorm( + vmin=data_array[data_array > 0.].min()*headers[0]['photflam'], vmax=data_array[data_array > 0.].max()*headers[0]['photflam'])) - # Step 5: - # crop to desired region of interest (roi) - if crop: - figname += "_crop" - stokescrop = proj_plots.crop_Stokes(deepcopy(Stokes_test), norm=LogNorm()) - stokescrop.crop() - stokescrop.write_to("/".join([data_folder, figname+".fits"])) - Stokes_test, headers = stokescrop.hdul_crop, [dataset.header for dataset in stokescrop.hdul_crop] + # Rebin data to desired pixel size. 
+ if rebin: + data_array, error_array, headers, Dxy, data_mask = proj_red.rebin_array( + data_array, error_array, headers, pxsize=pxsize, scale=px_scale, operation=rebin_operation, data_mask=data_mask) - data_mask = Stokes_test['data_mask'].data.astype(bool) - print("F_int({0:.0f} Angs) = ({1} ± {2})e{3} ergs.cm^-2.s^-1.Angs^-1".format(headers[0]['photplam'], *sci_not( - Stokes_test[0].data[data_mask].sum()*headers[0]['photflam'], np.sqrt(Stokes_test[3].data[0, 0][data_mask].sum())*headers[0]['photflam'], 2, out=int))) - print("P_int = {0:.1f} ± {1:.1f} %".format(headers[0]['p_int']*100., np.ceil(headers[0]['p_int_err']*1000.)/10.)) - print("PA_int = {0:.1f} ± {1:.1f} °".format(princ_angle(headers[0]['pa_int']), princ_angle(np.ceil(headers[0]['pa_int_err']*10.)/10.))) - # Background values - print("F_bkg({0:.0f} Angs) = ({1} ± {2})e{3} ergs.cm^-2.s^-1.Angs^-1".format(headers[0]['photplam'], *sci_not( - I_bkg[0, 0]*headers[0]['photflam'], np.sqrt(S_cov_bkg[0, 0][0, 0])*headers[0]['photflam'], 2, out=int))) - print("P_bkg = {0:.1f} ± {1:.1f} %".format(debiased_P_bkg[0, 0]*100., np.ceil(s_P_bkg[0, 0]*1000.)/10.)) - print("PA_bkg = {0:.1f} ± {1:.1f} °".format(princ_angle(PA_bkg[0, 0]), princ_angle(np.ceil(s_PA_bkg[0, 0]*10.)/10.))) - # Plot polarization map (Background is either total Flux, Polarization degree or Polarization degree error). - if px_scale.lower() not in ['full', 'integrate'] and not interactive: - proj_plots.polarization_map(deepcopy(Stokes_test), data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, - step_vec=step_vec, vec_scale=vec_scale, savename="_".join([figname]), plots_folder=plots_folder, **options) - proj_plots.polarization_map(deepcopy(Stokes_test), data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec, - vec_scale=vec_scale, savename="_".join([figname, "I"]), plots_folder=plots_folder, display='Intensity', **options) - proj_plots.polarization_map(deepcopy(Stokes_test), data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec, - vec_scale=vec_scale, savename="_".join([figname, "P_flux"]), plots_folder=plots_folder, display='Pol_Flux', **options) - proj_plots.polarization_map(deepcopy(Stokes_test), data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec, - vec_scale=vec_scale, savename="_".join([figname, "P"]), plots_folder=plots_folder, display='Pol_deg', **options) - proj_plots.polarization_map(deepcopy(Stokes_test), data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec, - vec_scale=vec_scale, savename="_".join([figname, "PA"]), plots_folder=plots_folder, display='Pol_ang', **options) - proj_plots.polarization_map(deepcopy(Stokes_test), data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec, - vec_scale=vec_scale, savename="_".join([figname, "I_err"]), plots_folder=plots_folder, display='I_err', **options) - proj_plots.polarization_map(deepcopy(Stokes_test), data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec, - vec_scale=vec_scale, savename="_".join([figname, "P_err"]), plots_folder=plots_folder, display='Pol_deg_err', **options) - proj_plots.polarization_map(deepcopy(Stokes_test), data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec, - vec_scale=vec_scale, savename="_".join([figname, "SNRi"]), plots_folder=plots_folder, display='SNRi', **options) - proj_plots.polarization_map(deepcopy(Stokes_test), data_mask, SNRp_cut=SNRp_cut, 
SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec, - vec_scale=vec_scale, savename="_".join([figname, "SNRp"]), plots_folder=plots_folder, display='SNRp', **options) - elif not interactive: - proj_plots.polarization_map(deepcopy(Stokes_test), data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, - savename=figname, plots_folder=plots_folder, display='integrate', **options) - elif px_scale.lower() not in ['full', 'integrate']: - proj_plots.pol_map(Stokes_test, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim) + # Rotate data to have North up + if rotate_data: + data_mask = np.ones(data_array.shape[1:]).astype(bool) + alpha = headers[0]['orientat'] + data_array, error_array, data_mask, headers = proj_red.rotate_data(data_array, error_array, data_mask, headers, -alpha) + + # Plot array for checking output + if display_data and px_scale.lower() not in ['full', 'integrate']: + proj_plots.plot_obs(data_array, headers, savename="_".join([figname, "rebin"]), plots_folder=plots_folder, norm=LogNorm( + vmin=data_array[data_array > 0.].min()*headers[0]['photflam'], vmax=data_array[data_array > 0.].max()*headers[0]['photflam'])) + + background = np.array([np.array(bkg).reshape(1, 1) for bkg in background]) + background_error = np.array([np.array(np.sqrt((bkg-background[np.array([h['filtnam1'] == head['filtnam1'] for h in headers], dtype=bool)].mean()) + ** 2/np.sum([h['filtnam1'] == head['filtnam1'] for h in headers]))).reshape(1, 1) for bkg, head in zip(background, headers)]) + + # Step 2: + # Compute Stokes I, Q, U with smoothed polarized images + # SMOOTHING DISCUSSION : + # FWHM of FOC have been estimated at about 0.03" across 1500-5000 Angstrom band, which is about 2 detector pixels wide + # see Jedrzejewski, R.; Nota, A.; Hack, W. J., A Comparison Between FOC and WFPC2 + # Bibcode : 1995chst.conf...10J + I_stokes, Q_stokes, U_stokes, Stokes_cov = proj_red.compute_Stokes( + data_array, error_array, data_mask, headers, FWHM=smoothing_FWHM, scale=smoothing_scale, smoothing=smoothing_function, transmitcorr=transmitcorr) + I_bkg, Q_bkg, U_bkg, S_cov_bkg = proj_red.compute_Stokes(background, background_error, np.array(True).reshape( + 1, 1), headers, FWHM=None, scale=smoothing_scale, smoothing=smoothing_function, transmitcorr=False) + + # Step 3: + # Rotate images to have North up + if rotate_stokes: + I_stokes, Q_stokes, U_stokes, Stokes_cov, data_mask, headers = proj_red.rotate_Stokes( + I_stokes, Q_stokes, U_stokes, Stokes_cov, data_mask, headers, SNRi_cut=None) + I_bkg, Q_bkg, U_bkg, S_cov_bkg, _, _ = proj_red.rotate_Stokes(I_bkg, Q_bkg, U_bkg, S_cov_bkg, np.array(True).reshape(1, 1), headers, SNRi_cut=None) + + # Compute polarimetric parameters (polarization degree and angle). + P, debiased_P, s_P, s_P_P, PA, s_PA, s_PA_P = proj_red.compute_pol(I_stokes, Q_stokes, U_stokes, Stokes_cov, headers) + P_bkg, debiased_P_bkg, s_P_bkg, s_P_P_bkg, PA_bkg, s_PA_bkg, s_PA_P_bkg = proj_red.compute_pol(I_bkg, Q_bkg, U_bkg, S_cov_bkg, headers) + + # Step 4: + # Save image to FITS. 
+ figname = "_".join([figname, figtype]) if figtype != "" else figname + Stokes_test = proj_fits.save_Stokes(I_stokes, Q_stokes, U_stokes, Stokes_cov, P, debiased_P, s_P, s_P_P, PA, s_PA, s_PA_P, + headers, data_mask, figname, data_folder=data_folder, return_hdul=True) + + # Step 5: + # crop to desired region of interest (roi) + if crop: + figname += "_crop" + stokescrop = proj_plots.crop_Stokes(deepcopy(Stokes_test), norm=LogNorm()) + stokescrop.crop() + stokescrop.write_to("/".join([data_folder, figname+".fits"])) + Stokes_test, headers = stokescrop.hdul_crop, [dataset.header for dataset in stokescrop.hdul_crop] + + data_mask = Stokes_test['data_mask'].data.astype(bool) + print("F_int({0:.0f} Angs) = ({1} ± {2})e{3} ergs.cm^-2.s^-1.Angs^-1".format(headers[0]['photplam'], *sci_not( + Stokes_test[0].data[data_mask].sum()*headers[0]['photflam'], np.sqrt(Stokes_test[3].data[0, 0][data_mask].sum())*headers[0]['photflam'], 2, out=int))) + print("P_int = {0:.1f} ± {1:.1f} %".format(headers[0]['p_int']*100., np.ceil(headers[0]['p_int_err']*1000.)/10.)) + print("PA_int = {0:.1f} ± {1:.1f} °".format(princ_angle(headers[0]['pa_int']), princ_angle(np.ceil(headers[0]['pa_int_err']*10.)/10.))) + # Background values + print("F_bkg({0:.0f} Angs) = ({1} ± {2})e{3} ergs.cm^-2.s^-1.Angs^-1".format(headers[0]['photplam'], *sci_not( + I_bkg[0, 0]*headers[0]['photflam'], np.sqrt(S_cov_bkg[0, 0][0, 0])*headers[0]['photflam'], 2, out=int))) + print("P_bkg = {0:.1f} ± {1:.1f} %".format(debiased_P_bkg[0, 0]*100., np.ceil(s_P_bkg[0, 0]*1000.)/10.)) + print("PA_bkg = {0:.1f} ± {1:.1f} °".format(princ_angle(PA_bkg[0, 0]), princ_angle(np.ceil(s_PA_bkg[0, 0]*10.)/10.))) + # Plot polarization map (Background is either total Flux, Polarization degree or Polarization degree error). 
+ if px_scale.lower() not in ['full', 'integrate'] and not interactive: + proj_plots.polarization_map(deepcopy(Stokes_test), data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, + step_vec=step_vec, vec_scale=vec_scale, savename="_".join([figname]), plots_folder=plots_folder, **options) + proj_plots.polarization_map(deepcopy(Stokes_test), data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec, + vec_scale=vec_scale, savename="_".join([figname, "I"]), plots_folder=plots_folder, display='Intensity', **options) + proj_plots.polarization_map(deepcopy(Stokes_test), data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec, + vec_scale=vec_scale, savename="_".join([figname, "P_flux"]), plots_folder=plots_folder, display='Pol_Flux', **options) + proj_plots.polarization_map(deepcopy(Stokes_test), data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec, + vec_scale=vec_scale, savename="_".join([figname, "P"]), plots_folder=plots_folder, display='Pol_deg', **options) + proj_plots.polarization_map(deepcopy(Stokes_test), data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec, + vec_scale=vec_scale, savename="_".join([figname, "PA"]), plots_folder=plots_folder, display='Pol_ang', **options) + proj_plots.polarization_map(deepcopy(Stokes_test), data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec, + vec_scale=vec_scale, savename="_".join([figname, "I_err"]), plots_folder=plots_folder, display='I_err', **options) + proj_plots.polarization_map(deepcopy(Stokes_test), data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec, + vec_scale=vec_scale, savename="_".join([figname, "P_err"]), plots_folder=plots_folder, display='Pol_deg_err', **options) + proj_plots.polarization_map(deepcopy(Stokes_test), data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec, + vec_scale=vec_scale, savename="_".join([figname, "SNRi"]), plots_folder=plots_folder, display='SNRi', **options) + proj_plots.polarization_map(deepcopy(Stokes_test), data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec, + vec_scale=vec_scale, savename="_".join([figname, "SNRp"]), plots_folder=plots_folder, display='SNRp', **options) + elif not interactive: + proj_plots.polarization_map(deepcopy(Stokes_test), data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, + savename=figname, plots_folder=plots_folder, display='integrate', **options) + elif px_scale.lower() not in ['full', 'integrate']: + proj_plots.pol_map(Stokes_test, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim) return 0 @@ -264,12 +346,13 @@ if __name__ == "__main__": parser = argparse.ArgumentParser(description='Query MAST for target products') parser.add_argument('-t', '--target', metavar='targetname', required=False, help='the name of the target', type=str, default=None) parser.add_argument('-p', '--proposal_id', metavar='proposal_id', required=False, help='the proposal id of the data products', type=int, default=None) + parser.add_argument('-d', '--data_dir', metavar='directory_path', required=False, help='directory path to the data products', type=str, default=None) parser.add_argument('-f', '--files', metavar='path', required=False, nargs='*', help='the full or relative path to the data products', default=None) parser.add_argument('-o', '--output_dir', metavar='directory_path', required=False, help='output directory path for the 
data products', type=str, default="./data")
     parser.add_argument('-c', '--crop', action='store_true', required=False, help='whether to crop the analysis region')
     parser.add_argument('-i', '--interactive', action='store_true', required=False, help='whether to output to the interactive analysis tool')
     args = parser.parse_args()
-    exitcode = main(target=args.target, proposal_id=args.proposal_id, infiles=args.files,
+    exitcode = main(target=args.target, proposal_id=args.proposal_id, data_dir=args.data_dir, infiles=args.files,
                     output_dir=args.output_dir, crop=args.crop, interactive=args.interactive)
     print("Finished with ExitCode: ", exitcode)

diff --git a/package/lib/background.py b/package/lib/background.py
index 8b4f0a6..7d5c4bc 100755
--- a/package/lib/background.py
+++ b/package/lib/background.py
@@ -235,7 +235,7 @@ def bkg_fit(data, error, mask, headers, subtract_error=True, display=False, save
         weights = 1/chi2**2
         weights /= weights.sum()
-        bkg = np.sum(weights*(coeff[:, 1]+np.abs(coeff[:, 2])*subtract_error))
+        bkg = np.sum(weights*(coeff[:, 1]+np.abs(coeff[:, 2]) * 0.01))  # use a fixed 1 % of sigma instead of the subtract_error parameter
 
         error_bkg[i] *= bkg
 
@@ -342,7 +342,7 @@ def bkg_hist(data, error, mask, headers, sub_type=None, subtract_error=True, dis
         # popt, pcov = curve_fit(gausspol, binning[-1], hist, p0=p0)
         popt, pcov = curve_fit(gauss, binning[-1], hist, p0=p0)
         coeff.append(popt)
-        bkg = popt[1]+np.abs(popt[2])*subtract_error
+        bkg = popt[1]+np.abs(popt[2]) * 0.01  # use a fixed 1 % of sigma instead of the subtract_error parameter
 
         error_bkg[i] *= bkg
 
@@ -443,7 +443,7 @@ def bkg_mini(data, error, mask, headers, sub_shape=(15, 15), subtract_error=True
         # Compute error : root mean square of the background
         sub_image = image[minima[0]:minima[0]+sub_shape[0], minima[1]:minima[1]+sub_shape[1]]
         # bkg = np.std(sub_image)  # Previously computed using standard deviation over the background
-        bkg = np.sqrt(np.sum(sub_image**2)/sub_image.size)*subtract_error if subtract_error > 0 else np.sqrt(np.sum(sub_image**2)/sub_image.size)
+        bkg = np.sqrt(np.sum(sub_image**2)/sub_image.size)*0.01 if subtract_error > 0 else np.sqrt(np.sum(sub_image**2)/sub_image.size)
 
         error_bkg[i] *= bkg
 
         # n_error_array[i] = np.sqrt(n_error_array[i]**2 + error_bkg[i]**2)
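Before the plots.py changes, a note on the three background.py hunks above: they all pin the offset added to the fitted background at 1 % of the Gaussian width instead of reusing the `subtract_error` parameter. A minimal, self-contained sketch of the `bkg_hist`-style estimate under that convention; the `gauss` model and the synthetic pixel values below are assumptions for illustration, and only the `curve_fit` call mirrors the patched code:

import numpy as np
from scipy.optimize import curve_fit

def gauss(x, a, mu, sigma):
    # assumed Gaussian model for the sky-pixel histogram
    return a * np.exp(-0.5 * ((x - mu) / sigma) ** 2)

rng = np.random.default_rng(0)
pixels = rng.normal(1.0, 0.1, 10000)          # hypothetical sky-dominated pixel values
hist, edges = np.histogram(pixels, bins=50)
centers = 0.5 * (edges[:-1] + edges[1:])

popt, pcov = curve_fit(gauss, centers, hist, p0=[hist.max(), pixels.mean(), pixels.std()])
bkg = popt[1] + np.abs(popt[2]) * 0.01        # fitted mean plus a fixed 1 % of sigma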
diff --git a/package/lib/plots.py b/package/lib/plots.py
index 6ae86a0..6c4dad2 100755
--- a/package/lib/plots.py
+++ b/package/lib/plots.py
@@ -41,8 +41,11 @@ prototypes :
 """
 from copy import deepcopy
-import numpy as np
 from os.path import join as path_join
+
+from astropy.wcs import WCS
+from astropy.io import fits
+from astropy.coordinates import SkyCoord
 import matplotlib.pyplot as plt
 from matplotlib.patches import Rectangle, Circle, FancyArrowPatch
 from matplotlib.path import Path
@@ -51,49 +54,48 @@ from matplotlib.colors import LogNorm
 import matplotlib.font_manager as fm
 import matplotlib.patheffects as pe
 from mpl_toolkits.axes_grid1.anchored_artists import AnchoredSizeBar, AnchoredDirectionArrows
-from astropy.wcs import WCS
-from astropy.io import fits
-from astropy.coordinates import SkyCoord
+import numpy as np
 from scipy.ndimage import zoom as sc_zoom
+
 try:
     from .utils import rot2D, princ_angle, sci_not
 except ImportError:
     from utils import rot2D, princ_angle, sci_not
 
-def plot_quiver(ax, stkI, stkQ, stkU, stk_cov, poldata, pangdata, wcs, convert, step_vec=1, vec_scale=2., adaptive_binning=False):
-    def adaptive_binning(I_stokes, Q_stokes, U_stokes, Stokes_cov):
-        shape = I_stokes.shape
-
-        assert shape[0] == shape[1], "Only square images are supported"
-        assert shape[0] % 2 == 0, "Image size must be a power of 2"
-
-        n = int(np.log2(shape[0]))
-        bin_map = np.zeros(shape)
-        bin_num = 0
-
-        for level in range(n):
-            grid_size = 2**level
-            temp_I = I_stokes.reshape(int(shape[0]/grid_size), grid_size, int(shape[1]/grid_size), grid_size).sum(1).sum(2)
-            temp_Q = Q_stokes.reshape(int(shape[0]/grid_size), grid_size, int(shape[1]/grid_size), grid_size).sum(1).sum(2)
-            temp_U = U_stokes.reshape(int(shape[0]/grid_size), grid_size, int(shape[1]/grid_size), grid_size).sum(1).sum(2)
-            temp_cov = Stokes_cov.reshape(3, 3, int(shape[0]/grid_size), grid_size, int(shape[1]/grid_size), grid_size).sum(3).sum(4)
-            temp_bin_map = bin_map.reshape(int(shape[0]/grid_size), grid_size, int(shape[1]/grid_size), grid_size).sum(1).sum(2)
-
-            temp_P = (temp_Q**2 + temp_U**2)**0.5 / temp_I
-            temp_P_err = (1 / temp_I) * np.sqrt((temp_Q**2 * temp_cov[1,1,:,:] + temp_U**2 * temp_cov[2,2,:,:] + 2. * temp_Q * temp_U * temp_cov[1,2,:,:]) / (temp_Q**2 + temp_U**2) + \
-                                                ((temp_Q / temp_I)**2 + (temp_U / temp_I)**2) * temp_cov[0,0,:,:] - \
-                                                2. * (temp_Q / temp_I) * temp_cov[0,1,:,:] - \
-                                                2. * (temp_U / temp_I) * temp_cov[0,2,:,:])
-
-            for i in range(int(shape[0]/grid_size)):
-                for j in range(int(shape[1]/grid_size)):
-                    if (temp_P[i,j] / temp_P_err[i,j] > 3) and (temp_bin_map[i,j] == 0):  # the default criterion is 3 sigma in P
-                        bin_num += 1
-                        bin_map[i*grid_size:(i+1)*grid_size,j*grid_size:(j+1)*grid_size] = bin_num
-
-        return bin_map, bin_num
-
-    if adaptive_binning:
+def adaptive_binning(I_stokes, Q_stokes, U_stokes, Stokes_cov):
+    shape = I_stokes.shape
+
+    assert shape[0] == shape[1], "Only square images are supported"
+    assert shape[0] % 2 == 0, "Image size must be a power of 2"
+
+    n = int(np.log2(shape[0]))
+    bin_map = np.zeros(shape)
+    bin_num = 0
+
+    for level in range(n):
+        grid_size = 2**level
+        temp_I = I_stokes.reshape(int(shape[0]/grid_size), grid_size, int(shape[1]/grid_size), grid_size).sum(1).sum(2)
+        temp_Q = Q_stokes.reshape(int(shape[0]/grid_size), grid_size, int(shape[1]/grid_size), grid_size).sum(1).sum(2)
+        temp_U = U_stokes.reshape(int(shape[0]/grid_size), grid_size, int(shape[1]/grid_size), grid_size).sum(1).sum(2)
+        temp_cov = Stokes_cov.reshape(3, 3, int(shape[0]/grid_size), grid_size, int(shape[1]/grid_size), grid_size).sum(3).sum(4)
+        temp_bin_map = bin_map.reshape(int(shape[0]/grid_size), grid_size, int(shape[1]/grid_size), grid_size).sum(1).sum(2)
+
+        temp_P = (temp_Q**2 + temp_U**2)**0.5 / temp_I
+        temp_P_err = (1 / temp_I) * np.sqrt((temp_Q**2 * temp_cov[1,1,:,:] + temp_U**2 * temp_cov[2,2,:,:] + 2. * temp_Q * temp_U * temp_cov[1,2,:,:]) / (temp_Q**2 + temp_U**2) + \
+                                            ((temp_Q / temp_I)**2 + (temp_U / temp_I)**2) * temp_cov[0,0,:,:] - \
+                                            2. * (temp_Q / temp_I) * temp_cov[0,1,:,:] - \
+                                            2. * (temp_U / temp_I) * temp_cov[0,2,:,:])
+
+        for i in range(int(shape[0]/grid_size)):
+            for j in range(int(shape[1]/grid_size)):
+                if (temp_P[i,j] / temp_P_err[i,j] > 3) and (temp_bin_map[i,j] == 0):  # the default criterion is 3 sigma in P
+                    bin_num += 1
+                    bin_map[i*grid_size:(i+1)*grid_size,j*grid_size:(j+1)*grid_size] = bin_num
+
+    return bin_map, bin_num
+
+def plot_quiver(ax, stkI, stkQ, stkU, stk_cov, poldata, pangdata, step_vec=1, vec_scale=2., optimal_binning=False):
+    if optimal_binning:
         bin_map, bin_num = adaptive_binning(stkI, stkQ, stkU, stk_cov)
 
         for i in range(1, bin_num+1):
@@ -114,14 +116,14 @@ def plot_quiver(ax, stkI, stkQ, stkU, stk_cov, poldata, pangdata, wcs, convert,
             np.sqrt(bin_U**2 * bin_cov[1,1] + bin_Q**2 * bin_cov[2,2] - 2.
* bin_Q * bin_U * bin_cov[1,2]) ax.quiver(y_center, x_center, poldata * np.cos(np.pi/2.+pangdata), poldata * np.sin(np.pi/2.+pangdata), units='xy', angles='uv', scale=1./vec_scale, scale_units='xy', pivot='mid', headwidth=0., headlength=0., headaxislength=0., width=0.1, linewidth=0.5, color='white', edgecolor='white') - ax.quiver(y_center, x_center, poldata * np.cos(np.pi/2.+pangdata+3*pangdata_err), poldata * np.sin(np.pi/2.+pangdata+3*pangdata_err), units='xy', angles='uv', scale=1./vec_scale, scale_units='xy', pivot='mid', headwidth=0., headlength=0., headaxislength=0., width=0.1, linewidth=0.5, color='black', edgecolor='black', ls='dashed') - ax.quiver(y_center, x_center, poldata * np.cos(np.pi/2.+pangdata-3*pangdata_err), poldata * np.sin(np.pi/2.+pangdata-3*pangdata_err), units='xy', angles='uv', scale=1./vec_scale, scale_units='xy', pivot='mid', headwidth=0., headlength=0., headaxislength=0., width=0.1, linewidth=0.5, color='black', edgecolor='black', ls='dashed') + ax.quiver(y_center, x_center, poldata * np.cos(np.pi/2.+pangdata+pangdata_err), poldata * np.sin(np.pi/2.+pangdata+pangdata_err), units='xy', angles='uv', scale=1./vec_scale, scale_units='xy', pivot='mid', headwidth=0., headlength=0., headaxislength=0., width=0.1, linewidth=0.5, color='black', edgecolor='black', ls='dashed') + ax.quiver(y_center, x_center, poldata * np.cos(np.pi/2.+pangdata-pangdata_err), poldata * np.sin(np.pi/2.+pangdata-pangdata_err), units='xy', angles='uv', scale=1./vec_scale, scale_units='xy', pivot='mid', headwidth=0., headlength=0., headaxislength=0., width=0.1, linewidth=0.5, color='black', edgecolor='black', ls='dashed') else: X, Y = np.meshgrid(np.arange(stkI.shape[1]), np.arange(stkI.shape[0])) U, V = poldata*np.cos(np.pi/2.+pangdata*np.pi/180.), poldata*np.sin(np.pi/2.+pangdata*np.pi/180.) ax.quiver(X[::step_vec, ::step_vec], Y[::step_vec, ::step_vec], U[::step_vec, ::step_vec], V[::step_vec, ::step_vec], units='xy', angles='uv', scale=1./vec_scale, scale_units='xy', pivot='mid', headwidth=0., headlength=0., headaxislength=0., width=0.5, linewidth=0.75, color='w', edgecolor='k') - + def plot_obs(data_array, headers, rectangle=None, savename=None, plots_folder="", **kwargs): """ @@ -157,7 +159,7 @@ def plot_obs(data_array, headers, rectangle=None, savename=None, plots_folder="" nb_obs = np.max([np.sum([head['filtnam1'] == curr_pol for head in headers]) for curr_pol in ['POL0', 'POL60', 'POL120']]) shape = np.array((3, nb_obs)) fig, ax = plt.subplots(shape[0], shape[1], figsize=(3*shape[1], 3*shape[0]), dpi=200, layout='constrained', - sharex=True, sharey=True) + sharex=True, sharey=True) r_pol = dict(pol0=0, pol60=1, pol120=2) c_pol = dict(pol0=0, pol60=0, pol120=0) for i, (data, head) in enumerate(zip(data_array, headers)): @@ -318,7 +320,11 @@ def polarization_map(Stokes, data_mask=None, rectangle=None, SNRp_cut=3., SNRi_c The figure and ax created for interactive contour maps. 
""" # Get data + optimal_binning = kwargs.get('optimal_binning', False) + stkI = Stokes['I_stokes'].data.copy() + stkQ = Stokes['Q_stokes'].data.copy() + stkU = Stokes['U_stokes'].data.copy() stk_cov = Stokes['IQU_cov_matrix'].data.copy() pol = Stokes['Pol_deg_debiased'].data.copy() pol_err = Stokes['Pol_deg_err'].data.copy() @@ -428,7 +434,7 @@ def polarization_map(Stokes, data_mask=None, rectangle=None, SNRp_cut=3., SNRi_c display = 's_i' if (SNRi > SNRi_cut).any(): vmin, vmax = 1./2.*np.median(np.sqrt(stk_cov[0, 0][stk_cov[0, 0] > 0.]) * - convert_flux), np.max(np.sqrt(stk_cov[0, 0][stk_cov[0, 0] > 0.])*convert_flux) + convert_flux), np.max(np.sqrt(stk_cov[0, 0][stk_cov[0, 0] > 0.])*convert_flux) im = ax.imshow(np.sqrt(stk_cov[0, 0])*convert_flux, norm=LogNorm(vmin, vmax), aspect='equal', cmap='inferno_r', alpha=1.) else: im = ax.imshow(np.sqrt(stk_cov[0, 0])*convert_flux, aspect='equal', cmap='inferno', alpha=1.) @@ -486,10 +492,11 @@ def polarization_map(Stokes, data_mask=None, rectangle=None, SNRp_cut=3., SNRi_c poldata[np.isfinite(poldata)] = 1./2. step_vec = 1 vec_scale = 2. - X, Y = np.meshgrid(np.arange(stkI.shape[1]), np.arange(stkI.shape[0])) - U, V = poldata*np.cos(np.pi/2.+pangdata*np.pi/180.), poldata*np.sin(np.pi/2.+pangdata*np.pi/180.) - ax.quiver(X[::step_vec, ::step_vec], Y[::step_vec, ::step_vec], U[::step_vec, ::step_vec], V[::step_vec, ::step_vec], units='xy', angles='uv', - scale=1./vec_scale, scale_units='xy', pivot='mid', headwidth=0., headlength=0., headaxislength=0., width=0.5, linewidth=0.75, color='w', edgecolor='k') + # X, Y = np.meshgrid(np.arange(stkI.shape[1]), np.arange(stkI.shape[0])) + # U, V = poldata*np.cos(np.pi/2.+pangdata*np.pi/180.), poldata*np.sin(np.pi/2.+pangdata*np.pi/180.) + # ax.quiver(X[::step_vec, ::step_vec], Y[::step_vec, ::step_vec], U[::step_vec, ::step_vec], V[::step_vec, ::step_vec], units='xy', angles='uv', + # scale=1./vec_scale, scale_units='xy', pivot='mid', headwidth=0., headlength=0., headaxislength=0., width=0.5, linewidth=0.75, color='w', edgecolor='k') + plot_quiver(ax, stkI, stkQ, stkU, stk_cov, poldata, pangdata, step_vec=step_vec, vec_scale=vec_scale, optimal_binning=optimal_binning) pol_sc = AnchoredSizeBar(ax.transData, vec_scale, r"$P$= 100 %", 4, pad=0.5, sep=5, borderpad=0.5, frameon=False, size_vertical=0.005, color='w') ax.add_artist(pol_sc) @@ -510,7 +517,7 @@ def polarization_map(Stokes, data_mask=None, rectangle=None, SNRp_cut=3., SNRi_c x, y, width, height, angle, color = rectangle x, y = np.array([x, y]) - np.array(stkI.shape)/2. 
ax.add_patch(Rectangle((x, y), width, height, angle=angle, - edgecolor=color, fill=False)) + edgecolor=color, fill=False)) # ax.coords.grid(True, color='white', ls='dotted', alpha=0.5) ax.coords[0].set_axislabel('Right Ascension (J2000)') @@ -562,9 +569,9 @@ class align_maps(object): self.other_convert, self.other_unit = (float(self.other_header['photflam']), r"$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA^{-1}$") if "PHOTFLAM" in list( self.other_header.keys()) else (1., self.other_header['bunit'] if 'BUNIT' in list(self.other_header.keys()) else "Arbitray Units") self.map_observer = "/".join([self.map_header['telescop'], self.map_header['instrume']] - ) if "INSTRUME" in list(self.map_header.keys()) else self.map_header['telescop'] + ) if "INSTRUME" in list(self.map_header.keys()) else self.map_header['telescop'] self.other_observer = "/".join([self.other_header['telescop'], self.other_header['instrume']] - ) if "INSTRUME" in list(self.other_header.keys()) else self.other_header['telescop'] + ) if "INSTRUME" in list(self.other_header.keys()) else self.other_header['telescop'] plt.rcParams.update({'font.size': 10}) fontprops = fm.FontProperties(size=16) diff --git a/package/lib/reduction.py b/package/lib/reduction.py index 410f60f..38b9836 100755 --- a/package/lib/reduction.py +++ b/package/lib/reduction.py @@ -692,7 +692,7 @@ def align_data(data_array, headers, error_array=None, background=None, upsample_ full_headers.append(headers[0]) err_array = np.concatenate((error_array, [np.zeros(ref_data.shape)]), axis=0) - full_array, err_array, full_headers = crop_array(full_array, full_headers, err_array, step=5, inside=False, null_val=0.) + # full_array, err_array, full_headers = crop_array(full_array, full_headers, err_array, step=5, inside=False, null_val=0.) data_array, ref_data, headers = full_array[:-1], full_array[-1], full_headers[:-1] error_array = err_array[:-1] @@ -766,7 +766,7 @@ def align_data(data_array, headers, error_array=None, background=None, upsample_ headers[i].update(headers_wcs[i].to_header()) data_mask = rescaled_mask.all(axis=0) - data_array, error_array, data_mask, headers = crop_array(rescaled_image, headers, rescaled_error, data_mask, null_val=0.01*background) + # data_array, error_array, data_mask, headers = crop_array(rescaled_image, headers, rescaled_error, data_mask, null_val=0.01*background) if return_shifts: return data_array, error_array, headers, data_mask, shifts, errors
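As a closing illustration of the quadtree adaptive_binning scheme this patch introduces: cells are scanned from fine to coarse, Stokes fluxes and covariances are summed per cell, and a cell becomes a bin once its polarization degree is detected above 3 sigma. The sketch below is a simplified standalone version under stated assumptions: it keeps only diagonal covariance terms (the patched routine propagates the full matrix) and runs on synthetic 8x8 maps.

import numpy as np

def snr_p(I, Q, U, var_I, var_Q, var_U):
    # polarization degree of one cell and its diagonal-covariance uncertainty
    P = np.sqrt(Q**2 + U**2) / I
    s_P = np.sqrt((Q**2 * var_Q + U**2 * var_U) / (Q**2 + U**2) + P**2 * var_I) / I
    return P / s_P

rng = np.random.default_rng(1)
I = np.abs(rng.normal(10., 1., (8, 8)))      # illustrative Stokes maps, power-of-two size
Q = rng.normal(1., 0.5, (8, 8))
U = rng.normal(1., 0.5, (8, 8))
var = np.full((8, 8), 0.25)                  # one shared variance map for simplicity

bin_map = np.zeros((8, 8), dtype=int)
bin_num = 0
for level in range(3):                       # fine to coarse: 1x1, 2x2, 4x4 cells
    g = 2 ** level
    for i in range(0, 8, g):
        for j in range(0, 8, g):
            cell = np.s_[i:i+g, j:j+g]
            if bin_map[cell].any():          # already assigned at a finer level
                continue
            if snr_p(I[cell].sum(), Q[cell].sum(), U[cell].sum(),
                     var[cell].sum(), var[cell].sum(), var[cell].sum()) > 3:
                bin_num += 1
                bin_map[cell] = bin_num

Low-signal regions thus end up in large bins while high-signal regions keep single-pixel resolution, which is why the quiver plot draws one averaged vector per bin.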