add optimal_binning to plotting
@@ -5,18 +5,24 @@ Main script where are progressively added the steps for the FOC pipeline reducti
 """

 # Project libraries
-import numpy as np
 from copy import deepcopy
+import os
 from os import system
 from os.path import exists as path_exists

+from matplotlib.colors import LogNorm
+import numpy as np

+from lib.background import subtract_bkg
 import lib.fits as proj_fits  # Functions to handle fits files
 import lib.reduction as proj_red  # Functions used in reduction pipeline
 import lib.plots as proj_plots  # Functions for plotting data
 from lib.utils import sci_not, princ_angle
-from matplotlib.colors import LogNorm


-def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop=False, interactive=False):
+def main(target=None, proposal_id=None, data_dir=None, infiles=None, output_dir="./data", crop=False, interactive=False):
     # Reduction parameters
     # Deconvolution
     deconvolve = False
@@ -36,7 +42,7 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop=
     # Background estimation
     error_sub_type = 'freedman-diaconis'  # sqrt, sturges, rice, scott, freedman-diaconis (default) or shape (example (51, 51))
     subtract_error = 0.01
-    display_bkg = True
+    display_bkg = False

     # Data binning
     rebin = True
@@ -46,7 +52,7 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop=

     # Alignement
     align_center = 'center'  # If None will not align the images
-    display_align = True
+    display_align = False
     display_data = False

     # Transmittance correction
@@ -75,14 +81,14 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop=
     # 3. Use the same alignment as the routine
     # 4. Skip the rebinning step
     # 5. Calulate the Stokes parameters without smoothing
-    #
-    optimal_binning = False
+    optimal_binning = True
     optimize = False
-    options = {'optimize': optimize, 'optimal_binning': optimal_binning}

     # Pipeline start
     # Step 1:
     # Get data from fits files and translate to flux in erg/cm²/s/Angstrom.

+    if data_dir is None:
         if infiles is not None:
             prod = np.array([["/".join(filepath.split('/')[:-1]), filepath.split('/')[-1]] for filepath in infiles], dtype=str)
             obs_dir = "/".join(infiles[0].split("/")[:-1])
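Note: the module-level `options` dictionary deleted here is rebuilt inside each branch of the new `if optimal_binning:` split (see the large hunk below), and every plotting call forwards it with `**options`. A runnable sketch of that round trip; the stub below only mirrors the `kwargs.get` line this commit adds to `polarization_map`:

def polarization_map(Stokes, data_mask=None, **kwargs):
    # Mirrors the added line: the flag defaults to False, so existing
    # callers that never pass it keep the per-pixel quiver behaviour.
    return kwargs.get('optimal_binning', False)

options = {'optimize': False, 'optimal_binning': True}
assert polarization_map(None, **options) is True
assert polarization_map(None) is False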
@@ -97,17 +103,23 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop=
         for prods in products:
             main(target=target, infiles=["/".join(pr) for pr in prods], output_dir=output_dir, crop=crop, interactive=interactive)
         data_folder = prod[0][0]

+        infiles = [p[1] for p in prod]
+        data_array, headers = proj_fits.get_obs_data(infiles, data_folder=data_folder, compute_flux=True)
+
+    else:
+        infiles = [f for f in os.listdir(data_dir) if f.endswith('.fits') and f.startswith('x')]
+        data_folder = data_dir
+        if target is None:
+            target = input("Target name:\n>")
+
+        data_array, headers = proj_fits.get_obs_data(infiles, data_folder=data_folder, compute_flux=True)
+
     try:
         plots_folder = data_folder.replace("data", "plots")
     except ValueError:
         plots_folder = "."
     if not path_exists(plots_folder):
         system("mkdir -p {0:s} ".format(plots_folder))
-    infiles = [p[1] for p in prod]
-    data_array, headers = proj_fits.get_obs_data(infiles, data_folder=data_folder, compute_flux=True)

-    if optimal_binning:
-        _data_array, _headers = deepcopy(data_array), deepcopy(headers)

     figname = "_".join([target, "FOC"])
     figtype = ""
@@ -124,33 +136,109 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop=
     if align_center is None:
         figtype += "_not_aligned"

-    # Crop data to remove outside blank margins.
-    data_array, error_array, headers = proj_red.crop_array(data_array, headers, step=5, null_val=0.,
-                                                           inside=True, display=display_crop, savename=figname, plots_folder=plots_folder)
+    if optimal_binning:
+        options = {'optimize': optimize, 'optimal_binning': True}
+
+        # Step 1: Load the data again and preserve the full images
+        _data_array, _headers = deepcopy(data_array), deepcopy(headers)  # Preserve full images
+        _data_mask = np.ones(_data_array[0].shape, dtype=bool)
+
+        # Step 2: Skip the cropping step but use the same error and background estimation (I don't understand why this is wrong)
+        data_array, error_array, headers = proj_red.crop_array(data_array, headers, step=5, null_val=0., inside=True,
+                                                               display=display_crop, savename=figname, plots_folder=plots_folder)
         data_mask = np.ones(data_array[0].shape, dtype=bool)

-    if optimal_binning:
+        background = None
+        _, _, _, background, error_bkg = proj_red.get_error(data_array, headers, error_array, data_mask=data_mask, sub_type=error_sub_type, subtract_error=subtract_error, display=display_bkg, savename="_".join([figname, "errors"]), plots_folder=plots_folder, return_background=True)
+
+        # _background is the same as background, but for the optimal binning
+        _background = None
+        _, _error_array, _, _, _ = proj_red.get_error(_data_array, _headers, error_array=None, data_mask=_data_mask, sub_type=error_sub_type, subtract_error=False, display=display_bkg, savename="_".join([figname, "errors"]), plots_folder=plots_folder, return_background=True)
+        _error_bkg = np.ones_like(_data_array) * error_bkg[:, 0, 0, np.newaxis, np.newaxis]
+        _data_array, _error_array, _background, _ = subtract_bkg(_data_array, _error_array, _data_mask, background, _error_bkg)
+
+        # Step 3: Align and rescale images with oversampling. (has to disable cropping in align_data function)
+        _data_array, _error_array, _headers, _, shifts, error_shifts = proj_red.align_data(_data_array, _headers, error_array=_error_array, background=_background,
+                                                                                           upsample_factor=10, ref_center=align_center, return_shifts=True)
+        print("Image shifts: {} \nShifts uncertainty: {}".format(shifts, error_shifts))
         _data_mask = np.ones(_data_array[0].shape, dtype=bool)

+        # Step 4: Compute Stokes I, Q, U
+        _background = np.array([np.array(bkg).reshape(1, 1) for bkg in _background])
+        _background_error = np.array([np.array(np.sqrt((bkg-_background[np.array([h['filtnam1'] == head['filtnam1'] for h in _headers], dtype=bool)].mean())
+                                                       ** 2/np.sum([h['filtnam1'] == head['filtnam1'] for h in _headers]))).reshape(1, 1) for bkg, head in zip(_background, _headers)])
+
+        _I_stokes, _Q_stokes, _U_stokes, _Stokes_cov = proj_red.compute_Stokes(_data_array, _error_array, _data_mask, _headers,
+                                                                               FWHM=None, scale=smoothing_scale, smoothing=smoothing_function, transmitcorr=transmitcorr)
+        _I_bkg, _Q_bkg, _U_bkg, _S_cov_bkg = proj_red.compute_Stokes(_background, _background_error, np.array(True).reshape(1, 1), _headers,
+                                                                     FWHM=None, scale=smoothing_scale, smoothing=smoothing_function, transmitcorr=False)
+
+        # Step 5: Compute polarimetric parameters (polarization degree and angle).
+        _P, _debiased_P, _s_P, _s_P_P, _PA, _s_PA, _s_PA_P = proj_red.compute_pol(_I_stokes, _Q_stokes, _U_stokes, _Stokes_cov, _headers)
+        _P_bkg, _debiased_P_bkg, _s_P_bkg, _s_P_P_bkg, _PA_bkg, _s_PA_bkg, _s_PA_P_bkg = proj_red.compute_pol(_I_bkg, _Q_bkg, _U_bkg, _S_cov_bkg, _headers)
+
+        # Step 6: Save image to FITS.
+        figname = "_".join([figname, figtype]) if figtype != "" else figname
+        _Stokes_test = proj_fits.save_Stokes(_I_stokes, _Q_stokes, _U_stokes, _Stokes_cov, _P, _debiased_P, _s_P, _s_P_P, _PA, _s_PA, _s_PA_P,
+                                             _headers, _data_mask, figname, data_folder=data_folder, return_hdul=True)
+
+        # Step 7: Print integrated values and plot the polarization maps.
+        _data_mask = _Stokes_test['data_mask'].data.astype(bool)
+        print(_data_array.shape, _data_mask.shape)
+        print("F_int({0:.0f} Angs) = ({1} ± {2})e{3} ergs.cm^-2.s^-1.Angs^-1".format(_headers[0]['photplam'], *sci_not(
+            _Stokes_test[0].data[_data_mask].sum()*_headers[0]['photflam'], np.sqrt(_Stokes_test[3].data[0, 0][_data_mask].sum())*_headers[0]['photflam'], 2, out=int)))
+        print("P_int = {0:.1f} ± {1:.1f} %".format(_headers[0]['p_int']*100., np.ceil(_headers[0]['p_int_err']*1000.)/10.))
+        print("PA_int = {0:.1f} ± {1:.1f} °".format(princ_angle(_headers[0]['pa_int']), princ_angle(np.ceil(_headers[0]['pa_int_err']*10.)/10.)))
+        # Background values
+        print("F_bkg({0:.0f} Angs) = ({1} ± {2})e{3} ergs.cm^-2.s^-1.Angs^-1".format(_headers[0]['photplam'], *sci_not(
+            _I_bkg[0, 0]*_headers[0]['photflam'], np.sqrt(_S_cov_bkg[0, 0][0, 0])*_headers[0]['photflam'], 2, out=int)))
+        print("P_bkg = {0:.1f} ± {1:.1f} %".format(_debiased_P_bkg[0, 0]*100., np.ceil(_s_P_bkg[0, 0]*1000.)/10.))
+        print("PA_bkg = {0:.1f} ± {1:.1f} °".format(princ_angle(_PA_bkg[0, 0]), princ_angle(np.ceil(_s_PA_bkg[0, 0]*10.)/10.)))
+        # Plot polarization map (Background is either total Flux, Polarization degree or Polarization degree error).
+        if px_scale.lower() not in ['full', 'integrate'] and not interactive:
+            proj_plots.polarization_map(deepcopy(_Stokes_test), _data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim,
+                                        step_vec=step_vec, vec_scale=vec_scale, savename="_".join([figname]), plots_folder=plots_folder, **options)
+            proj_plots.polarization_map(deepcopy(_Stokes_test), _data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec,
+                                        vec_scale=vec_scale, savename="_".join([figname, "I"]), plots_folder=plots_folder, display='Intensity', **options)
+            proj_plots.polarization_map(deepcopy(_Stokes_test), _data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec,
+                                        vec_scale=vec_scale, savename="_".join([figname, "P_flux"]), plots_folder=plots_folder, display='Pol_Flux', **options)
+            proj_plots.polarization_map(deepcopy(_Stokes_test), _data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec,
+                                        vec_scale=vec_scale, savename="_".join([figname, "P"]), plots_folder=plots_folder, display='Pol_deg', **options)
+            proj_plots.polarization_map(deepcopy(_Stokes_test), _data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec,
+                                        vec_scale=vec_scale, savename="_".join([figname, "PA"]), plots_folder=plots_folder, display='Pol_ang', **options)
+            proj_plots.polarization_map(deepcopy(_Stokes_test), _data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec,
+                                        vec_scale=vec_scale, savename="_".join([figname, "I_err"]), plots_folder=plots_folder, display='I_err', **options)
+            proj_plots.polarization_map(deepcopy(_Stokes_test), _data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec,
+                                        vec_scale=vec_scale, savename="_".join([figname, "P_err"]), plots_folder=plots_folder, display='Pol_deg_err', **options)
+            proj_plots.polarization_map(deepcopy(_Stokes_test), _data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec,
+                                        vec_scale=vec_scale, savename="_".join([figname, "SNRi"]), plots_folder=plots_folder, display='SNRi', **options)
+            proj_plots.polarization_map(deepcopy(_Stokes_test), _data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim, step_vec=step_vec,
+                                        vec_scale=vec_scale, savename="_".join([figname, "SNRp"]), plots_folder=plots_folder, display='SNRp', **options)
+        elif not interactive:
+            proj_plots.polarization_map(deepcopy(_Stokes_test), _data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut,
+                                        savename=figname, plots_folder=plots_folder, display='integrate', **options)
+        elif px_scale.lower() not in ['full', 'integrate']:
+            proj_plots.pol_map(_Stokes_test, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim)

+    else:
+        options = {'optimize': optimize, 'optimal_binning': False}
+        # Crop data to remove outside blank margins.
+        data_array, error_array, headers = proj_red.crop_array(data_array, headers, step=5, null_val=0., inside=True,
+                                                               display=display_crop, savename=figname, plots_folder=plots_folder)
+        data_mask = np.ones(data_array[0].shape, dtype=bool)

-    # Deconvolve data using Richardson-Lucy iterative algorithm with a gaussian PSF of given FWHM.
-    if deconvolve:
-        data_array = proj_red.deconvolve_array(data_array, headers, psf=psf, FWHM=psf_FWHM, scale=psf_scale, shape=psf_shape, iterations=iterations, algo=algo)
+        # Deconvolve data using Richardson-Lucy iterative algorithm with a gaussian PSF of given FWHM.
+        if deconvolve:
+            data_array = proj_red.deconvolve_array(data_array, headers, psf=psf, FWHM=psf_FWHM, scale=psf_scale, shape=psf_shape, iterations=iterations, algo=algo)

-    # Estimate error from data background, estimated from sub-image of desired sub_shape.
-    background = None
-    data_array, error_array, headers, background = proj_red.get_error(data_array, headers, error_array, data_mask=data_mask, sub_type=error_sub_type, subtract_error=subtract_error, display=display_bkg, savename="_".join([figname, "errors"]), plots_folder=plots_folder, return_background=True)
+        # Estimate error from data background, estimated from sub-image of desired sub_shape.
+        background = None
+        data_array, error_array, headers, background, error_bkg = proj_red.get_error(data_array, headers, error_array, data_mask=data_mask, sub_type=error_sub_type, subtract_error=subtract_error, display=display_bkg, savename="_".join([figname, "errors"]), plots_folder=plots_folder, return_background=True)

-        # if optimal_binning:
-        #     _data_array, _error_array, _background = proj_red.subtract_bkg(_data_array, error_array, background)  # _background is the same as background, but for the optimal binning to clarify
-
-    # Align and rescale images with oversampling.
-    data_array, error_array, headers, data_mask, shifts, error_shifts = proj_red.align_data(
-        data_array, headers, error_array=error_array, background=background, upsample_factor=10, ref_center=align_center, return_shifts=True)
+        # Align and rescale images with oversampling.
+        data_array, error_array, headers, data_mask, shifts, error_shifts = proj_red.align_data(
+            data_array, headers, error_array=error_array, background=background, upsample_factor=10, ref_center=align_center, return_shifts=True)

-        # if optimal_binning:
-        #     _data_array, _error_array, _headers, _data_mask, _shifts, _error_shifts = proj_red.align_data(
-        #         _data_array, _headers, error_array=_error_array, background=background, upsample_factor=10, ref_center=align_center, return_shifts=True)
-
         if display_align:
             print("Image shifts: {} \nShifts uncertainty: {}".format(shifts, error_shifts))
             proj_plots.plot_obs(data_array, headers, savename="_".join([figname, str(align_center)]), plots_folder=plots_folder, norm=LogNorm(
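Note: the densest added line above is the `_background_error` comprehension. As far as I can read it, it assigns each exposure a background uncertainty equal to its deviation from the mean background of all exposures sharing its `filtnam1` polarizer filter, scaled by 1/sqrt(N). A standalone restatement with scalar backgrounds (illustrative values only):

import numpy as np

def background_error(backgrounds, filters):
    # For each exposure: |bkg_i - mean(bkg over same filter)| / sqrt(N_same_filter),
    # i.e. the spread within a filter set treated as a standard error.
    backgrounds = np.asarray(backgrounds, dtype=float)
    errors = np.empty_like(backgrounds)
    for i, filt in enumerate(filters):
        same = np.array([f == filt for f in filters])
        errors[i] = np.abs(backgrounds[i] - backgrounds[same].mean()) / np.sqrt(same.sum())
    return errors

# Two POL0 exposures and one POL60 exposure (hypothetical numbers):
print(background_error([1.0, 1.2, 0.9], ['POL0', 'POL0', 'POL60']))
# -> [0.0707..., 0.0707..., 0.]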
@@ -187,12 +275,6 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop=
         I_bkg, Q_bkg, U_bkg, S_cov_bkg = proj_red.compute_Stokes(background, background_error, np.array(True).reshape(
             1, 1), headers, FWHM=None, scale=smoothing_scale, smoothing=smoothing_function, transmitcorr=False)

-        # if optimal_binning:
-        #     _I_stokes, _Q_stokes, _U_stokes, _Stokes_cov = proj_red.compute_Stokes(
-        #         _data_array, _error_array, _data_mask, _headers, FWHM=None, scale=smoothing_scale, smoothing=smoothing_function, transmitcorr=transmitcorr)
-        #     _I_bkg, _Q_bkg, _U_bkg, _S_cov_bkg = proj_red.compute_Stokes(_background, background_error, np.array(True).reshape(
-        #         1, 1), _headers, FWHM=None, scale=smoothing_scale, smoothing=smoothing_function, transmitcorr=False)
-
         # Step 3:
         # Rotate images to have North up
         if rotate_stokes:
@@ -264,12 +346,13 @@ if __name__ == "__main__":
     parser = argparse.ArgumentParser(description='Query MAST for target products')
     parser.add_argument('-t', '--target', metavar='targetname', required=False, help='the name of the target', type=str, default=None)
     parser.add_argument('-p', '--proposal_id', metavar='proposal_id', required=False, help='the proposal id of the data products', type=int, default=None)
+    parser.add_argument('-d', '--data_dir', metavar='directory_path', required=False, help='directory path to the data products', type=str, default=None)
     parser.add_argument('-f', '--files', metavar='path', required=False, nargs='*', help='the full or relative path to the data products', default=None)
     parser.add_argument('-o', '--output_dir', metavar='directory_path', required=False,
                         help='output directory path for the data products', type=str, default="./data")
     parser.add_argument('-c', '--crop', action='store_true', required=False, help='whether to crop the analysis region')
     parser.add_argument('-i', '--interactive', action='store_true', required=False, help='whether to output to the interactive analysis tool')
     args = parser.parse_args()
-    exitcode = main(target=args.target, proposal_id=args.proposal_id, infiles=args.files,
+    exitcode = main(target=args.target, proposal_id=args.proposal_id, data_dir=args.data_dir, infiles=args.files,
                     output_dir=args.output_dir, crop=args.crop, interactive=args.interactive)
     print("Finished with ExitCode: ", exitcode)
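Note: the new `-d`/`--data_dir` option feeds the `else:` branch added to `main` above, which picks up FITS files named `x*.fits` from a local directory instead of querying MAST. A hypothetical invocation (the script filename is not shown in this diff): `python FOC_reduction.py -t "NGC 1068" -d ./data/NGC1068 -o ./data`.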
@@ -235,7 +235,7 @@ def bkg_fit(data, error, mask, headers, subtract_error=True, display=False, save
     weights = 1/chi2**2
     weights /= weights.sum()

-    bkg = np.sum(weights*(coeff[:, 1]+np.abs(coeff[:, 2])*subtract_error))
+    bkg = np.sum(weights*(coeff[:, 1]+np.abs(coeff[:, 2]) * 0.01))  # why not just use 0.01

     error_bkg[i] *= bkg

@@ -342,7 +342,7 @@ def bkg_hist(data, error, mask, headers, sub_type=None, subtract_error=True, dis
         # popt, pcov = curve_fit(gausspol, binning[-1], hist, p0=p0)
         popt, pcov = curve_fit(gauss, binning[-1], hist, p0=p0)
         coeff.append(popt)
-        bkg = popt[1]+np.abs(popt[2])*subtract_error
+        bkg = popt[1]+np.abs(popt[2]) * 0.01  # why not just use 0.01

         error_bkg[i] *= bkg

@@ -443,7 +443,7 @@ def bkg_mini(data, error, mask, headers, sub_shape=(15, 15), subtract_error=True
         # Compute error : root mean square of the background
         sub_image = image[minima[0]:minima[0]+sub_shape[0], minima[1]:minima[1]+sub_shape[1]]
         # bkg = np.std(sub_image)  # Previously computed using standard deviation over the background
-        bkg = np.sqrt(np.sum(sub_image**2)/sub_image.size)*subtract_error if subtract_error > 0 else np.sqrt(np.sum(sub_image**2)/sub_image.size)
+        bkg = np.sqrt(np.sum(sub_image**2)/sub_image.size)*0.01 if subtract_error > 0 else np.sqrt(np.sum(sub_image**2)/sub_image.size)
         error_bkg[i] *= bkg

         # n_error_array[i] = np.sqrt(n_error_array[i]**2 + error_bkg[i]**2)
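Note: all three background estimators (`bkg_fit`, `bkg_hist`, `bkg_mini`) now hard-code the 0.01 that previously came in through `subtract_error`, as the in-line "why not just use 0.01" comments point out. A minimal numeric illustration of what changes in the `bkg_hist` expression (fit coefficients are made up):

import numpy as np

popt = np.array([120.0, 3.5e-3, 4.0e-4])  # [amplitude, mean, sigma] from the Gaussian fit - illustrative only

# Old expression: the offset above the fitted mean scaled with the subtract_error argument.
bkg_old_001 = popt[1] + np.abs(popt[2]) * 0.01  # subtract_error == 0.01 (the default set in the main script)
bkg_old_005 = popt[1] + np.abs(popt[2]) * 0.05  # subtract_error == 0.05

# New expression: always mean + 1% of the fitted width; subtract_error no longer enters these terms.
bkg_new = popt[1] + np.abs(popt[2]) * 0.01

assert np.isclose(bkg_new, bkg_old_001) and not np.isclose(bkg_new, bkg_old_005)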
@@ -41,8 +41,11 @@ prototypes :
 """

 from copy import deepcopy
-import numpy as np
 from os.path import join as path_join
+
+from astropy.wcs import WCS
+from astropy.io import fits
+from astropy.coordinates import SkyCoord
 import matplotlib.pyplot as plt
 from matplotlib.patches import Rectangle, Circle, FancyArrowPatch
 from matplotlib.path import Path
@@ -51,16 +54,14 @@ from matplotlib.colors import LogNorm
 import matplotlib.font_manager as fm
 import matplotlib.patheffects as pe
 from mpl_toolkits.axes_grid1.anchored_artists import AnchoredSizeBar, AnchoredDirectionArrows
-from astropy.wcs import WCS
-from astropy.io import fits
-from astropy.coordinates import SkyCoord
+import numpy as np
 from scipy.ndimage import zoom as sc_zoom

 try:
     from .utils import rot2D, princ_angle, sci_not
 except ImportError:
     from utils import rot2D, princ_angle, sci_not

-def plot_quiver(ax, stkI, stkQ, stkU, stk_cov, poldata, pangdata, wcs, convert, step_vec=1, vec_scale=2., adaptive_binning=False):
 def adaptive_binning(I_stokes, Q_stokes, U_stokes, Stokes_cov):
     shape = I_stokes.shape

@@ -93,7 +94,8 @@ def plot_quiver(ax, stkI, stkQ, stkU, stk_cov, poldata, pangdata, wcs, convert,

     return bin_map, bin_num

-    if adaptive_binning:
+def plot_quiver(ax, stkI, stkQ, stkU, stk_cov, poldata, pangdata, step_vec=1., vec_scale=2., optimal_binning=False):
+    if optimal_binning:
         bin_map, bin_num = adaptive_binning(stkI, stkQ, stkU, stk_cov)

         for i in range(1, bin_num+1):
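Note: after this refactor, `adaptive_binning` is a module-level helper returning a `(bin_map, bin_num)` labelling of the Stokes maps, and `plot_quiver` no longer takes `wcs`/`convert` but switches between the per-pixel and binned branches on `optimal_binning`. A hedged usage sketch of the new signature; the `(3, 3, ny, nx)` covariance layout and radian angles are my assumptions, not confirmed by this diff:

import numpy as np
import matplotlib.pyplot as plt
from lib.plots import plot_quiver  # new signature from this commit

ny, nx = 20, 20
stkI = np.ones((ny, nx))                     # illustrative uniform intensity map
stkQ = np.full((ny, nx), 0.05)
stkU = np.zeros((ny, nx))
stk_cov = np.zeros((3, 3, ny, nx))           # assumed per-pixel IQU covariance layout
poldata = np.hypot(stkQ, stkU) / stkI        # polarization degree
pangdata = 0.5 * np.arctan2(stkU, stkQ)      # polarization angle (units assumed)

fig, ax = plt.subplots()
ax.imshow(stkI, origin='lower', cmap='gray')
# False -> old per-pixel vectors; True -> vectors averaged over adaptive_binning regions.
plot_quiver(ax, stkI, stkQ, stkU, stk_cov, poldata, pangdata, step_vec=2, vec_scale=2., optimal_binning=False)
plt.show()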
@@ -114,8 +116,8 @@ def plot_quiver(ax, stkI, stkQ, stkU, stk_cov, poldata, pangdata, wcs, convert,
                 np.sqrt(bin_U**2 * bin_cov[1,1] + bin_Q**2 * bin_cov[2,2] - 2. * bin_Q * bin_U * bin_cov[1,2])

             ax.quiver(y_center, x_center, poldata * np.cos(np.pi/2.+pangdata), poldata * np.sin(np.pi/2.+pangdata), units='xy', angles='uv', scale=1./vec_scale, scale_units='xy', pivot='mid', headwidth=0., headlength=0., headaxislength=0., width=0.1, linewidth=0.5, color='white', edgecolor='white')
-            ax.quiver(y_center, x_center, poldata * np.cos(np.pi/2.+pangdata+3*pangdata_err), poldata * np.sin(np.pi/2.+pangdata+3*pangdata_err), units='xy', angles='uv', scale=1./vec_scale, scale_units='xy', pivot='mid', headwidth=0., headlength=0., headaxislength=0., width=0.1, linewidth=0.5, color='black', edgecolor='black', ls='dashed')
-            ax.quiver(y_center, x_center, poldata * np.cos(np.pi/2.+pangdata-3*pangdata_err), poldata * np.sin(np.pi/2.+pangdata-3*pangdata_err), units='xy', angles='uv', scale=1./vec_scale, scale_units='xy', pivot='mid', headwidth=0., headlength=0., headaxislength=0., width=0.1, linewidth=0.5, color='black', edgecolor='black', ls='dashed')
+            ax.quiver(y_center, x_center, poldata * np.cos(np.pi/2.+pangdata+pangdata_err), poldata * np.sin(np.pi/2.+pangdata+pangdata_err), units='xy', angles='uv', scale=1./vec_scale, scale_units='xy', pivot='mid', headwidth=0., headlength=0., headaxislength=0., width=0.1, linewidth=0.5, color='black', edgecolor='black', ls='dashed')
+            ax.quiver(y_center, x_center, poldata * np.cos(np.pi/2.+pangdata-pangdata_err), poldata * np.sin(np.pi/2.+pangdata-pangdata_err), units='xy', angles='uv', scale=1./vec_scale, scale_units='xy', pivot='mid', headwidth=0., headlength=0., headaxislength=0., width=0.1, linewidth=0.5, color='black', edgecolor='black', ls='dashed')

     else:
         X, Y = np.meshgrid(np.arange(stkI.shape[1]), np.arange(stkI.shape[0]))
@@ -318,7 +320,11 @@ def polarization_map(Stokes, data_mask=None, rectangle=None, SNRp_cut=3., SNRi_c
         The figure and ax created for interactive contour maps.
     """
     # Get data
+    optimal_binning = kwargs.get('optimal_binning', False)
+
     stkI = Stokes['I_stokes'].data.copy()
+    stkQ = Stokes['Q_stokes'].data.copy()
+    stkU = Stokes['U_stokes'].data.copy()
     stk_cov = Stokes['IQU_cov_matrix'].data.copy()
     pol = Stokes['Pol_deg_debiased'].data.copy()
     pol_err = Stokes['Pol_deg_err'].data.copy()
@@ -486,10 +492,11 @@ def polarization_map(Stokes, data_mask=None, rectangle=None, SNRp_cut=3., SNRi_c
         poldata[np.isfinite(poldata)] = 1./2.
     step_vec = 1
     vec_scale = 2.
-    X, Y = np.meshgrid(np.arange(stkI.shape[1]), np.arange(stkI.shape[0]))
-    U, V = poldata*np.cos(np.pi/2.+pangdata*np.pi/180.), poldata*np.sin(np.pi/2.+pangdata*np.pi/180.)
-    ax.quiver(X[::step_vec, ::step_vec], Y[::step_vec, ::step_vec], U[::step_vec, ::step_vec], V[::step_vec, ::step_vec], units='xy', angles='uv',
-              scale=1./vec_scale, scale_units='xy', pivot='mid', headwidth=0., headlength=0., headaxislength=0., width=0.5, linewidth=0.75, color='w', edgecolor='k')
+    # X, Y = np.meshgrid(np.arange(stkI.shape[1]), np.arange(stkI.shape[0]))
+    # U, V = poldata*np.cos(np.pi/2.+pangdata*np.pi/180.), poldata*np.sin(np.pi/2.+pangdata*np.pi/180.)
+    # ax.quiver(X[::step_vec, ::step_vec], Y[::step_vec, ::step_vec], U[::step_vec, ::step_vec], V[::step_vec, ::step_vec], units='xy', angles='uv',
+    #           scale=1./vec_scale, scale_units='xy', pivot='mid', headwidth=0., headlength=0., headaxislength=0., width=0.5, linewidth=0.75, color='w', edgecolor='k')
+    plot_quiver(ax, stkI, stkQ, stkU, stk_cov, poldata, pangdata, step_vec=step_vec, vec_scale=vec_scale, optimal_binning=optimal_binning)
     pol_sc = AnchoredSizeBar(ax.transData, vec_scale, r"$P$= 100 %", 4, pad=0.5, sep=5, borderpad=0.5, frameon=False, size_vertical=0.005, color='w')

     ax.add_artist(pol_sc)
@@ -692,7 +692,7 @@ def align_data(data_array, headers, error_array=None, background=None, upsample_
         full_headers.append(headers[0])
         err_array = np.concatenate((error_array, [np.zeros(ref_data.shape)]), axis=0)

-        full_array, err_array, full_headers = crop_array(full_array, full_headers, err_array, step=5, inside=False, null_val=0.)
+        # full_array, err_array, full_headers = crop_array(full_array, full_headers, err_array, step=5, inside=False, null_val=0.)

         data_array, ref_data, headers = full_array[:-1], full_array[-1], full_headers[:-1]
         error_array = err_array[:-1]
@@ -766,7 +766,7 @@ def align_data(data_array, headers, error_array=None, background=None, upsample_
         headers[i].update(headers_wcs[i].to_header())

     data_mask = rescaled_mask.all(axis=0)
-    data_array, error_array, data_mask, headers = crop_array(rescaled_image, headers, rescaled_error, data_mask, null_val=0.01*background)
+    # data_array, error_array, data_mask, headers = crop_array(rescaled_image, headers, rescaled_error, data_mask, null_val=0.01*background)

     if return_shifts:
         return data_array, error_array, headers, data_mask, shifts, errors
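Note: the two `crop_array` calls disabled in `align_data` above are the counterpart of the optimal-binning branch's "Step 3" comment in the main script ("has to disable cropping in align_data function"): with them commented out, `align_data` no longer trims the aligned frames, so the full images survive for the optimal-binning path. Since `align_data` is shared, this now applies to every caller, not just that branch.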