Merge branch 'testing'

add readfos script and overall bug fix for FOC_reduction
2024-12-19 16:46:25 +01:00
24 changed files with 1518 additions and 825 deletions

View File

@@ -1,2 +1,6 @@
# FOC_Reduction
FOC reduction pipeline
TODO:
- Add all polarimetry-capable instruments from HST (starting with FOS and ACS)
- Build a science case for future UV polarimeters (AGN outflow geometry, dust scattering, torus outline?)

doc/pipeline.pdf Normal file (BIN)

Binary file not shown.

license.md Normal file (+10)
View File

@@ -0,0 +1,10 @@
MIT License
Copyright (c) 2024 Thibault Barnouin
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@@ -1,9 +1,14 @@
#!/usr/bin/python
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
"""
Main script to which the steps of the FOC pipeline reduction are progressively added.
"""
from pathlib import Path
from sys import path as syspath
syspath.append(str(Path(__file__).parent.parent))
# Project libraries
from copy import deepcopy
from os import system
@@ -25,7 +30,7 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop=
# from lib.deconvolve import from_file_psf
psf = "gaussian" # Can be user-defined as well
# psf = from_file_psf(data_folder+psf_file)
psf_FWHM = 3.1
psf_FWHM = 1.55
psf_scale = "px"
psf_shape = None # (151, 151)
iterations = 1
@@ -35,13 +40,13 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop=
display_crop = False
# Background estimation
error_sub_type = "freedman-diaconis" # sqrt, sturges, rice, scott, freedman-diaconis (default) or shape (example (51, 51))
subtract_error = 1.0
display_bkg = False
error_sub_type = "scott" # sqrt, sturges, rice, scott, freedman-diaconis (default) or shape (example (51, 51))
subtract_error = 2.0
display_bkg = True
# Data binning
pxsize = 2
pxscale = "px" # pixel, arcsec or full
pxsize = 0.05
pxscale = "arcsec" # pixel, arcsec or full
rebin_operation = "sum" # sum or average
# Alignment
@@ -54,17 +59,17 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop=
# Smoothing
smoothing_function = "combine" # gaussian_after, weighted_gaussian_after, gaussian, weighted_gaussian or combine
smoothing_FWHM = 2.0 # If None, no smoothing is done
smoothing_scale = "px" # pixel or arcsec
smoothing_FWHM = 0.075 # If None, no smoothing is done
smoothing_scale = "arcsec" # pixel or arcsec
# Rotation
rotate_North = True
# Polarization map output
SNRp_cut = 3.0 # P measurements with SNR>3
SNRi_cut = 1.0 # I measurements with SNR>30, which implies an uncertainty in P of 4.7%.
P_cut = 5 # if >=1.0 cut on the signal-to-noise else cut on the confidence level in Q, U
SNRi_cut = 5.0 # I measurements with SNR>30, which implies an uncertainty in P of 4.7%.
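# (the quoted 4.7% corresponds to sigma_P ~ sqrt(2)/SNR_I evaluated at SNR_I = 30)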
flux_lim = None # lowest and highest flux displayed on plot, defaults to bkg and maximum in cut if None
scale_vec = 5
scale_vec = 3
step_vec = 1 # plot every vector in the array; if step_vec = 2, every other vector is plotted; if step_vec = 0, all vectors are displayed at full length
# Pipeline start
@@ -171,6 +176,7 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop=
proj_plots.plot_obs(
data_array,
headers,
shifts=shifts,
savename="_".join([figname, str(align_center)]),
plots_folder=plots_folder,
norm=LogNorm(vmin=data_array[data_array > 0.0].min() * headers[0]["photflam"], vmax=data_array[data_array > 0.0].max() * headers[0]["photflam"]),
@@ -292,7 +298,7 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop=
proj_plots.polarization_map(
deepcopy(Stokes_hdul),
data_mask,
SNRp_cut=SNRp_cut,
P_cut=P_cut,
SNRi_cut=SNRi_cut,
flux_lim=flux_lim,
step_vec=step_vec,
@@ -300,108 +306,31 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop=
savename="_".join([figname]),
plots_folder=plots_folder,
)
proj_plots.polarization_map(
deepcopy(Stokes_hdul),
data_mask,
SNRp_cut=SNRp_cut,
SNRi_cut=SNRi_cut,
flux_lim=flux_lim,
step_vec=step_vec,
scale_vec=scale_vec,
savename="_".join([figname, "I"]),
plots_folder=plots_folder,
display="Intensity",
)
proj_plots.polarization_map(
deepcopy(Stokes_hdul),
data_mask,
SNRp_cut=SNRp_cut,
SNRi_cut=SNRi_cut,
flux_lim=flux_lim,
step_vec=step_vec,
scale_vec=scale_vec,
savename="_".join([figname, "P_flux"]),
plots_folder=plots_folder,
display="Pol_Flux",
)
proj_plots.polarization_map(
deepcopy(Stokes_hdul),
data_mask,
SNRp_cut=SNRp_cut,
SNRi_cut=SNRi_cut,
flux_lim=flux_lim,
step_vec=step_vec,
scale_vec=scale_vec,
savename="_".join([figname, "P"]),
plots_folder=plots_folder,
display="Pol_deg",
)
proj_plots.polarization_map(
deepcopy(Stokes_hdul),
data_mask,
SNRp_cut=SNRp_cut,
SNRi_cut=SNRi_cut,
flux_lim=flux_lim,
step_vec=step_vec,
scale_vec=scale_vec,
savename="_".join([figname, "PA"]),
plots_folder=plots_folder,
display="Pol_ang",
)
proj_plots.polarization_map(
deepcopy(Stokes_hdul),
data_mask,
SNRp_cut=SNRp_cut,
SNRi_cut=SNRi_cut,
flux_lim=flux_lim,
step_vec=step_vec,
scale_vec=scale_vec,
savename="_".join([figname, "I_err"]),
plots_folder=plots_folder,
display="I_err",
)
proj_plots.polarization_map(
deepcopy(Stokes_hdul),
data_mask,
SNRp_cut=SNRp_cut,
SNRi_cut=SNRi_cut,
flux_lim=flux_lim,
step_vec=step_vec,
scale_vec=scale_vec,
savename="_".join([figname, "P_err"]),
plots_folder=plots_folder,
display="Pol_deg_err",
)
proj_plots.polarization_map(
deepcopy(Stokes_hdul),
data_mask,
SNRp_cut=SNRp_cut,
SNRi_cut=SNRi_cut,
flux_lim=flux_lim,
step_vec=step_vec,
scale_vec=scale_vec,
savename="_".join([figname, "SNRi"]),
plots_folder=plots_folder,
display="SNRi",
)
proj_plots.polarization_map(
deepcopy(Stokes_hdul),
data_mask,
SNRp_cut=SNRp_cut,
SNRi_cut=SNRi_cut,
flux_lim=flux_lim,
step_vec=step_vec,
scale_vec=scale_vec,
savename="_".join([figname, "SNRp"]),
plots_folder=plots_folder,
display="SNRp",
)
for figtype, figsuffix in zip(
["Intensity", "Pol_flux", "Pol_deg", "Pol_ang", "I_err", "P_err", "SNRi", "SNRp", "confp"],
["I", "P_flux", "P", "PA", "I_err", "P_err", "SNRi", "SNRp", "confP"],
):
try:
proj_plots.polarization_map(
deepcopy(Stokes_hdul),
data_mask,
P_cut=P_cut,
SNRi_cut=SNRi_cut,
flux_lim=flux_lim,
step_vec=step_vec,
scale_vec=scale_vec,
savename="_".join([figname, figsuffix]),
plots_folder=plots_folder,
display=figtype,
)
except ValueError:
pass
elif not interactive:
proj_plots.polarization_map(
deepcopy(Stokes_hdul), data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, savename=figname, plots_folder=plots_folder, display="integrate"
deepcopy(Stokes_hdul), data_mask, P_cut=P_cut, SNRi_cut=SNRi_cut, savename=figname, plots_folder=plots_folder, display="integrate"
)
elif pxscale.lower() not in ["full", "integrate"]:
proj_plots.pol_map(Stokes_hdul, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim)
proj_plots.pol_map(Stokes_hdul, P_cut=P_cut, SNRi_cut=SNRi_cut, step_vec=step_vec, scale_vec=scale_vec, flux_lim=flux_lim)
return outfiles

View File

@@ -1,3 +1,2 @@
from . import lib
from . import src
from .lib import *
from . import FOC_reduction

View File

@@ -286,11 +286,13 @@ def richardson_lucy(image, psf, iterations=20, clip=True, filter_epsilon=None):
image = image.astype(float_type, copy=False)
psf = psf.astype(float_type, copy=False)
psf /= psf.sum()
im_deconv = image.copy()
im_deconv = np.full(image.shape, 0.5, dtype=float_type)
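# start the iterations from a flat 0.5 image rather than from the data itself (as in scikit-image's richardson_lucy)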
psf_mirror = np.flip(psf)
eps = 1e-20
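# small constant added to the convolution below so that image / conv never divides by zero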
for _ in range(iterations):
conv = convolve(im_deconv, psf, mode="same")
conv = convolve(im_deconv, psf, mode="same") + eps
if filter_epsilon:
relative_blur = np.where(conv < filter_epsilon, 0, image / conv)
else:

View File

@@ -141,6 +141,7 @@ def save_Stokes(
header["TELESCOP"] = (header_stokes["TELESCOP"] if "TELESCOP" in list(header_stokes.keys()) else "HST", "telescope used to acquire data")
header["INSTRUME"] = (header_stokes["INSTRUME"] if "INSTRUME" in list(header_stokes.keys()) else "FOC", "identifier for instrument used to acuire data")
header["PHOTPLAM"] = (header_stokes["PHOTPLAM"], "Pivot Wavelength")
header["PHOTBW"] = (header_stokes["PHOTBW"], "RMS Bandwidth of the Filter and Detector")
header["PHOTFLAM"] = (header_stokes["PHOTFLAM"], "Inverse Sensitivity in DN/sec/cm**2/Angst")
header["EXPTIME"] = (header_stokes["EXPTIME"], "Total exposure time in sec")
header["PROPOSID"] = (header_stokes["PROPOSID"], "PEP proposal identifier for observation")

File diff suppressed because it is too large

View File

@@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
"""
Library function to query and download datasets from the MAST API.

View File

@@ -217,9 +217,9 @@ def bin_ndarray(ndarray, new_shape, operation="sum"):
elif operation.lower() in ["mean", "average", "avg"]:
ndarray = ndarray.mean(-1 * (i + 1))
else:
row_comp = np.mat(get_row_compressor(ndarray.shape[0], new_shape[0], operation))
col_comp = np.mat(get_column_compressor(ndarray.shape[1], new_shape[1], operation))
ndarray = np.array(row_comp * np.mat(ndarray) * col_comp)
row_comp = np.asmatrix(get_row_compressor(ndarray.shape[0], new_shape[0], operation))
col_comp = np.asmatrix(get_column_compressor(ndarray.shape[1], new_shape[1], operation))
ndarray = np.array(row_comp * np.asmatrix(ndarray) * col_comp)
return ndarray
@@ -416,12 +416,12 @@ def deconvolve_array(data_array, headers, psf="gaussian", FWHM=1.0, scale="px",
FWHM /= pxsize[0].min()
# Define Point-Spread-Function kernel
if psf.lower() in ["gauss", "gaussian"]:
if isinstance(psf, np.ndarray) and (len(psf.shape) == 2):
kernel = psf
elif psf.lower() in ["gauss", "gaussian"]:
if shape is None:
shape = np.min(data_array[0].shape) - 2, np.min(data_array[0].shape) - 2
kernel = gaussian_psf(FWHM=FWHM, shape=shape)
elif isinstance(psf, np.ndarray) and (len(psf.shape) == 2):
kernel = psf
else:
raise ValueError("{} is not a valid value for 'psf'".format(psf))
@@ -676,6 +676,7 @@ def rebin_array(data_array, error_array, headers, pxsize=2, scale="px", operatio
nw.wcs.crpix /= Dxy
nw.array_shape = new_shape
new_header["NAXIS1"], new_header["NAXIS2"] = nw.array_shape
new_header["PHOTFLAM"] = header["PHOTFLAM"] * (Dxy[0] * Dxy[1])
for key, val in nw.to_header().items():
new_header.set(key, val)
new_header["SAMPLING"] = (str(pxsize) + scale, "Resampling performed during reduction")

View File

@@ -4,6 +4,14 @@ import numpy as np
def rot2D(ang):
"""
Return the 2D rotation matrix of given angle in degrees
----------
Inputs:
ang : float
Angle in degrees
----------
Returns:
rot_mat : numpy.ndarray
2D matrix of shape (2,2) to perform vector rotation at angle "ang".
"""
alpha = np.pi * ang / 180
return np.array([[np.cos(alpha), np.sin(alpha)], [-np.sin(alpha), np.cos(alpha)]])
@@ -13,6 +21,14 @@ def princ_angle(ang):
"""
Return the principal angle in the 0° to 180° quadrant, as PA is always
defined modulo 180°.
----------
Inputs:
ang : float, numpy.ndarray
Angle in degrees. Can be an array of angles.
----------
Returns:
princ_ang : float, numpy.ndarray
Principal angle in the 0°-180° quadrant in the same shape as input.
"""
if not isinstance(ang, np.ndarray):
A = np.array([ang])
@@ -28,9 +44,100 @@ def princ_angle(ang):
return A[0]
def PCconf(QN, UN, QN_ERR, UN_ERR):
"""
Compute the 2-parameter confidence level on the polarization degree and
polarization angle from the PCUBE analysis.
----------
Inputs:
QN : float, numpy.ndarray
Normalized Q Stokes flux.
UN : float, numpy.ndarray
Normalized U Stokes flux.
QN_ERR : float, numpy.ndarray
Normalized error on Q Stokes flux.
UN_ERR : float, numpy.ndarray
Normalized error on U Stokes flux.
----------
Returns:
conf : numpy.ndarray
2D matrix of same shape as input containing the confidence on the polarization
computation between 0 and 1 for 2 parameters of interest (Q and U Stokes fluxes).
"""
mask = np.logical_and(QN_ERR > 0.0, UN_ERR > 0.0)
conf = np.full(QN.shape, -1.0)
chi2 = QN**2 / QN_ERR**2 + UN**2 / UN_ERR**2
conf[mask] = 1.0 - np.exp(-0.5 * chi2[mask])
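# chi2 follows a chi-square law with 2 degrees of freedom under the null hypothesis of zero
# polarization, so conf is its cumulative probability: e.g. QN = 3*QN_ERR and UN = 3*UN_ERR
# give chi2 = 18 and conf = 1 - exp(-9) ~ 0.9999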
return conf
def CenterConf(mask, PA, sPA):
"""
Compute the confidence map for the position of the center of emission.
----------
Inputs:
mask : bool, numpy.ndarray
Mask of the polarization vectors from which the center of emission should be drawn.
PA : float, numpy.ndarray
2D matrix containing the computed polarization angle.
sPA : float, numpy.ndarray
2D matrix containing the total uncertainties on the polarization angle.
----------
Returns:
conf : numpy.ndarray
2D matrix of same shape as input containing the confidence (between 0 and 1)
for each pixel to be the center of emission.
center : numpy.ndarray
Best-fit (x, y) pixel coordinates of the center of emission.
"""
chi2 = np.full(PA.shape, np.nan)
conf = np.full(PA.shape, -1.0)
yy, xx = np.indices(PA.shape)
def ideal(c):
itheta = np.full(PA.shape, np.nan)
itheta[(xx + 0.5) != c[0]] = np.degrees(np.arctan((yy[(xx + 0.5) != c[0]] + 0.5 - c[1]) / (xx[(xx + 0.5) != c[0]] + 0.5 - c[0])))
itheta[(xx + 0.5) == c[0]] = PA[(xx + 0.5) == c[0]]
return princ_angle(itheta)
def chisq(c):
return np.sum((princ_angle(PA[mask]) - ideal((c[0], c[1]))[mask]) ** 2 / sPA[mask] ** 2) / np.sum(mask)
for x, y in zip(xx[np.isfinite(PA)], yy[np.isfinite(PA)]):
chi2[y, x] = chisq((x, y))
from scipy.optimize import minimize
from scipy.special import gammainc
conf[np.isfinite(PA)] = 1.0 - gammainc(0.5, 0.5 * chi2[np.isfinite(PA)])
c0 = np.unravel_index(np.argmax(conf), conf.shape)[::-1]
result = minimize(chisq, c0, bounds=[(0, PA.shape[1]), (0.0, PA.shape[0])])
if result.success:
print("Center of emission found")
else:
print("Center of emission not found", result)
return conf, result.x
def sci_not(v, err, rnd=1, out=str):
"""
Return the scientifque error notation as a string.
Return the scientific error notation as a string.
----------
Inputs:
v : float
Value to be transformed into scientific notation.
err : float
Error on the value to be transformed into scientific notation.
rnd : int
Number of significant digits for the scientific notation.
Defaults to 1.
out : str or other
Format in which the notation should be returned: "str" means the notation
is returned as a single string, otherwise its separate components are returned.
Defaults to str.
----------
Returns:
notation : str or tuple
The value and its uncertainty in scientific notation, returned as a single
string if out is str, otherwise as its separate components with the power of ten.
"""
power = -int(("%E" % v)[-3:]) + 1
output = [r"({0}".format(round(v * 10**power, rnd)), round(v * 10**power, rnd)]
@@ -46,10 +153,21 @@ def sci_not(v, err, rnd=1, out=str):
else:
return *output[1:], -power
def wcs_PA(PC21, PC22):
"""
Return the position angle in degrees of a WCS with respect to the North direction,
from the coefficients of its transformation matrix.
----------
Inputs:
PC21 : float
Value of the WCS matrix coefficient PC[1,0]
PC22 : float
Value of the WCS matrix coefficient PC[1,1]
----------
Returns:
orient : float
Angle in degrees between the North direction and the Up direction of the WCS.
"""
if (abs(PC21) > abs(PC22)) and (PC21 >= 0):
orient = -np.arccos(PC22) * 180.0 / np.pi

View File

@@ -1,50 +0,0 @@
#!/usr/bin/python3
import numpy as np
from astropy.io import fits
from lib.plots import overplot_pol, overplot_radio
from matplotlib.colors import LogNorm
Stokes_UV = fits.open("./data/IC5063/5918/IC5063_FOC_b0.10arcsec_c0.20arcsec.fits")
Stokes_18GHz = fits.open("./data/IC5063/radio/IC5063_18GHz.fits")
Stokes_24GHz = fits.open("./data/IC5063/radio/IC5063_24GHz.fits")
Stokes_103GHz = fits.open("./data/IC5063/radio/IC5063_103GHz.fits")
Stokes_229GHz = fits.open("./data/IC5063/radio/IC5063_229GHz.fits")
Stokes_357GHz = fits.open("./data/IC5063/radio/IC5063_357GHz.fits")
# Stokes_S2 = fits.open("./data/IC5063/POLARIZATION_COMPARISON/S2_rot_crop.fits")
Stokes_IR = fits.open("./data/IC5063/IR/u2e65g01t_c0f_rot.fits")
# levelsMorganti = np.array([1.,2.,3.,8.,16.,32.,64.,128.])
levelsMorganti = np.logspace(-0.1249, 1.97, 7) / 100.0
levels18GHz = levelsMorganti * Stokes_18GHz[0].data.max()
A = overplot_radio(Stokes_UV, Stokes_18GHz)
A.plot(levels=levels18GHz, SNRp_cut=2.0, SNRi_cut=10.0, savename="./plots/IC5063/18GHz_overplot.pdf", vec_scale=None)
levels24GHz = levelsMorganti * Stokes_24GHz[0].data.max()
B = overplot_radio(Stokes_UV, Stokes_24GHz)
B.plot(levels=levels24GHz, SNRp_cut=2.0, SNRi_cut=10.0, savename="./plots/IC5063/24GHz_overplot.pdf", vec_scale=None)
levels103GHz = levelsMorganti * Stokes_103GHz[0].data.max()
C = overplot_radio(Stokes_UV, Stokes_103GHz)
C.plot(levels=levels103GHz, SNRp_cut=2.0, SNRi_cut=10.0, savename="./plots/IC5063/103GHz_overplot.pdf", vec_scale=None)
levels229GHz = levelsMorganti * Stokes_229GHz[0].data.max()
D = overplot_radio(Stokes_UV, Stokes_229GHz)
D.plot(levels=levels229GHz, SNRp_cut=2.0, SNRi_cut=10.0, savename="./plots/IC5063/229GHz_overplot.pdf", vec_scale=None)
levels357GHz = levelsMorganti * Stokes_357GHz[0].data.max()
E = overplot_radio(Stokes_UV, Stokes_357GHz)
E.plot(levels=levels357GHz, SNRp_cut=2.0, SNRi_cut=10.0, savename="./plots/IC5063/357GHz_overplot.pdf", vec_scale=None)
# F = overplot_pol(Stokes_UV, Stokes_S2)
# F.plot(SNRp_cut=3.0, SNRi_cut=80.0, savename='./plots/IC5063/S2_overplot.pdf', norm=LogNorm(vmin=5e-20,vmax=5e-18))
G = overplot_pol(Stokes_UV, Stokes_IR, cmap="inferno")
G.plot(
SNRp_cut=2.0,
SNRi_cut=10.0,
savename="./plots/IC5063/IR_overplot.pdf",
vec_scale=None,
norm=LogNorm(Stokes_IR[0].data.max() * Stokes_IR[0].header["photflam"] / 1e3, Stokes_IR[0].data.max() * Stokes_IR[0].header["photflam"]),
cmap="inferno_r",
)

View File

@@ -1,20 +0,0 @@
#!/usr/bin/python3
import numpy as np
from astropy.io import fits
from lib.plots import overplot_chandra, overplot_pol
from matplotlib.colors import LogNorm
Stokes_UV = fits.open("./data/MRK463E/5960/MRK463E_FOC_b0.05arcsec_c0.10arcsec.fits")
Stokes_IR = fits.open("./data/MRK463E/WFPC2/IR_rot_crop.fits")
Stokes_Xr = fits.open("./data/MRK463E/Chandra/X_ray_crop.fits")
levels = np.geomspace(1.0, 99.0, 7)
A = overplot_chandra(Stokes_UV, Stokes_Xr, norm=LogNorm())
A.plot(levels=levels, SNRp_cut=3.0, SNRi_cut=3.0, vec_scale=5, zoom=1, savename="./plots/MRK463E/Chandra_overplot.pdf")
A.write_to(path1="./data/MRK463E/FOC_data_Chandra.fits", path2="./data/MRK463E/Chandra_data.fits", suffix="aligned")
levels = np.array([0.8, 2, 5, 10, 20, 50]) / 100.0 * Stokes_UV[0].header["photflam"]
B = overplot_pol(Stokes_UV, Stokes_IR, norm=LogNorm())
B.plot(levels=levels, SNRp_cut=3.0, SNRi_cut=3.0, vec_scale=5, norm=LogNorm(8.5e-18, 2.5e-15), savename="./plots/MRK463E/IR_overplot.pdf")
B.write_to(path1="./data/MRK463E/FOC_data_WFPC.fits", path2="./data/MRK463E/WFPC_data.fits", suffix="aligned")

View File

@@ -1,6 +1,9 @@
#!/usr/bin/python
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# Project libraries
from pathlib import Path
from sys import path as syspath
syspath.append(str(Path(__file__).parent.parent))
import numpy as np
@@ -170,7 +173,7 @@ def main(infiles, target=None, output_dir="./data/"):
# Reduction parameters
kwargs = {}
# Polarization map output
kwargs["SNRp_cut"] = 3.0
kwargs["P_cut"] = 0.99
kwargs["SNRi_cut"] = 1.0
kwargs["flux_lim"] = 1e-19, 3e-17
kwargs["scale_vec"] = 5
@@ -183,9 +186,7 @@ def main(infiles, target=None, output_dir="./data/"):
new_infiles = []
for i, group in enumerate(grouped_infiles):
new_infiles.append(
FOC_reduction(target=target + "-" + str(i + 1), infiles=["/".join([data_folder, file]) for file in group], interactive=True)[0]
)
new_infiles.append(FOC_reduction(target=target + "-" + str(i + 1), infiles=["/".join([data_folder, file]) for file in group], interactive=True)[0])
infiles = new_infiles

View File

@@ -1,40 +0,0 @@
#!/usr/bin/python
from getopt import error as get_error
from getopt import getopt
from sys import argv
arglist = argv[1:]
options = "hf:p:i:l:"
long_options = ["help", "fits=", "snrp=", "snri=", "lim="]
fits_path = None
SNRp_cut, SNRi_cut = 3, 3
flux_lim = None
out_txt = None
try:
arg, val = getopt(arglist, options, long_options)
for curr_arg, curr_val in arg:
if curr_arg in ("-h", "--help"):
print("python3 analysis.py -f <path_to_reduced_fits> -p <SNRp_cut> -i <SNRi_cut> -l <flux_lim>")
elif curr_arg in ("-f", "--fits"):
fits_path = str(curr_val)
elif curr_arg in ("-p", "--snrp"):
SNRp_cut = int(curr_val)
elif curr_arg in ("-i", "--snri"):
SNRi_cut = int(curr_val)
elif curr_arg in ("-l", "--lim"):
flux_lim = list("".join(curr_val).split(","))
except get_error as err:
print(str(err))
if fits_path is not None:
from astropy.io import fits
from lib.plots import pol_map
Stokes_UV = fits.open(fits_path)
p = pol_map(Stokes_UV, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, flux_lim=flux_lim)
else:
print("python3 analysis.py -f <path_to_reduced_fits> -p <SNRp_cut> -i <SNRi_cut> -l <flux_lim>")

View File

@@ -1,214 +0,0 @@
#!/usr/bin/python
from src.lib.background import gauss, bin_centers
from src.lib.deconvolve import zeropad
from src.lib.reduction import align_data
from src.lib.plots import princ_angle
from matplotlib.colors import LogNorm
from os.path import join as path_join
from astropy.io import fits
from astropy.wcs import WCS
from scipy.ndimage import shift
from scipy.optimize import curve_fit
import numpy as np
import matplotlib.pyplot as plt
root_dir = path_join('/home/t.barnouin/Documents/Thesis/HST')
root_dir_K = path_join(root_dir, 'Kishimoto', 'output')
root_dir_S = path_join(root_dir, 'FOC_Reduction', 'output')
root_dir_data_S = path_join(root_dir, 'FOC_Reduction', 'data', 'NGC1068', '5144')
root_dir_plot_S = path_join(root_dir, 'FOC_Reduction', 'plots', 'NGC1068', '5144', 'notaligned')
filename_S = "NGC1068_FOC_b10.00pixel_not_aligned.fits"
plt.rcParams.update({'font.size': 15})
SNRi_cut = 30.
SNRp_cut = 3.
data_K = {}
data_S = {}
for d, i in zip(['I', 'Q', 'U', 'P', 'PA', 'sI', 'sQ', 'sU', 'sP', 'sPA'], [0, 1, 2, 5, 8, (3, 0, 0), (3, 1, 1), (3, 2, 2), 6, 9]):
data_K[d] = np.loadtxt(path_join(root_dir_K, d+'.txt'))
with fits.open(path_join(root_dir_data_S, filename_S)) as f:
if not type(i) is int:
data_S[d] = np.sqrt(f[i[0]].data[i[1], i[2]])
else:
data_S[d] = f[i].data
if i == 0:
header = f[i].header
wcs = WCS(header)
convert_flux = header['photflam']
bkg_S = np.median(data_S['I'])/3
bkg_K = np.median(data_K['I'])/3
# zeropad data to get same size of array
shape = data_S['I'].shape
for d in data_K:
data_K[d] = zeropad(data_K[d], shape)
# shift array to get same information in same pixel
data_arr, error_ar, heads, data_msk, shifts, shifts_err = align_data(np.array([data_S['I'], data_K['I']]), [header, header], error_array=np.array(
[data_S['sI'], data_K['sI']]), background=np.array([bkg_S, bkg_K]), upsample_factor=10., ref_center='center', return_shifts=True)
for d in data_K:
data_K[d] = shift(data_K[d], shifts[1], order=1, cval=0.)
# compute pol components from shifted array
for d in [data_S, data_K]:
for i in d:
d[i][np.isnan(d[i])] = 0.
d['P'] = np.where(np.logical_and(np.isfinite(d['I']), d['I'] > 0.), np.sqrt(d['Q']**2+d['U']**2)/d['I'], 0.)
d['sP'] = np.where(np.logical_and(np.isfinite(d['I']), d['I'] > 0.), np.sqrt((d['Q']**2*d['sQ']**2+d['U']**2*d['sU']**2) /
(d['Q']**2+d['U']**2)+((d['Q']/d['I'])**2+(d['U']/d['I'])**2)*d['sI']**2)/d['I'], 0.)
d['d_P'] = np.where(np.logical_and(np.isfinite(d['P']), np.isfinite(d['sP'])), np.sqrt(d['P']**2-d['sP']**2), 0.)
d['PA'] = 0.5*np.arctan2(d['U'], d['Q'])+np.pi
d['SNRp'] = np.zeros(d['d_P'].shape)
d['SNRp'][d['sP'] > 0.] = d['d_P'][d['sP'] > 0.]/d['sP'][d['sP'] > 0.]
d['SNRi'] = np.zeros(d['I'].shape)
d['SNRi'][d['sI'] > 0.] = d['I'][d['sI'] > 0.]/d['sI'][d['sI'] > 0.]
d['mask'] = np.logical_and(d['SNRi'] > SNRi_cut, d['SNRp'] > SNRp_cut)
data_S['mask'], data_K['mask'] = np.logical_and(data_S['mask'], data_K['mask']), np.logical_and(data_S['mask'], data_K['mask'])
#
# Compute histogram of measured polarization in cut
#
bins = int(data_S['mask'].sum()/5)
bin_size = 1./bins
mod_p = np.linspace(0., 1., 300)
for d in [data_S, data_K]:
d['hist'], d['bin_edges'] = np.histogram(d['d_P'][d['mask']], bins=bins, range=(0., 1.))
d['binning'] = bin_centers(d['bin_edges'])
peak, bins_fwhm = d['binning'][np.argmax(d['hist'])], d['binning'][d['hist'] > d['hist'].max()/2.]
fwhm = bins_fwhm[1]-bins_fwhm[0]
p0 = [d['hist'].max(), peak, fwhm]
try:
popt, pcov = curve_fit(gauss, d['binning'], d['hist'], p0=p0)
except RuntimeError:
popt = p0
d['hist_chi2'] = np.sum((d['hist']-gauss(d['binning'], *popt))**2)/d['hist'].size
d['hist_popt'] = popt
fig_p, ax_p = plt.subplots(num="Polarization degree histogram", figsize=(10, 6), constrained_layout=True)
ax_p.errorbar(data_S['binning'], data_S['hist'], xerr=bin_size/2., fmt='b.', ecolor='b', label='P through this pipeline')
ax_p.plot(mod_p, gauss(mod_p, *data_S['hist_popt']), 'b--', label='mean = {1:.2f}, stdev = {2:.2f}'.format(*data_S['hist_popt']))
ax_p.errorbar(data_K['binning'], data_K['hist'], xerr=bin_size/2., fmt='r.', ecolor='r', label="P through Kishimoto's pipeline")
ax_p.plot(mod_p, gauss(mod_p, *data_K['hist_popt']), 'r--', label='mean = {1:.2f}, stdev = {2:.2f}'.format(*data_K['hist_popt']))
ax_p.set(xlabel="Polarization degree", ylabel="Counts", title="Histogram of polarization degree computed in the cut for both pipelines.")
ax_p.legend()
fig_p.savefig(path_join(root_dir_plot_S, "NGC1068_K_pol_deg.png"), bbox_inches="tight", dpi=300)
#
# Compute angular difference between the maps in cut
#
dtheta = np.where(data_S['mask'], 0.5*np.arctan((np.sin(2*data_S['PA'])*np.cos(2*data_K['PA'])-np.cos(2*data_S['PA']) *
np.cos(2*data_K['PA']))/(np.cos(2*data_S['PA'])*np.cos(2*data_K['PA'])+np.cos(2*data_S['PA'])*np.sin(2*data_K['PA']))), np.nan)
fig_pa = plt.figure(num="Polarization degree alignement")
ax_pa = fig_pa.add_subplot(111, projection=wcs)
cbar_ax_pa = fig_pa.add_axes([0.88, 0.12, 0.01, 0.75])
ax_pa.set_title(r"Degree of alignement $\zeta$ of the polarization angles from the 2 pipelines in the cut")
im_pa = ax_pa.imshow(np.cos(2*dtheta), vmin=-1., vmax=1., origin='lower', cmap='bwr', label=r"$\zeta$ between this pipeline and Kishimoto's")
cbar_pa = plt.colorbar(im_pa, cax=cbar_ax_pa, label=r"$\zeta = \cos\left( 2 \cdot \delta\theta_P \right)$")
ax_pa.coords[0].set_axislabel('Right Ascension (J2000)')
ax_pa.coords[1].set_axislabel('Declination (J2000)')
fig_pa.savefig(path_join(root_dir_plot_S, "NGC1068_K_pol_ang.png"), bbox_inches="tight", dpi=300)
#
# Compute power uncertainty difference between the maps in cut
#
eta = np.where(data_S['mask'], np.abs(data_K['d_P']-data_S['d_P'])/np.sqrt(data_S['sP']**2+data_K['sP']**2)/2., np.nan)
fig_dif_p = plt.figure(num="Polarization power difference ratio")
ax_dif_p = fig_dif_p.add_subplot(111, projection=wcs)
cbar_ax_dif_p = fig_dif_p.add_axes([0.88, 0.12, 0.01, 0.75])
ax_dif_p.set_title(r"Degree of difference $\eta$ of the polarization from the 2 pipelines in the cut")
im_dif_p = ax_dif_p.imshow(eta, vmin=0., vmax=2., origin='lower', cmap='bwr_r', label=r"$\eta$ between this pipeline and Kishimoto's")
cbar_dif_p = plt.colorbar(im_dif_p, cax=cbar_ax_dif_p, label=r"$\eta = \frac{2 \left|P^K-P^S\right|}{\sqrt{{\sigma^K_P}^2+{\sigma^S_P}^2}}$")
ax_dif_p.coords[0].set_axislabel('Right Ascension (J2000)')
ax_dif_p.coords[1].set_axislabel('Declination (J2000)')
fig_dif_p.savefig(path_join(root_dir_plot_S, "NGC1068_K_pol_diff.png"), bbox_inches="tight", dpi=300)
#
# Compute angle uncertainty difference between the maps in cut
#
eta = np.where(data_S['mask'], np.abs(data_K['PA']-data_S['PA'])/np.sqrt(data_S['sPA']**2+data_K['sPA']**2)/2., np.nan)
fig_dif_pa = plt.figure(num="Polarization angle difference ratio")
ax_dif_pa = fig_dif_pa.add_subplot(111, projection=wcs)
cbar_ax_dif_pa = fig_dif_pa.add_axes([0.88, 0.12, 0.01, 0.75])
ax_dif_pa.set_title(r"Degree of difference $\eta$ of the polarization from the 2 pipelines in the cut")
im_dif_pa = ax_dif_pa.imshow(eta, vmin=0., vmax=2., origin='lower', cmap='bwr_r', label=r"$\eta$ between this pipeline and Kishimoto's")
cbar_dif_pa = plt.colorbar(im_dif_pa, cax=cbar_ax_dif_pa,
label=r"$\eta = \frac{2 \left|\theta_P^K-\theta_P^S\right|}{\sqrt{{\sigma^K_{\theta_P}}^2+{\sigma^S_{\theta_P}}^2}}$")
ax_dif_pa.coords[0].set_axislabel('Right Ascension (J2000)')
ax_dif_pa.coords[1].set_axislabel('Declination (J2000)')
fig_dif_pa.savefig(path_join(root_dir_plot_S, "NGC1068_K_polang_diff.png"), bbox_inches="tight", dpi=300)
# display both polarization maps to check consistency
# plt.rcParams.update({'font.size': 15})
fig = plt.figure(num="Polarization maps comparison", figsize=(10, 10))
ax = fig.add_subplot(111, projection=wcs)
fig.subplots_adjust(right=0.85)
cbar_ax = fig.add_axes([0.88, 0.12, 0.01, 0.75])
for d in [data_S, data_K]:
d['X'], d['Y'] = np.meshgrid(np.arange(d['I'].shape[1]), np.arange(d['I'].shape[0]))
d['xy_U'], d['xy_V'] = np.where(d['mask'], d['d_P']*np.cos(np.pi/2.+d['PA']), np.nan), np.where(d['mask'], d['d_P']*np.sin(np.pi/2.+d['PA']), np.nan)
im0 = ax.imshow(data_S['I']*convert_flux, norm=LogNorm(data_S['I'][data_S['I'] > 0].min()*convert_flux, data_S['I']
[data_S['I'] > 0].max()*convert_flux), origin='lower', cmap='gray', label=r"$I_{STOKES}$ through this pipeline")
quiv0 = ax.quiver(data_S['X'], data_S['Y'], data_S['xy_U'], data_S['xy_V'], units='xy', angles='uv', scale=0.5, scale_units='xy',
pivot='mid', headwidth=0., headlength=0., headaxislength=0., width=0.2, color='b', alpha=0.75, label="PA through this pipeline")
quiv1 = ax.quiver(data_K['X'], data_K['Y'], data_K['xy_U'], data_K['xy_V'], units='xy', angles='uv', scale=0.5, scale_units='xy',
pivot='mid', headwidth=0., headlength=0., headaxislength=0., width=0.1, color='r', alpha=0.75, label="PA through Kishimoto's pipeline")
ax.set_title(r"$SNR_P \geq$ "+str(SNRi_cut)+r"$\; & \; SNR_I \geq $"+str(SNRp_cut))
# ax.coords.grid(True, color='white', ls='dotted', alpha=0.5)
ax.coords[0].set_axislabel('Right Ascension (J2000)')
ax.coords[0].set_axislabel_position('b')
ax.coords[0].set_ticklabel_position('b')
ax.coords[1].set_axislabel('Declination (J2000)')
ax.coords[1].set_axislabel_position('l')
ax.coords[1].set_ticklabel_position('l')
# ax.axis('equal')
cbar = plt.colorbar(im0, cax=cbar_ax, label=r"$F_{\lambda}$ [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA^{-1}$]")
ax.legend(loc='upper right')
fig.savefig(path_join(root_dir_plot_S, "NGC1068_K_comparison.png"), bbox_inches="tight", dpi=300)
# compute integrated polarization parameters on a specific cut
for d in [data_S, data_K]:
d['I_dil'] = np.sum(d['I'][d['mask']])
d['sI_dil'] = np.sqrt(np.sum(d['sI'][d['mask']]**2))
d['Q_dil'] = np.sum(d['Q'][d['mask']])
d['sQ_dil'] = np.sqrt(np.sum(d['sQ'][d['mask']]**2))
d['U_dil'] = np.sum(d['U'][d['mask']])
d['sU_dil'] = np.sqrt(np.sum(d['sU'][d['mask']]**2))
d['P_dil'] = np.sqrt(d['Q_dil']**2+d['U_dil']**2)/d['I_dil']
d['sP_dil'] = np.sqrt((d['Q_dil']**2*d['sQ_dil']**2+d['U_dil']**2*d['sU_dil']**2)/(d['Q_dil']**2+d['U_dil']**2) +
((d['Q_dil']/d['I_dil'])**2+(d['U_dil']/d['I_dil'])**2)*d['sI_dil']**2)/d['I_dil']
d['d_P_dil'] = np.sqrt(d['P_dil']**2-d['sP_dil']**2)
d['PA_dil'] = princ_angle((90./np.pi)*np.arctan2(d['U_dil'], d['Q_dil']))
d['sPA_dil'] = princ_angle((90./(np.pi*(d['Q_dil']**2+d['U_dil']**2)))*np.sqrt(d['Q_dil']**2*d['sU_dil']**2+d['U_dil']**2*d['sU_dil']**2))
print('From this pipeline :\n', "P = {0:.2f} ± {1:.2f} %\n".format(
data_S['d_P_dil']*100., data_S['sP_dil']*100.), "PA = {0:.2f} ± {1:.2f} °".format(data_S['PA_dil'], data_S['sPA_dil']))
print("From Kishimoto's pipeline :\n", "P = {0:.2f} ± {1:.2f} %\n".format(
data_K['d_P_dil']*100., data_K['sP_dil']*100.), "PA = {0:.2f} ± {1:.2f} °".format(data_K['PA_dil'], data_K['sPA_dil']))
# compare different types of error
print("This pipeline : average sI/I={0:.2f} ; sQ/Q={1:.2f} ; sU/U={2:.2f} ; sP/P={3:.2f}".format(np.mean(data_S['sI'][data_S['mask']]/data_S['I'][data_S['mask']]), np.mean(
data_S['sQ'][data_S['mask']]/data_S['Q'][data_S['mask']]), np.mean(data_S['sU'][data_S['mask']]/data_S['U'][data_S['mask']]), np.mean(data_S['sP'][data_S['mask']]/data_S['P'][data_S['mask']])))
print("Kishimoto's pipeline : average sI/I={0:.2f} ; sQ/Q={1:.2f} ; sU/U={2:.2f} ; sP/P={3:.2f}".format(np.mean(data_K['sI'][data_S['mask']]/data_K['I'][data_S['mask']]), np.mean(
data_K['sQ'][data_S['mask']]/data_K['Q'][data_S['mask']]), np.mean(data_K['sU'][data_S['mask']]/data_K['U'][data_S['mask']]), np.mean(data_K['sP'][data_S['mask']]/data_K['P'][data_S['mask']])))
for d, i in zip(['I', 'Q', 'U', 'P', 'PA', 'sI', 'sQ', 'sU', 'sP', 'sPA'], [0, 1, 2, 5, 8, (3, 0, 0), (3, 1, 1), (3, 2, 2), 6, 9]):
data_K[d] = np.loadtxt(path_join(root_dir_K, d+'.txt'))
with fits.open(path_join(root_dir_data_S, filename_S)) as f:
if not type(i) is int:
data_S[d] = np.sqrt(f[i[0]].data[i[1], i[2]])
else:
data_S[d] = f[i].data
if i == 0:
header = f[i].header
# from Kishimoto's pipeline : IQU_dir, IQU_shift, IQU_stat, IQU_trans
# from my pipeline : raw_bg, raw_flat, raw_psf, raw_shift, raw_wav, IQU_dir
# but errors from my pipeline are propagated all along, how to compare then ?
plt.show()

package/src/emission_center.py Executable file (+81)
View File

@@ -0,0 +1,81 @@
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
from pathlib import Path
from sys import path as syspath
syspath.append(str(Path(__file__).parent.parent))
def main(infile, target=None, output_dir=None):
from os.path import join as pathjoin
import numpy as np
from astropy.io.fits import open as fits_open
from astropy.wcs import WCS
from lib.plots import polarization_map
from lib.utils import CenterConf, PCconf
from matplotlib.patches import Rectangle
from matplotlib.pyplot import figure, show
output = []
Stokes = fits_open(infile)
stkI = Stokes["I_STOKES"].data
QN, UN, QN_ERR, UN_ERR = np.full((4, stkI.shape[0], stkI.shape[1]), np.nan)
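# normalize Q, U and their 1-sigma errors by Stokes I wherever the intensity is positive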
for sflux, nflux in zip(
[Stokes["Q_STOKES"].data, Stokes["U_STOKES"].data, np.sqrt(Stokes["IQU_COV_MATRIX"].data[1, 1]), np.sqrt(Stokes["IQU_COV_MATRIX"].data[2, 2])],
[QN, UN, QN_ERR, UN_ERR],
):
nflux[stkI > 0.0] = sflux[stkI > 0.0] / stkI[stkI > 0.0]
Stokesconf = PCconf(QN, UN, QN_ERR, UN_ERR)
Stokesmask = Stokes["DATA_MASK"].data.astype(bool)
Stokessnr = np.zeros(Stokesmask.shape)
Stokessnr[Stokes["POL_DEG_ERR"].data > 0.0] = (
Stokes["POL_DEG_DEBIASED"].data[Stokes["POL_DEG_ERR"].data > 0.0] / Stokes["POL_DEG_ERR"].data[Stokes["POL_DEG_ERR"].data > 0.0]
)
Stokescentconf, Stokescenter = CenterConf(Stokesconf > 0.99, Stokes["POL_ANG"].data, Stokes["POL_ANG_ERR"].data)
Stokespos = WCS(Stokes[0].header).pixel_to_world(*Stokescenter)
if target is None:
target = Stokes[0].header["TARGNAME"]
fig = figure(figsize=(8, 8.5), layout="constrained")
fig, ax = polarization_map(Stokes, P_cut=0.99, step_vec=1, scale_vec=3, display="pf", fig=fig, width=0.33, linewidth=0.5)
ax.plot(*Stokescenter, marker="+", color="k", lw=3)
ax.plot(*Stokescenter, marker="+", color="gray", lw=1.5, label="Best confidence for center: {0}".format(Stokespos.to_string("hmsdms")))
ax.contour(Stokescentconf, [0.01], colors="k", linewidths=3)
confcentcont = ax.contour(Stokescentconf, [0.01], colors="gray")
# confcont = ax.contour(Stokesconf, [0.9905], colors="r")
# snr3cont = ax.contour(Stokessnr, [3.0], colors="b", linestyles="dashed")
# snr4cont = ax.contour(Stokessnr, [4.0], colors="b")
handles, labels = ax.get_legend_handles_labels()
labels.append(r"Center $Conf_{99\%}$ contour")
handles.append(Rectangle((0, 0), 1, 1, fill=False, ec=confcentcont.get_edgecolor()[0]))
# labels.append(r"Polarization $Conf_{99\%}$ contour")
# handles.append(Rectangle((0, 0), 1, 1, fill=False, ec=confcont.get_edgecolor()[0]))
# labels.append(r"$SNR_P \geq$ 3 contour")
# handles.append(Rectangle((0, 0), 1, 1, fill=False, ls="--", ec=snr3cont.get_edgecolor()[0]))
# labels.append(r"$SNR_P \geq$ 4 contour")
# handles.append(Rectangle((0, 0), 1, 1, fill=False, ec=snr4cont.get_edgecolor()[0]))
ax.legend(handles=handles, labels=labels, bbox_to_anchor=(0.0, -0.02, 1.0, 0.01), loc="upper left", mode="expand", borderaxespad=0.0)
if output_dir is not None:
filename = pathjoin(output_dir, "%s_center.pdf" % target)
fig.savefig(filename, dpi=300, facecolor="None")
output.append(filename)
show()
return output
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Look for the center of emission for a given reduced observation")
parser.add_argument("-t", "--target", metavar="targetname", required=False, help="the name of the target", type=str, default=None)
parser.add_argument("-f", "--file", metavar="path", required=False, help="The full or relative path to the data product", type=str, default=None)
parser.add_argument("-o", "--output_dir", metavar="directory_path", required=False, help="output directory path for the plots", type=str, default="./data")
args = parser.parse_args()
exitcode = main(infile=args.file, target=args.target, output_dir=args.output_dir)
print("Written to: ", exitcode)

View File

@@ -1,4 +1,9 @@
#!/usr/bin/python
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
from pathlib import Path
from sys import path as syspath
syspath.append(str(Path(__file__).parent.parent))
def main(infiles=None):
@@ -13,7 +18,7 @@ def main(infiles=None):
from numpy.linalg import eig
if infiles is None:
print('Usage: "python get_cdelt.py -f infiles"')
print('Usage: "env python3 get_cdelt.py -f infiles"')
return 1
prod = [["/".join(filepath.split("/")[:-1]), filepath.split("/")[-1]] for filepath in infiles]
data_folder = prod[0][0]

package/src/overplot_IC5063.py Executable file (+56)
View File

@@ -0,0 +1,56 @@
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
from pathlib import Path
from sys import path as syspath
syspath.append(str(Path(__file__).parent.parent))
import numpy as np
from astropy.io import fits
from lib.plots import overplot_pol, overplot_radio
from matplotlib.colors import LogNorm
Stokes_UV = fits.open("./data/IC5063/5918/IC5063_FOC_b0.10arcsec_c0.20arcsec.fits")
# Stokes_18GHz = fits.open("./data/IC5063/radio/IC5063_18GHz.fits")
# Stokes_24GHz = fits.open("./data/IC5063/radio/IC5063_24GHz.fits")
# Stokes_103GHz = fits.open("./data/IC5063/radio/IC5063_103GHz.fits")
# Stokes_229GHz = fits.open("./data/IC5063/radio/IC5063_229GHz.fits")
# Stokes_357GHz = fits.open("./data/IC5063/radio/IC5063_357GHz.fits")
# Stokes_S2 = fits.open("./data/IC5063/POLARIZATION_COMPARISON/S2_rot_crop.fits")
Stokes_IR = fits.open("./data/IC5063/IR/u2e65g01t_c0f_rot.fits")
# levelsMorganti = np.array([1.,2.,3.,8.,16.,32.,64.,128.])
# levelsMorganti = np.logspace(-0.1249, 1.97, 7) / 100.0
# levels18GHz = levelsMorganti * Stokes_18GHz[0].data.max()
# A = overplot_radio(Stokes_UV, Stokes_18GHz)
# A.plot(levels=levels18GHz, P_cut=2.0, SNRi_cut=10.0, savename="./plots/IC5063/18GHz_overplot.pdf", scale_vec=None)
# levels24GHz = levelsMorganti * Stokes_24GHz[0].data.max()
# B = overplot_radio(Stokes_UV, Stokes_24GHz)
# B.plot(levels=levels24GHz, P_cut=2.0, SNRi_cut=10.0, savename="./plots/IC5063/24GHz_overplot.pdf", scale_vec=None)
# levels103GHz = levelsMorganti * Stokes_103GHz[0].data.max()
# C = overplot_radio(Stokes_UV, Stokes_103GHz)
# C.plot(levels=levels103GHz, P_cut=2.0, SNRi_cut=10.0, savename="./plots/IC5063/103GHz_overplot.pdf", scale_vec=None)
# levels229GHz = levelsMorganti * Stokes_229GHz[0].data.max()
# D = overplot_radio(Stokes_UV, Stokes_229GHz)
# D.plot(levels=levels229GHz, P_cut=2.0, SNRi_cut=10.0, savename="./plots/IC5063/229GHz_overplot.pdf", scale_vec=None)
# levels357GHz = levelsMorganti * Stokes_357GHz[0].data.max()
# E = overplot_radio(Stokes_UV, Stokes_357GHz)
# E.plot(levels=levels357GHz, P_cut=2.0, SNRi_cut=10.0, savename="./plots/IC5063/357GHz_overplot.pdf", scale_vec=None)
# F = overplot_pol(Stokes_UV, Stokes_S2)
# F.plot(P_cut=3.0, SNRi_cut=80.0, savename='./plots/IC5063/S2_overplot.pdf', norm=LogNorm(vmin=5e-20,vmax=5e-18))
G = overplot_pol(Stokes_UV, Stokes_IR, cmap="inferno")
G.plot(
P_cut=0.99,
SNRi_cut=1.0,
savename="./plots/IC5063/IR_overplot.pdf",
scale_vec=None,
norm=LogNorm(Stokes_IR[0].data.max() * Stokes_IR[0].header["photflam"] / 1e3, Stokes_IR[0].data.max() * Stokes_IR[0].header["photflam"]),
cmap="inferno",
)

package/src/overplot_MRK463E.py Executable file (+46)
View File

@@ -0,0 +1,46 @@
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
from pathlib import Path
from sys import path as syspath
syspath.append(str(Path(__file__).parent.parent))
import numpy as np
from astropy.io import fits
from lib.plots import overplot_chandra, overplot_pol
from matplotlib.colors import LogNorm
Stokes_UV = fits.open("./data/MRK463E/5960/MRK463E_FOC_b0.05arcsec_c0.10arcsec.fits")
# Stokes_IR = fits.open("./data/MRK463E/WFPC2/IR_rot_crop.fits")
# Stokes_Xr = fits.open("./data/MRK463E/Chandra/X_ray_crop.fits")
Radio = fits.open("./data/MRK463E/EMERLIN/Voorwerpjes_1356+1822_1356+1822_uniform-image.fits")
# levels = np.geomspace(1.0, 99.0, 7)
# A = overplot_chandra(Stokes_UV, Stokes_Xr, norm=LogNorm())
# A.plot(levels=levels, P_cut=0.99, SNRi_cut=1.0, scale_vec=5, zoom=1, savename="./plots/MRK463E/Chandra_overplot.pdf")
# A.write_to(path1="./data/MRK463E/FOC_data_Chandra.fits", path2="./data/MRK463E/Chandra_data.fits", suffix="aligned")
# levels = np.array([0.8, 2, 5, 10, 20, 50]) / 100.0 * Stokes_UV[0].header["photflam"]
# B = overplot_pol(Stokes_UV, Stokes_IR, norm=LogNorm())
# B.plot(levels=levels, P_cut=0.99, SNRi_cut=1.0, scale_vec=5, norm=LogNorm(8.5e-18, 2.5e-15), savename="./plots/MRK463E/IR_overplot.pdf")
# B.write_to(path1="./data/MRK463E/FOC_data_WFPC.fits", path2="./data/MRK463E/WFPC_data.fits", suffix="aligned")
# levels = np.array([0.8, 2, 5, 10, 20, 50]) / 100.0 * Stokes_UV[0].header["photflam"]
levels = np.array([5, 10, 20, 50])
C = overplot_pol(Stokes_UV, Radio, norm=LogNorm())
C.other_im.set(norm=LogNorm(1e-4, 2e-2))
C.plot(
levels=levels,
P_cut=0.99,
SNRi_cut=1.0,
step_vec=0,
scale_vec=3,
norm=LogNorm(1e-4, 2e-2),
cmap="inferno_r",
width=0.5,
linewidth=0.5,
disptype="snri",
savename="./plots/MRK463E/EMERLIN_snri_overplot.pdf",
)
C.write_to(path1="./data/MRK463E/FOC_data_EMERLIN.fits", path2="./data/MRK463E/EMERLIN_data.fits", suffix="aligned")

package/src/overplot_NGC1068.py Executable file (+42)
View File

@@ -0,0 +1,42 @@
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
from pathlib import Path
from sys import path as syspath
syspath.append(str(Path(__file__).parent.parent))
import numpy as np
from astropy.io import fits
from lib.plots import overplot_pol, plt
from matplotlib.colors import LogNorm
Stokes_UV = fits.open("./data/NGC1068/5144/NGC1068_FOC_b0.05arcsec_c0.07arcsec_crop.fits")
Radio = fits.open("./data/NGC1068/MERLIN-VLA/Combined_crop.fits")
levels = np.logspace(-0.5, 1.99, 7) / 100.0 * Stokes_UV[0].data.max() * Stokes_UV[0].header["photflam"]
A = overplot_pol(Stokes_UV, Radio, norm=LogNorm())
A.plot(
levels=levels,
P_cut=0.99,
SNRi_cut=1.0,
scale_vec=3,
step_vec=1,
norm=LogNorm(5e-5, 1e-1),
cmap="inferno_r",
width=0.8,
linewidth=1.2,
)
A.add_vector(
A.other_wcs.celestial.wcs.crpix - (1.0, 1.0),
pol_deg=0.124,
pol_ang=100.7,
width=2.0,
linewidth=1.0,
scale=1.0 / (A.px_scale * 6.0),
edgecolor="w",
color="b",
label=r"IXPE torus: P = $12.4 (\pm 3.6)$%, PA = $100.7 (\pm 8.3)$°",
)
A.fig_overplot.savefig("./plots/NGC1068/NGC1068_radio_overplot_full.pdf", dpi=300, bbox_inches="tight")
plt.show()
A.write_to(path1="./data/NGC1068/FOC_Radio.fits", path2="./data/NGC1068/Radio_FOC.fits", suffix="aligned")

package/src/readfos.py Executable file (+426)
View File

@@ -0,0 +1,426 @@
#!/usr/bin/env python3
import matplotlib.pyplot as plt
import numpy as np
from astropy.io.fits import getheader, getdata, hdu
from os.path import join as join_path, exists as path_exists
from os import system
from copy import deepcopy
# consecutive spectra are made up of the sum of all previous ACCUMs, so the S/N increases along the sequence
# _c0f.fits - calibrated vacuum wavelength
# _c1f.fits - calibrated fluxes (ergs sec^-1 cm^-2 Angs^-1)
# _c2f.fits - statistical errors (no sky, bkg subtraction, flatfield or sensitivity error)
# _c3f.fits - for SPECTROPOLARIMETRY mode contains I, Q, U, V, linear and circular polarization and polarization position angle
# _c4f.fits - object+sky count rate spectrum (corrected for overscanning, noise rejection, lost signal)
# _c5f.fits - flat-fielded object count rate spectrum (corrected for paired pulses, detector background, flatfield structure, GIM effects)
# _c6f.fits - flat-fielded sky count rate spectrum (corrected for paired pulses, detector background, flatfield structure, GIM effects)
# _c7f.fits - background count rate spectrum (scaled background subtracted from c4 products)
# _c8f.fits - flat-fielded and sky-subtracted object count rate spectrum
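# Example (sketch, with a hypothetical file root "y0abc123t"): the spectropolarimetry products used below are read as
#   wav = getdata("y0abc123t_c0f.fits")     # calibrated vacuum wavelengths
#   stokes = getdata("y0abc123t_c3f.fits")  # 14 quantities per pass: I, Q, U, V, their errors, then P, PC, PA and their errors
# which is what FOSspecpol.__init__ does from a file root.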
def princ_angle(ang):
"""
Return the principal angle in the 0° to 180° quadrant, as PA is always
defined modulo 180°.
----------
Inputs:
ang : float, numpy.ndarray
Angle in degrees. Can be an array of angles.
----------
Returns:
princ_ang : float, numpy.ndarray
Principal angle in the 0°-180° quadrant in the same shape as input.
"""
if not isinstance(ang, np.ndarray):
A = np.array([ang])
else:
A = np.array(ang)
while np.any(A < 0.0):
A[A < 0.0] = A[A < 0.0] + 360.0
while np.any(A >= 180.0):
A[A >= 180.0] = A[A >= 180.0] - 180.0
if type(ang) is type(A):
return A
else:
return A[0]
class specpol(object):
"""
Class object for studying spectropolarimetry.
"""
def __init__(self, other=None):
if isinstance(other, specpol):
# Copy constructor
self.wav = deepcopy(other.wav)
self.wav_err = deepcopy(other.wav_err)
self.I = deepcopy(other.I)
self.Q = deepcopy(other.Q)
self.U = deepcopy(other.U)
self.V = deepcopy(other.V)
self.IQU_cov = deepcopy(other.IQU_cov)
else:
# Initialise to zero
if isinstance(other, int):
self.zero(other)
else:
self.zero()
def zero(self, n=1):
"""
Set all values to zero.
"""
self.wav = np.zeros(n)
self.wav_err = np.zeros((n, 2))
self.I = np.zeros(n)
self.Q = np.zeros(n)
self.U = np.zeros(n)
self.V = np.zeros(n)
self.IQU_cov = np.zeros((4, 4, n))
@property
def I_err(self):
return np.sqrt(self.IQU_cov[0][0])
@property
def Q_err(self):
return np.sqrt(self.IQU_cov[1][1])
@property
def U_err(self):
return np.sqrt(self.IQU_cov[2][2])
@property
def V_err(self):
return np.sqrt(self.IQU_cov[3][3])
@property
def QN(self):
np.seterr(divide="ignore", invalid="ignore")
return self.Q / np.where(self.I > 0, self.I, np.nan)
@property
def QN_err(self):
np.seterr(divide="ignore", invalid="ignore")
return self.Q_err / np.where(self.I > 0, self.I, np.nan)
@property
def UN(self):
np.seterr(divide="ignore", invalid="ignore")
return self.U / np.where(self.I > 0, self.I, np.nan)
@property
def UN_err(self):
np.seterr(divide="ignore", invalid="ignore")
return self.U_err / np.where(self.I > 0, self.I, np.nan)
@property
def VN(self):
np.seterr(divide="ignore", invalid="ignore")
return self.V / np.where(self.I > 0, self.I, np.nan)
@property
def VN_err(self):
np.seterr(divide="ignore", invalid="ignore")
return self.V_err / np.where(self.I > 0, self.I, np.nan)
@property
def PF(self):
np.seterr(divide="ignore", invalid="ignore")
return np.sqrt(self.Q**2 + self.U**2 + self.V**2)
@property
def PF_err(self):
np.seterr(divide="ignore", invalid="ignore")
return np.sqrt(self.Q**2 * self.Q_err**2 + self.U**2 * self.U_err**2 + self.V**2 * self.V_err**2) / np.where(self.PF > 0, self.PF, np.nan)
@property
def P(self):
np.seterr(divide="ignore", invalid="ignore")
return self.PF / np.where(self.I > 0, self.I, np.nan)
@property
def P_err(self):
np.seterr(divide="ignore", invalid="ignore")
return np.where(self.I > 0, np.sqrt(self.PF_err**2 + (self.PF / self.I) ** 2 * self.I_err**2) / self.I, np.nan)
@property
def PA(self):
return princ_angle((90.0 / np.pi) * np.arctan2(self.U, self.Q))
@property
def PA_err(self):
return princ_angle((90.0 / np.pi) * np.sqrt(self.U**2 * self.Q_err**2 + self.Q**2 * self.U_err**2) / np.where(self.PF > 0, self.PF**2, np.nan))
def rotate(self, angle):
alpha = np.pi / 180.0 * angle
mrot = np.array(
[
[1.0, 0.0, 0.0, 0.0],
[0.0, np.cos(2.0 * alpha), np.sin(2.0 * alpha), 0.0],
[0.0, -np.sin(2.0 * alpha), np.cos(2.0 * alpha), 0.0],
[0.0, 0.0, 0.0, 1.0],
]
)
self.I, self.Q, self.U, self.V = np.dot(mrot, np.array([self.I, self.Q, self.U, self.V]))
self.IQU_cov = np.dot(mrot, np.dot(self.IQU_cov.T, mrot.T).T)
def bin_size(self, size):
"""
Rebin spectra to selected bin size in Angstrom.
"""
bin_edges = np.arange(np.floor(self.wav.min()), np.ceil(self.wav.max()), size)
in_bin = np.digitize(self.wav, bin_edges) - 1
spec_b = specpol(bin_edges.shape[0] - 1)
for i in range(bin_edges.shape[0] - 1):
spec_b.wav[i] = np.mean(self.wav[in_bin == i])
spec_b.wav_err[i] = (spec_b.wav[i] - bin_edges[i], bin_edges[i + 1] - spec_b.wav[i])
spec_b.I[i] = np.sum(self.I[in_bin == i])
spec_b.Q[i] = np.sum(self.Q[in_bin == i])
spec_b.U[i] = np.sum(self.U[in_bin == i])
spec_b.V[i] = np.sum(self.V[in_bin == i])
for m in range(4):
spec_b.IQU_cov[m][m][i] = np.sum(self.IQU_cov[m][m][in_bin == i])
for n in [k for k in range(4) if k != m]:
spec_b.IQU_cov[m][n][i] = np.sqrt(np.sum(self.IQU_cov[m][n][in_bin == i] ** 2))
return spec_b
def dump_txt(self, filename, output_dir=""):
"""
Dump current spectra to a text file.
"""
data_dump = np.array([self.wav, self.I, self.Q, self.U, self.V, self.P, self.PA]).T
np.savetxt(join_path(output_dir, filename + ".txt"), data_dump)
return join_path(output_dir, filename)
def plot(self, fig=None, ax=None, savename=None, plots_folder=""):
"""
Display current spectra.
"""
if fig is None:
if ax is None:
self.fig, self.ax = plt.subplots(1, 2, sharex=True, figsize=(20, 5), layout="constrained")
else:
self.ax = ax
else:
if ax is None:
self.fig = fig
self.ax = self.fig.add_subplot(111)
else:
self.fig = fig
self.ax = ax
if isinstance(ax, np.ndarray):
ax1, ax2 = self.ax[:2]
else:
ax1 = self.ax
# Display flux and polarized flux on first ax
ax1.set_xlabel(r"Wavelength [$\AA$]")
ax1.errorbar(self.wav, self.I, xerr=self.wav_err.T, yerr=self.I_err, color="k", fmt=".", label="I")
ax1.errorbar(self.wav, self.PF, xerr=self.wav_err.T, yerr=self.PF_err, color="b", fmt=".", label="PF")
ax1.set_ylabel(r"F$_\lambda$ [erg s$^{-1}$ cm$^{-2} \AA^{-1}$]")
ax1.legend(ncols=2, loc=1)
if isinstance(ax, np.ndarray):
# When given 2 axes, display P and PA on second
ax2.set_xlabel(r"Wavelength [$\AA$]")
ax2.errorbar(self.wav, self.P, xerr=self.wav_err.T, yerr=self.P_err, color="b", fmt=".", label="P")
ax2.set_ylim([0.0, 1.0])
ax2.set_ylabel(r"P", color="b")
ax2.tick_params(axis="y", color="b", labelcolor="b")
ax22 = ax2.twinx()
ax22.errorbar(self.wav, self.PA, xerr=self.wav_err.T, yerr=self.PA_err, color="r", fmt=".", label="PA [°]")
ax22.set_ylim([0.0, 180.0])
ax22.set_ylabel(r"PA", color="r")
ax22.tick_params(axis="y", color="r", labelcolor="r")
h2, l2 = ax2.get_legend_handles_labels()
h22, l22 = ax22.get_legend_handles_labels()
ax2.legend(h2 + h22, l2 + l22, ncols=2, loc=1)
if hasattr(self, "fig") and savename is not None:
self.fig.savefig(join_path(plots_folder, savename + ".pdf"), dpi=300, bbox_inches="tight")
return self.fig, self.ax, join_path(plots_folder, savename + ".pdf")
elif hasattr(self, "fig"):
return self.fig, self.ax
else:
return self.ax
class FOSspecpol(specpol):
"""
Class object for studying FOS SPECTROPOLARIMETRY mode spectra.
"""
def __init__(self, stokes, data_folder=""):
"""
Initialise object from fits filename or hdulist.
"""
if isinstance(stokes, str):
self.file_root = stokes.split("_")[0]
self.hd = getheader(join_path(data_folder, self.file_root + "_c0f.fits"))
wav = getdata(join_path(data_folder, self.file_root + "_c0f.fits"))
stokes = getdata(join_path(data_folder, self.file_root + "_c3f.fits"))
elif isinstance(stokes, hdu.hdulist.HDUList):
self.hd = stokes[0].header
self.file_root = self.hd["FILENAME"].split("_")[0]
wav = getdata(join_path(data_folder, self.file_root + "_c0f.fits"))
stokes = stokes[0].data
else:
raise ValueError("Input must be a path to a fits file or an HDUlist")
self.wav = np.concat((wav[0:2, :], wav[0].reshape(1, wav.shape[1]), wav[0].reshape(1, wav.shape[1])), axis=0)
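# 4 wavelength rows: PASS1, PASS2, then PASS1's wavelength grid reused for the two combined spectra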
self.wav_err = np.zeros((self.wav.shape[0], self.wav.shape[1], 2))
self.IQU_cov = np.zeros((4, 4, self.wav.shape[0], self.wav.shape[1]))
self.I = stokes[0::14]
self.IQU_cov[0, 0] = stokes[4::14] ** 2
self.Q = stokes[1::14]
self.IQU_cov[1, 1] = stokes[5::14] ** 2
self.U = stokes[2::14]
self.IQU_cov[2, 2] = stokes[6::14] ** 2
self.V = stokes[3::14]
self.IQU_cov[3, 3] = stokes[7::14] ** 2
self.subspec = {}
for i, name in enumerate(["PASS1", "PASS2", "PASS12", "PASS12corr"]):
spec = specpol(self.wav[i].shape[0])
spec.wav, spec.wav_err, spec.I, spec.Q, spec.U, spec.V = self.wav[i], self.wav_err[i], self.I[i], self.Q[i], self.U[i], self.V[i]
spec.IQU_cov = self.IQU_cov[:, :, i, :]
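# rotate each pass into the sky frame by the aperture position angle; PASS12corr (i == 3) is left unrotated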
spec.rotate((i != 3) * self.hd["PA_APER"])
self.subspec[name] = spec
self.P_fos = stokes[8::14]
self.P_fos_err = stokes[11::14]
self.PC_fos = stokes[9::14]
self.PC_fos_err = stokes[12::14]
self.PA_fos = princ_angle(
np.degrees(stokes[10::14]) + np.concat((np.ones((3, stokes.shape[1])), np.zeros((1, stokes.shape[1]))), axis=0) * self.hd["PA_APER"]
)
self.PA_fos_err = princ_angle(np.degrees(stokes[13::14]))
def __del__(self):
if hasattr(self, "ax"):
del self.ax
if hasattr(self, "fig"):
del self.fig
def dump_txt(self, filename, spec_list=None, output_dir=""):
"""
Dump current spectra to a text file.
"""
outfiles = []
if spec_list is None:
spec_list = self.subspec
for i, name in enumerate(["PASS1", "PASS2", "PASS12", "PASS12corr"]):
outfiles.append(spec_list[name].dump_txt(filename="_".join([filename, name]), output_dir=output_dir))
return outfiles
def plot(self, spec_list=None, savename=None, plots_folder="", fos=False):
"""
Display current spectra in single figure.
"""
outfiles = []
if hasattr(self, "ax"):
del self.ax
if hasattr(self, "fig"):
del self.fig
if spec_list is None:
spec_list = self.subspec
self.fig, self.ax = plt.subplots(4, 2, sharex=True, sharey="col", figsize=(20, 10), layout="constrained")
for i, (name, title) in enumerate(
[("PASS1", "Pass Direction 1"), ("PASS2", "Pass Direction 2"), ("PASS12", "Pass Direction 1&2"), ("PASS12corr", "Pass Direction 1&2 corrected")]
):
self.ax[i][0].set_title(title)
if fos:
self.ax[i][0] = spec_list[name].plot(ax=self.ax[i][0])
self.ax[i][1].set_xlabel(r"Wavelength [$\AA$]")
self.ax[i][1].errorbar(self.wav[i], self.P_fos[i], xerr=self.wav_err[i].T, yerr=self.P_fos_err[i], color="b", fmt=".", label="P_fos")
self.ax[i][1].set_ylim([0.0, 1.0])
self.ax[i][1].set_ylabel(r"P", color="b")
self.ax[i][1].tick_params(axis="y", color="b", labelcolor="b")
ax22 = self.ax[i][1].twinx()
ax22.errorbar(self.wav[i], self.PA_fos[i], xerr=self.wav_err[i].T, yerr=self.PA_fos_err[i], color="r", fmt=".", label="PA_fos [°]")
ax22.set_ylim([0.0, 180.0])
ax22.set_ylabel(r"PA", color="r")
ax22.tick_params(axis="y", color="r", labelcolor="r")
h2, l2 = self.ax[i][1].get_legend_handles_labels()
h22, l22 = ax22.get_legend_handles_labels()
self.ax[i][1].legend(h2 + h22, l2 + l22, ncols=2, loc=1)
else:
self.ax[i] = spec_list[name].plot(ax=self.ax[i])
self.ax[0][0].set_ylim(ymin=0.0)
self.fig.suptitle("_".join([self.hd["TARGNAME"], str(self.hd["PROPOSID"]), self.hd["FILENAME"], self.hd["APER_ID"]]))
if savename is not None:
self.fig.savefig(join_path(plots_folder, savename + ".pdf"), dpi=300, bbox_inches="tight")
outfiles.append(join_path(plots_folder, savename + ".pdf"))
return outfiles
def bin_size(self, size):
"""
Rebin spectra to selected bin size in Angstrom.
"""
key = "{0:.2f}bin".format(size)
if key not in self.subspec.keys():
self.subspec[key] = dict([])
for name in ["PASS1", "PASS2", "PASS12", "PASS12corr"]:
self.subspec[key][name] = self.subspec[name].bin_size(size)
return self.subspec[key]
def main(infiles, bin_size=None, output_dir=None):
outfiles = []
if infiles is not None:
prod = np.array([["/".join(filepath.split("/")[:-1]), filepath.split("/")[-1]] for filepath in infiles], dtype=str)
obs_dir = "/".join(infiles[0].split("/")[:-1])
if not path_exists(obs_dir):
system("mkdir -p {0:s} {1:s}".format(obs_dir, obs_dir.replace("data", "plots")))
else:
print("Must input files to process.")
return 1
data_folder = prod[0][0]
if output_dir is None:
output_dir = data_folder
try:
plots_folder = data_folder.replace("data", "plots")
except ValueError:
plots_folder = "."
if not path_exists(plots_folder):
system("mkdir -p {0:s} ".format(plots_folder))
infiles = [p[1] for p in prod]
roots = np.unique([file.split("_")[0] for file in infiles])
for file_root in roots:
print(file_root)
spec = FOSspecpol(file_root, data_folder)
filename = "_".join([spec.hd["TARGNAME"], "FOS", str(spec.hd["PROPOSID"]), spec.file_root, spec.hd["APER_ID"]])
if bin_size is not None:
key = "{0:.2f}bin".format(bin_size)
spec.bin_size(bin_size)
outfiles += spec.dump_txt("_".join([filename, key]), spec_list=spec.subspec[key], output_dir=output_dir)
outfiles += spec.plot(savename="_".join([filename, key]), spec_list=spec.subspec[key], plots_folder=plots_folder)
else:
outfiles += spec.dump_txt(filename, output_dir=output_dir)
outfiles += spec.plot(savename=filename, plots_folder=plots_folder)
del spec
plt.show()
return outfiles
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Display and dump FOS Spectropolarimetry")
parser.add_argument("-f", "--files", metavar="path", required=False, nargs="*", help="the full or relative path to the data products", default=None)
parser.add_argument("-b", "--bin", metavar="bin_size", required=False, help="The bin size to resample spectra", type=float, default=None)
parser.add_argument(
"-o", "--output_dir", metavar="directory_path", required=False, help="output directory path for the data products", type=str, default=None
)
args = parser.parse_args()
exitcode = main(infiles=args.files, bin_size=args.bin, output_dir=args.output_dir)
print("Written to: ", exitcode)

requirements.txt Normal file (+7)
View File

@@ -0,0 +1,7 @@
# Requirements for FOC_Reduction
numpy
scipy
astropy
astroquery # To directly query the MAST archives
matplotlib >= 3.8 # Brought some deprecations on ContourSet