Initial commit
This commit is contained in:
21
.gitignore
vendored
Executable file
21
.gitignore
vendored
Executable file
@@ -0,0 +1,21 @@
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# Ignore any .fits/.npy/.par data file
|
||||
*.fits
|
||||
*.npy
|
||||
*.par
|
||||
*.zip
|
||||
*.tar
|
||||
*.tar.xz
|
||||
|
||||
# Remove local data and plots folders
|
||||
data/
|
||||
plots/
|
||||
2
README.md
Executable file
2
README.md
Executable file
@@ -0,0 +1,2 @@
|
||||
# Project 3 : SZ signal recovery from stacking and characterization of signals
|
||||
users : t.barnouin / k.zhang
|
||||
BIN
report/P3_SZ_Signal_Using_Stacking.pdf
Executable file
BIN
report/P3_SZ_Signal_Using_Stacking.pdf
Executable file
Binary file not shown.
1
report/README.md
Executable file
1
report/README.md
Executable file
@@ -0,0 +1 @@
|
||||
# t.barnouin : directory where we will put our report
|
||||
BIN
report/Spectral Energy Distribution of SZ Catalogue.pdf
Executable file
BIN
report/Spectral Energy Distribution of SZ Catalogue.pdf
Executable file
Binary file not shown.
1
src/README.md
Executable file
1
src/README.md
Executable file
@@ -0,0 +1 @@
|
||||
# t.barnouin : directory where all our code will be
|
||||
208
src/SED.py
Executable file
208
src/SED.py
Executable file
@@ -0,0 +1,208 @@
|
||||
#!/usr/bin/ipython3
|
||||
#-*- coding:utf-8 -*-
|
||||
|
||||
import sys, time
|
||||
import concurrent.futures
|
||||
import numpy as np
|
||||
import stack_maps as stack
|
||||
from astropy.io import fits
|
||||
import matplotlib.pyplot as plt
|
||||
from scipy.optimize import curve_fit
|
||||
|
||||
def aperture(stack, px_size, center, angle):
    """
    Build a boolean mask selecting the circular aperture to work on.

    Parameters
    ----------
    stack : 2-D array whose shape defines the mask grid.
    px_size : pixel size in degrees.
    center : (x, y) pixel coordinates of the aperture centre.
    angle : aperture radius in arcmin.

    Returns
    -------
    Boolean array of stack.shape, True inside the aperture.
    """
    rows, cols = np.indices(stack.shape)
    # Convert the radius from arcmin to pixels.
    cutoff = (angle / 60.) / px_size
    dist = np.hypot(cols - center[0], rows - center[1])
    return dist <= cutoff
|
||||
|
||||
|
||||
def stack_flux(freq, stacked_data, px_size, center, angle):
    """
    Compute the integrated flux in Mjy/sr of a stacked image within a
    circular aperture of radius *angle* (arcmin), after subtracting the
    mean background estimated outside the aperture.
    """
    # Conversion coefficients in Mjy/sr/K per Planck channel; the
    # 545/857 GHz maps are already in Mjy/sr (factor 1).
    to_mjy = {100: 244.1, 143: 371.74, 217: 483.690, 353: 287.450, 545: 1., 857: 1.}

    inside = aperture(stacked_data, px_size, center, angle)
    bkg_level = np.mean(stacked_data[~inside])
    total = np.sum(stacked_data[inside] - bkg_level)
    return total * to_mjy[freq]
|
||||
|
||||
|
||||
def bootstrap_flux(data_sample, wcs_sample, freq, angle, n_it, savename=None):
    """
    Estimate the aperture flux of a sample with the bootstrap technique.

    Draws n_it resamples of the objects (with replacement), stacks each
    resample and measures its aperture flux; returns the mean and the
    standard deviation of the resulting flux distribution.

    Parameters
    ----------
    data_sample : array (nobj, npix, npix) of cutouts.
    wcs_sample : array (nobj,) of WCS objects matching the cutouts.
    freq : int, Planck channel frequency in GHz.
    angle : float, aperture radius in arcmin.
    n_it : int, number of bootstrap iterations.
    savename : str or None; when given, the (mean, std) pair is saved to
        ../data/flux_<savename>_<freq>.npy.

    Returns
    -------
    (mean, std) of the bootstrap flux distribution.
    """
    n_obj = data_sample.shape[0]
    flux = np.zeros(n_it)
    for i in range(n_it):
        # Resample the objects with replacement.
        draw = np.random.randint(0, n_obj, n_obj)
        stack_it, px_size, center = stack.stack_sample(data_sample[draw], wcs_sample[draw])
        flux[i] = stack_flux(freq, stack_it, px_size, center, angle)

    if savename is not None:  # idiomatic None test instead of `!= None`
        np.save("../data/flux_{0:s}_{1:d}.npy".format(savename, freq),
                np.array([flux.mean(), flux.std()]))

    return flux.mean(), flux.std()
|
||||
|
||||
|
||||
def parallel_bootstrap(data_sample, wcs_sample, freq, angle, n_it, savename=None):
    """
    Run bootstrap_flux for several frequencies concurrently.

    Parameters
    ----------
    data_sample, wcs_sample : arrays indexed by frequency first.
    freq : sequence of channel frequencies in GHz.
    angle : aperture radius in arcmin.
    n_it : number of bootstrap iterations per frequency.
    savename : str or None; when given, the result is saved to
        ../data/flux_<savename>.npy.

    Returns
    -------
    Array (nfreq, 2) of (mean, std) flux per frequency.
    """
    nfreq = len(freq)
    flux_sample = np.zeros((nfreq, 2))
    # Context manager guarantees the worker threads are joined even if a
    # task raises (the original executor was never shut down).
    with concurrent.futures.ThreadPoolExecutor() as exe:
        future_flux = [exe.submit(bootstrap_flux, data_sample[i], wcs_sample[i],
                                  freq[i], angle, n_it)
                       for i in range(nfreq)]
        for i, fut in enumerate(future_flux):
            flux_sample[i] = fut.result()

    if savename is not None:
        np.save("../data/flux_{0:s}.npy".format(savename), flux_sample)

    return flux_sample
|
||||
|
||||
|
||||
def s_SZ(nu, y_SZ):
    """
    Thermal Sunyaev-Zel'dovich spectral law, in Mjy/sr.

    Parameters
    ----------
    nu : frequency in Hz (scalar or array).
    y_SZ : Compton-y amplitude of the distortion.
    """
    planck_h = 6.62607015e-34   # J.s (SI units)
    boltzmann = 1.380649e-23    # J/K
    cmb_temp = 2.72548          # K
    x = planck_h / (boltzmann * cmb_temp) * nu  # reduced frequency

    expx = np.exp(x)
    spectral_shape = x * (expx + 1.) / (expx - 1.) - 4.
    return y_SZ * spectral_shape * x**4 * expx / (expx - 1.)**2
|
||||
|
||||
|
||||
def s_d(nu, A_d, beta_d, T_d):
    """
    Modified black-body SED of thermal dust, in Mjy/sr (IR contamination).

    Normalised at the 545 GHz reference frequency, so s_d(545e9, A_d, ...)
    equals A_d exactly.

    Parameters
    ----------
    nu : frequency in Hz (scalar or array).
    A_d : amplitude at the reference frequency.
    beta_d : dust emissivity index.
    T_d : dust temperature in K.
    """
    planck_h = 6.62607015e-34  # J.s (SI units)
    boltzmann = 1.380649e-23   # J/K
    nu_ref = 545e9             # reference frequency [Hz]

    gamma = planck_h / (boltzmann * T_d)
    ratio = (np.exp(gamma * nu_ref) - 1.) / (np.exp(gamma * nu) - 1.)
    return A_d * (nu / nu_ref)**(beta_d + 1.) * ratio
|
||||
|
||||
|
||||
def flux_fit(nu, y_SZ, A_d, beta_d, T_d):
    """
    Total SED model: thermal SZ signal plus IR contamination by thermal
    dust, in Mjy/sr.
    """
    sz_part = s_SZ(nu, y_SZ)
    dust_part = s_d(nu, A_d, beta_d, T_d)
    return sz_part + dust_part
|
||||
|
||||
|
||||
def plot_SED(freq, flux, samplename, save=False):
    """
    Fit and nicely plot the SED (SZ + dust contributions) of a sample.

    Parameters
    ----------
    freq : array of channel frequencies in GHz.
    flux : array (nfreq, 2) of (flux, error) pairs in Mjy/sr.
    samplename : label used for the title, legend and output file name.
    save : when True, save the figure to ../plots/SED_<samplename>.png.
    """
    freq_lin = np.linspace(freq.min(), freq.max(), 300)
    # The model works in Hz, hence the 1e9 factor on the GHz inputs.
    popt, _ = curve_fit(flux_fit, xdata=freq*1e9, ydata=flux[:,0], p0=[1., 10., 2.3, 16.])
    # BUG FIX: the chi^2 must be evaluated at the same frequencies the fit
    # used (Hz); the original passed `freq` in GHz, giving a bogus chi^2.
    chisq = np.sum(((flux[:,0]-flux_fit(freq*1e9,*popt))/flux[:,1])**2)

    plt.figure(figsize=(10,5))
    plt.errorbar(freq,flux[:,0], yerr=flux[:,1], fmt='ok', label=samplename)
    plt.plot(freq_lin,s_SZ(freq_lin*1e9,popt[0]), 'b--', label=r"$S_{SZ}$ model fit with $y_{SZ}$ = "+"{0:.5f}".format(popt[0]))
    plt.plot(freq_lin,s_d(freq_lin*1e9,*popt[1:]), 'r--', label=r"$S_{d}$ model fit with $A_{d}$ = "+"{0:.3e}".format(popt[1])+r"; $\beta_{d}$ = "+"{0:.5f}".format(popt[2])+r"; $T_{d}$ = "+"{0:.5f}\n".format(popt[3]))
    plt.plot(freq_lin,flux_fit(freq_lin*1e9, *popt), 'g--', label=r"$S_{SZ}+S_{d}$ model fit with $\chi^2$/ndf = "+"{0:.5f}".format(chisq/(len(freq)-len(popt))))
    plt.plot(freq,[0.]*len(freq), 'k--')

    plt.title("SED of the SZ effect for '{0:s}' sample.".format(samplename))
    plt.xlabel("Frequency [GHz]")
    plt.ylabel("Flux density [Mjy/sr]")
    plt.legend()
    if save:
        plt.savefig("../plots/SED_{0:s}.png".format(samplename), bbox_inches='tight')
    plt.show()

    return 0
|
||||
|
||||
|
||||
def compute_SED(sample, savename, ROI=[0.5, 128], angle=None, n_it=500, plot=False):
    """
    Automatically compute and save the SED of a sample.

    Parameters
    ----------
    sample : catalog rows (must expose 'GLON'/'GLAT' columns downstream).
    savename : tag used for all files written under ../data/.
    ROI : [pixel size in arcmin, side length in pixels] of the cutouts.
    angle : aperture radius in arcmin; when None it defaults to the
        143 GHz beam FWHM read from the Planck RIMO.
    n_it : number of bootstrap iterations.
    plot : when True, also plot the resulting SED (figure is saved).
    """
    freq = np.array([100, 143, 217, 353, 545, 857])

    # `is None` instead of `== None`: the caller may pass a numpy array,
    # for which `== None` does elementwise comparison.
    if angle is None:
        rimo = fits.getdata("/home/abeelen/Planck/products_2015/HFI_RIMO_R2.00.fits", 2)
        true_FWHM = rimo[:]['FWHM']
        angle = 1.*true_FWHM[freq == 143]

    print("Getting sample data for {0:s}...".format(savename))
    t0 = time.time()
    wcs_sample, data_sample = stack.get_data_sample(freq, sample, ROI=ROI, savename=savename)
    t1 = time.time()
    print("Elapsed time : {0:f}sec".format(t1-t0))

    # Stack every frequency channel and save the stacked images.
    stack_freq = np.zeros((len(freq), data_sample.shape[2], data_sample.shape[3]))
    for i in range(len(freq)):
        stack_freq[i] = stack.stack_sample(data_sample[i], wcs_sample[i])[0]
    np.save("../data/stack_{0:s}.npy".format(savename), stack_freq)

    print("Computing flux values...")
    t2 = time.time()
    flux_sample = parallel_bootstrap(data_sample, wcs_sample, freq, angle, n_it, savename=savename)
    t3 = time.time()
    print("Elapsed time : {0:f}sec".format(t3-t2))

    if plot:
        plot_SED(freq, flux_sample, savename, save=True)

    return 0
|
||||
|
||||
|
||||
def main():
    """Build samples from the PSZ2 catalogue, then plot their SEDs and stacks."""
    # General information on the Planck data channels
    # NOTE(review): hard-coded absolute path — only works on the original host.
    rimo = fits.getdata("/home/abeelen/Planck/products_2015/HFI_RIMO_R2.00.fits", 2)
    true_FWHM = rimo[:]['FWHM']

    frequencies = np.array([100, 143, 217, 353, 545, 857])

    # Get objects sample from PSZ catalog
    with fits.open("../data/PSZ2v1.fits") as hdu:
        PSZ_catalog = hdu[1].data

    # Known clusters have a measured redshift; candidates have neither a
    # redshift nor an external validation flag.
    clusters = PSZ_catalog[PSZ_catalog['redshift'] > 0]
    candidates = PSZ_catalog[(PSZ_catalog['redshift'] < 0) & (PSZ_catalog['validation'] < 0)]
    # redshift_sup0_4 = PSZ_catalog[PSZ_catalog['redshift'] > 0.4]
    # redshift_inf0_4 = PSZ_catalog[np.abs(PSZ_catalog['redshift']) < 0.4]
    # QN_inf0_4 = PSZ_catalog[PSZ_catalog['q_neural'] < 0.4]
    QN_sup0_4 = PSZ_catalog[PSZ_catalog['q_neural'] > 0.4]

    # Compute SED and plot
    # Aperture radius: the 143 GHz beam FWHM (arcmin) from the RIMO.
    angle = 1.*true_FWHM[frequencies == 143]

    # The compute_SED calls below produce the .npy files reloaded next;
    # they are commented out to reuse previously computed results.
    #compute_SED(clusters, "clusters", ROI=[0.5, 128], angle=angle, n_it=500, plot=True)
    #compute_SED(candidates, "candidates", ROI=[0.5, 128], angle=angle, n_it=500, plot=True)
    #compute_SED(QN_sup0_4, "QN_sup0_4", ROI=[0.5, 128], angle=angle, n_it=500, plot=True)
    flux_clusters = np.load("../data/flux_clusters.npy")
    flux_candidates = np.load("../data/flux_candidates.npy")
    flux_QN_sup0_4 = np.load("../data/flux_QN_sup0_4.npy")

    plot_SED(frequencies, flux_clusters, "Known clusters", save=True)
    plot_SED(frequencies, flux_candidates, "Candidate clusters", save=True)
    plot_SED(frequencies, flux_QN_sup0_4, "High-reliability clusters", save=True)

    # Reload and display the stacked images computed by compute_SED.
    stack_clusters = np.load("../data/stack_clusters.npy")
    stack_candidates = np.load("../data/stack_candidates.npy")
    stack_QN_sup0_4 = np.load("../data/stack_QN_sup0_4.npy")

    stack.plot_stacks(np.array([stack_clusters, stack_candidates, stack_QN_sup0_4]), ["Known clusters", "Candidate clusters", "High-reliability clusters"], save=True)

    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Propagate main()'s return code to the shell.
    sys.exit(main())
|
||||
246
src/stack_maps.py
Executable file
246
src/stack_maps.py
Executable file
@@ -0,0 +1,246 @@
|
||||
#!/usr/bin/ipython3
|
||||
#-*- coding:utf-8 -*-
|
||||
|
||||
import sys
|
||||
import matplotlib.pyplot as plt
|
||||
from astropy.io import fits
|
||||
from astropy import wcs
|
||||
import numpy as np
|
||||
import healpy as hp
|
||||
|
||||
|
||||
def create_WCS(sky_pos, ROI=[0.5, 128], radec=False):
    """
    Build a 2-axis gnomonic (TAN) WCS centered at the given sky position.

    Parameters
    ----------
    sky_pos : (lon, lat) of the center, in degrees.
    ROI : [pixel size in arcmin, side length in pixels].
    radec : use equatorial (RA/Dec) axes instead of Galactic ones.
    """
    half = int(ROI[1] / 2.)

    projection = wcs.WCS(naxis=2)
    projection.wcs.crpix = np.array([half, half])      # reference pixel at the center
    projection.wcs.crval = sky_pos
    projection.wcs.cdelt = [-ROI[0] / 60., ROI[0] / 60.]  # arcmin -> deg, lon axis flipped

    if radec:
        projection.wcs.ctype = ['RA--TAN', 'DEC--TAN']
    else:
        projection.wcs.ctype = ['GLON-TAN', 'GLAT-TAN']

    return projection
|
||||
|
||||
|
||||
def create_ROI(hpmap, sky_pos, ROI=[0.5, 128], radec=False):
    """
    Project a Healpix map onto a flat cutout around a sky position.

    Returns the WCS of the cutout and the (ROI[1], ROI[1]) data array
    sampled from the Healpix map by nearest-pixel lookup.
    """
    nside = hp.get_nside(hpmap)
    projection = create_WCS(sky_pos, ROI, radec)

    # Map every cutout pixel to its sky coordinates, then to a Healpix index.
    rows, cols = np.indices((ROI[1], ROI[1]))
    lon, lat = projection.wcs_pix2world(cols, rows, 0)
    healpix_index = hp.ang2pix(nside, lon, lat, lonlat=True)

    return projection, hpmap[healpix_index]
|
||||
|
||||
|
||||
def plot_ROI(w, data, name=None, freq=None, pos=None, saveplot=False):
    """
    Display a nice plot of the given projected data.

    Parameters
    ----------
    w : WCS used as the plot projection.
    data : 2-D array to display.
    name, freq, pos : object name, frequency (GHz) and (GLON, GLAT)
        position; used for the title and the output file name. When
        saveplot is True, all three must be provided.
    saveplot : when True, also save the figure under ../plots/.
    """
    fig = plt.figure(figsize=(10,5))
    ax = plt.subplot(projection=w, label='overlays', figure=fig)

    ax.imshow(data, origin='lower')

    ax.coords.grid(True, color='white', ls='solid')
    ax.coords[0].set_axislabel('Galactic Longitude')
    ax.coords[1].set_axislabel('Galactic Latitude')

    # Overlay an equatorial (J2000) grid on top of the Galactic frame.
    overlay = ax.get_coords_overlay('fk5')
    overlay.grid(color='white', ls='dotted')
    overlay[0].set_axislabel('Right Ascension (J2000)')
    overlay[1].set_axislabel('Declination (J2000)')

    if name is not None:
        ax.set_title("Object {0:s} at {1:d}GHz".format(name, freq), y=1.0, pad=+40)
    if saveplot:
        # BUG FIX: the format string reused index {2} with both ':.2f' and
        # ':d', which raises ValueError; the frequency is argument index 3.
        plt.savefig("../plots/{0:s}_G{1:.2f}±{2:.2f}_{3:d}GHz.png".format(name, *pos, freq), bbox_inches='tight')
    plt.show()

    return 0
|
||||
|
||||
|
||||
def save_file(map_name,sky_pos,header,data):
    """
    Save a projected map to a fits file with the appropriate header.

    The file is written to ../data/<map_name>_G<lon>±<lat>.fits and is
    overwritten if it already exists.
    """
    target = '../data/' + str(map_name) + "_G{0:.2f}±{1:.2f}".format(*sky_pos) + '.fits'

    primary = fits.PrimaryHDU(data, header)
    primary.writeto(target, overwrite=True)

    return 0
|
||||
|
||||
|
||||
def get_data_sample(freq, sample, ROI=[0.5, 128], savename=None):
    """
    Construct an array (nfreq, nobj, npix, npix) of cutouts for a given
    sample and ROI parameters. Allow local saving.

    Parameters
    ----------
    freq : int or sequence of channel frequencies in GHz.
    sample : catalog rows exposing 'GLON' and 'GLAT' columns.
    ROI : [pixel size in arcmin, side length in pixels].
    savename : str or None; when given, the WCS and data arrays are saved
        under ../data/.

    Returns
    -------
    (wcs_sample, data_sample); when a single frequency is given the
    leading frequency axis is dropped from both.
    """
    # Accept a scalar frequency — plain or numpy integer — as well as an
    # array (the original `type(freq) == int` missed numpy ints).
    if isinstance(freq, (int, np.integer)):
        freq = np.array([freq])
    nfreq = len(freq)
    nobj = len(sample)

    # Get Planck data
    dirf = "../data/Planck2015/"
    fits_basename = "HFI_SkyMap_{0:d}_2048_R2.00_full.fits"

    data_sample = np.zeros((nfreq, nobj, ROI[1], ROI[1]))
    wcs_sample = np.zeros((nfreq, nobj), dtype=object)
    for i in range(nfreq):
        # One full-sky Healpix map per frequency; cut out every object.
        hpmap = hp.read_map(dirf+fits_basename.format(freq[i]), verbose=False)
        for j in range(nobj):
            pos_obj = [sample['GLON'][j], sample['GLAT'][j]]
            wcs_obj, data_obj = create_ROI(hpmap, pos_obj, ROI, radec=False)
            data_sample[i,j] = data_obj
            wcs_sample[i,j] = wcs_obj

    if nfreq == 1:
        wcs_sample, data_sample = wcs_sample[0], data_sample[0]

    if savename is not None:
        np.save("../data/wcs_{0:s}.npy".format(savename), wcs_sample)
        np.save("../data/data_{0:s}.npy".format(savename), data_sample)

    return wcs_sample, data_sample
|
||||
|
||||
|
||||
def save_sample(sample, sample_name, freq, ROI=[0.5, 128]): #sample from PSZ2v1.fits, freq from planck full sky map
    """
    Save all cutouts of a sample at one frequency into a single fits file.

    The primary header stores the sample metadata; each object's cutout
    goes into its own image extension with its WCS and target name.
    The file is written to ../data/<sample_name>_<freq>GHz.fits.
    """
    # Get Planck data
    dirf = "../data/Planck2015/"
    fits_basename = "HFI_SkyMap_{0:d}_2048_R2.00_full.fits"
    hpmap = hp.read_map(dirf+fits_basename.format(freq), verbose=False)

    hdul = fits.HDUList([])
    primary_hdu = fits.PrimaryHDU()
    hdul.append(primary_hdu)
    hdr = hdul[0].header
    # Fixed typo in the header comment: 'Same' -> 'Name'.
    hdr['sample'] = (sample_name, 'Name of the sample')
    hdr['freq'] = (freq, 'Detection frequency in GHz')
    hdr['nobj'] = len(sample)
    hdr['ROI_size'] = (ROI[1], 'Size in pixels of the ROI (square)')
    hdr['px_size'] = (ROI[0], 'Size of a pixel in arcmin')

    for i in range(len(sample)):
        sample_pos = [sample['GLON'][i], sample['GLAT'][i]]
        sample_w, sample_data = create_ROI(hpmap, sample_pos, ROI, radec=False)

        sample_header = sample_w.to_header()
        sample_header['targname'] = sample['NAME'][i]

        hdu_sample = fits.ImageHDU(data=sample_data,header=sample_header)
        hdul.append(hdu_sample)

    filename = '../data/' + str(sample_name) + "_{0:d}GHz".format(freq) + '.fits'
    hdul.writeto(filename, overwrite=True)

    return 0
|
||||
|
||||
|
||||
def data_sample_from_fits(sample):
    """
    Construct an array (ncluster, npix, npix) of 'sample'.

    Input 'sample' must be a valid path to a fits file (as produced by
    save_sample); returns (wcs_sample, data_sample).
    """
    with fits.open(sample) as hdul:
        meta = hdul[0].header
        n_obj = meta['nobj']
        side = meta['ROI_size']

        cutouts = np.zeros((n_obj, side, side))
        projections = np.zeros(n_obj, dtype=object)
        # Extension 0 is the primary header; cutouts start at extension 1.
        for k in range(n_obj):
            cutouts[k] = hdul[k + 1].data
            projections[k] = wcs.WCS(hdul[k + 1].header)

    return projections, cutouts
|
||||
|
||||
|
||||
def stack_sample(data_sample, wcs_sample=None):
    """
    Compute the stack (pixel-wise mean) of a given sample.

    Parameters
    ----------
    data_sample : ndarray (nobj, npix, npix) of cutouts, or a path to a
        fits file produced by save_sample (then wcs_sample is loaded
        from that file and may be left None).
    wcs_sample : array (nobj,) of WCS objects matching the cutouts.

    Returns
    -------
    (data_stack, px_size, center): the mean image, the mean absolute
    pixel size and the mean reference pixel over the sample's WCS.
    """
    # BUG FIX: the fits branch referenced an undefined name `sample`
    # (NameError); it must load from the path that was actually passed.
    # Also use isinstance instead of comparing str(type(...)).
    if not isinstance(data_sample, np.ndarray):
        wcs_sample, data_sample = data_sample_from_fits(data_sample)

    data_stack = data_sample.mean(axis=0)
    px_size = np.array([np.abs(w.wcs.cdelt) for w in wcs_sample]).mean()
    center = np.array([w.wcs.crpix for w in wcs_sample]).mean(axis=0)

    return data_stack, px_size, center
|
||||
|
||||
|
||||
def plot_stacks(stack_list, stack_names, save=False):
    """
    Plot together the per-frequency stacks of several samples.

    Parameters
    ----------
    stack_list : array (nsample, nfreq, npix, npix) of stacked images.
    stack_names : labels for each sample (one grid row per sample).
    save : when True, save the figure under ../plots/.
    """
    freq = np.array([100, 143, 217, 353, 545, 857])
    nfreq, nsample = len(freq), len(stack_list)

    grid = plt.GridSpec(nsample, nfreq, hspace=0, wspace=0)
    fig = plt.figure(figsize=(16,9))
    for row in range(nsample):
        for col in range(nfreq):
            panel = fig.add_subplot(grid[row, col], xticks=[], yticks=[])
            panel.imshow(stack_list[row, col], origin='lower')
            # Label only the bottom row (frequency) and first column (sample).
            if row == nsample - 1:
                panel.set_xlabel("{0:d}GHz".format(freq[col]))
            if col == 0:
                panel.set_ylabel(stack_names[row])
    if save:
        outname = "../plots/Stacks" + "".join("_" + tag for tag in stack_names)
        plt.savefig(outname + ".png", bbox_inches='tight')
    plt.show()
    return 0
|
||||
|
||||
|
||||
def main():
    """Stack the known-cluster and candidate samples and plot the result."""
    # Get Planck data
    dirf = "../data/Planck2015/"
    ext = ".fits"
    fits_basename = "HFI_SkyMap_{0:d}_2048_R2.00_{1:s}"
    mission = "full" #Can be 'full', 'halfmission-1', 'halfmission-2'
    frequencies = np.array([100, 143, 217, 353, 545, 857])

    # NOTE(review): hfi_maps is built but never used below.
    hfi_maps = np.array([fits_basename.format(freq,mission) for freq in frequencies])

    ## Get known clusters and candidates
    with fits.open("../data/PSZ2v1.fits") as hdu:
        PSZ_catalog = hdu[1].data

    # Known clusters have a measured redshift; candidates have neither a
    # redshift nor an external validation flag.
    clusters = PSZ_catalog[PSZ_catalog['redshift']>0]
    candidates = PSZ_catalog[(PSZ_catalog['redshift']<0) & (PSZ_catalog['validation']<0)]

    # The get_data_sample calls below produce the .npy files reloaded next;
    # they are commented out to reuse previously computed cutouts.
    # wcs_clusters, data_clusters = get_data_sample(frequencies, clusters, ROI=[0.5, 128], savename="clusters")
    # wcs_candidates, data_candidates = get_data_sample(frequencies, candidates, ROI=[0.5, 128], savename="candidates")
    wcs_clusters, data_clusters = np.load("../data/wcs_clusters.npy"), np.load("../data/data_clusters.npy")
    wcs_candidates, data_candidates = np.load("../data/wcs_candidates.npy"), np.load("../data/data_candidates.npy")

    # One stacked image per frequency and per sample.
    stacks_clusters = np.zeros((len(frequencies), data_clusters.shape[2], data_clusters.shape[3]))
    stacks_candidates = np.zeros((len(frequencies), data_candidates.shape[2], data_candidates.shape[3]))

    for i in range(len(frequencies)):
        stacks_clusters[i] = stack_sample(data_clusters[i], wcs_clusters[i])[0]
        stacks_candidates[i] = stack_sample(data_candidates[i], wcs_candidates[i])[0]

    plot_stacks(np.array([stacks_clusters, stacks_candidates]), ['clusters', 'candidates'], save=True)

    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Propagate main()'s return code to the shell.
    sys.exit(main())
|
||||
Reference in New Issue
Block a user