Modify files to comply with PEP 8 format

2024-02-26 16:30:10 +01:00
parent d2b59cf05a
commit 893cf339c7
12 changed files with 1751 additions and 1659 deletions
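A formatting pass like this one can be reproduced with an automatic formatter; the sketch below uses autopep8, which is an assumption (the commit does not state which tool, if any, was used), and the file name and line-length option are illustrative only.

# Minimal sketch of an automated PEP 8 pass (assumed tool: autopep8;
# "query_mast.py" and max_line_length are hypothetical, not taken from this commit).
import autopep8

with open("query_mast.py") as src:
    source = src.read()

# fix_code() returns the reformatted source string (operator and comma spacing,
# comment style, line wrapping) without changing the code's behaviour.
fixed = autopep8.fix_code(source, options={"max_line_length": 160})

with open("query_mast.py", "w") as dst:
    dst.write(fixed)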


@@ -17,17 +17,20 @@ def divide_proposal(products):
Divide observation in proposals by time or filter
"""
for pid in np.unique(products['Proposal ID']):
-        obs = products[products['Proposal ID']==pid].copy()
-        close_date = np.unique(np.array([TimeDelta(np.abs(Time(obs['Start']).unix-date.unix),format='sec') < 7.*u.d for date in obs['Start']], dtype=bool), axis=0)
-        if len(close_date)>1:
+        obs = products[products['Proposal ID'] == pid].copy()
+        close_date = np.unique(np.array([TimeDelta(np.abs(Time(obs['Start']).unix-date.unix), format='sec')
+                                         < 7.*u.d for date in obs['Start']], dtype=bool), axis=0)
+        if len(close_date) > 1:
for date in close_date:
-                products['Proposal ID'][np.any([products['Dataset']==dataset for dataset in obs['Dataset'][date]],axis=0)] = "_".join([obs['Proposal ID'][date][0],str(obs['Start'][date][0])[:10]])
+                products['Proposal ID'][np.any([products['Dataset'] == dataset for dataset in obs['Dataset'][date]], axis=0)
+                                        ] = "_".join([obs['Proposal ID'][date][0], str(obs['Start'][date][0])[:10]])
for pid in np.unique(products['Proposal ID']):
-        obs = products[products['Proposal ID']==pid].copy()
-        same_filt = np.unique(np.array(np.sum([obs['Filters'][:,1:]==filt[1:] for filt in obs['Filters']],axis=2)<3,dtype=bool),axis=0)
-        if len(same_filt)>1:
+        obs = products[products['Proposal ID'] == pid].copy()
+        same_filt = np.unique(np.array(np.sum([obs['Filters'][:, 1:] == filt[1:] for filt in obs['Filters']], axis=2) < 3, dtype=bool), axis=0)
+        if len(same_filt) > 1:
for filt in same_filt:
-                products['Proposal ID'][np.any([products['Dataset']==dataset for dataset in obs['Dataset'][filt]],axis=0)] = "_".join([obs['Proposal ID'][filt][0],"_".join([fi for fi in obs['Filters'][filt][0][1:] if fi[:-1]!="CLEAR"])])
+                products['Proposal ID'][np.any([products['Dataset'] == dataset for dataset in obs['Dataset'][filt]], axis=0)] = "_".join(
+                    [obs['Proposal ID'][filt][0], "_".join([fi for fi in obs['Filters'][filt][0][1:] if fi[:-1] != "CLEAR"])])
return products
@@ -78,22 +81,22 @@ def get_product_list(target=None, proposal_id=None):
for c, n_c in zip(select_cols, cols):
results.rename_column(c, n_c)
-    results['Proposal ID'] = Column(results['Proposal ID'],dtype='U35')
-    results['Filters'] = Column(np.array([filt.split(";") for filt in results['Filters']],dtype=str))
+    results['Proposal ID'] = Column(results['Proposal ID'], dtype='U35')
+    results['Filters'] = Column(np.array([filt.split(";") for filt in results['Filters']], dtype=str))
results['Start'] = Column(Time(results['Start']))
results['Stop'] = Column(Time(results['Stop']))
results = divide_proposal(results)
obs = results.copy()
-    ### Remove single observations for which a FIND filter is used
-    to_remove=[]
+    # Remove single observations for which a FIND filter is used
+    to_remove = []
for i in range(len(obs)):
if "F1ND" in obs[i]['Filters']:
to_remove.append(i)
obs.remove_rows(to_remove)
-    ### Remove observations for which a polarization filter is missing
-    polfilt = {"POL0":0,"POL60":1,"POL120":2}
+    # Remove observations for which a polarization filter is missing
+    polfilt = {"POL0": 0, "POL60": 1, "POL120": 2}
for pid in np.unique(obs['Proposal ID']):
used_pol = np.zeros(3)
for dataset in obs[obs['Proposal ID'] == pid]:
@@ -102,26 +105,26 @@ def get_product_list(target=None, proposal_id=None):
obs.remove_rows(np.arange(len(obs))[obs['Proposal ID'] == pid])
tab = unique(obs, ['Target name', 'Proposal ID'])
obs["Obs"] = [np.argmax(np.logical_and(tab['Proposal ID']==data['Proposal ID'],tab['Target name']==data['Target name']))+1 for data in obs]
obs["Obs"] = [np.argmax(np.logical_and(tab['Proposal ID'] == data['Proposal ID'], tab['Target name'] == data['Target name']))+1 for data in obs]
try:
n_obs = unique(obs[["Obs", "Filters", "Start", "Central wavelength", "Instrument",
"Size", "Target name", "Proposal ID", "PI last name"]], 'Obs')
n_obs = unique(obs[["Obs", "Filters", "Start", "Central wavelength", "Instrument", "Size", "Target name", "Proposal ID", "PI last name"]], 'Obs')
except IndexError:
raise ValueError(
"There is no observation with POL0, POL60 and POL120 for {0:s} in HST/FOC Legacy Archive".format(target))
b = np.zeros(len(results), dtype=bool)
-    if not proposal_id is None and str(proposal_id) in obs['Proposal ID']:
+    if proposal_id is not None and str(proposal_id) in obs['Proposal ID']:
b[results['Proposal ID'] == str(proposal_id)] = True
else:
n_obs.pprint(len(n_obs)+2)
a = [np.array(i.split(":"), dtype=str) for i in input("select observations to be downloaded ('1,3,4,5' or '1,3:5' or 'all','*' default to 1)\n>").split(',')]
if a[0][0]=='':
a = [np.array(i.split(":"), dtype=str)
for i in input("select observations to be downloaded ('1,3,4,5' or '1,3:5' or 'all','*' default to 1)\n>").split(',')]
if a[0][0] == '':
a = [[1]]
-        if a[0][0] in ['a','all','*']:
-            b = np.ones(len(results),dtype=bool)
+        if a[0][0] in ['a', 'all', '*']:
+            b = np.ones(len(results), dtype=bool)
else:
-            a = [np.array(i,dtype=int) for i in a]
+            a = [np.array(i, dtype=int) for i in a]
for i in a:
if len(i) > 1:
for j in range(i[0], i[1]+1):
@@ -135,19 +138,19 @@ def get_product_list(target=None, proposal_id=None):
dataproduct_type=['image'],
calib_level=[2],
description="DADS C0F file - Calibrated exposure WFPC/WFPC2/FOC/FOS/GHRS/HSP")
-    products['proposal_id'] = Column(products['proposal_id'],dtype='U35')
+    products['proposal_id'] = Column(products['proposal_id'], dtype='U35')
products['target_name'] = Column(observations['target_name'])
for prod in products:
-        prod['proposal_id'] = results['Proposal ID'][results['Dataset']==prod['productFilename'][:len(results['Dataset'][0])].upper()][0]
+        prod['proposal_id'] = results['Proposal ID'][results['Dataset'] == prod['productFilename'][:len(results['Dataset'][0])].upper()][0]
for prod in products:
-        prod['target_name'] = observations['target_name'][observations['obsid']==prod['obsID']][0]
+        prod['target_name'] = observations['target_name'][observations['obsid'] == prod['obsID']][0]
tab = unique(products, ['target_name', 'proposal_id'])
-    if len(tab)>1 and np.all(tab['target_name']==tab['target_name'][0]):
+    if len(tab) > 1 and np.all(tab['target_name'] == tab['target_name'][0]):
target = tab['target_name'][0]
products["Obs"] = [np.argmax(np.logical_and(tab['proposal_id']==data['proposal_id'],tab['target_name']==data['target_name']))+1 for data in products]
products["Obs"] = [np.argmax(np.logical_and(tab['proposal_id'] == data['proposal_id'], tab['target_name'] == data['target_name']))+1 for data in products]
return target, products
@@ -155,17 +158,17 @@ def retrieve_products(target=None, proposal_id=None, output_dir='./data'):
"""
Given a target name and a proposal_id, create the local directories and retrieve the fits files from the MAST Archive
"""
-    target, products = get_product_list(target=target,proposal_id=proposal_id)
+    target, products = get_product_list(target=target, proposal_id=proposal_id)
prodpaths = []
-    data_dir = path_join(output_dir, target)
+    # data_dir = path_join(output_dir, target)
out = ""
-    for obs in unique(products,'Obs'):
+    for obs in unique(products, 'Obs'):
filepaths = []
-        #obs_dir = path_join(data_dir, obs['prodposal_id'])
-        #if obs['target_name']!=target:
+        # obs_dir = path_join(data_dir, obs['prodposal_id'])
+        # if obs['target_name']!=target:
obs_dir = path_join(path_join(output_dir, target), obs['proposal_id'])
if not path_exists(obs_dir):
system("mkdir -p {0:s} {1:s}".format(obs_dir,obs_dir.replace("data","plots")))
system("mkdir -p {0:s} {1:s}".format(obs_dir, obs_dir.replace("data", "plots")))
for file in products['productFilename'][products['Obs'] == obs['Obs']]:
fpath = path_join(obs_dir, file)
if not path_exists(fpath):
@@ -173,8 +176,8 @@ def retrieve_products(target=None, proposal_id=None, output_dir='./data'):
products['dataURI'][products['productFilename'] == file][0], local_path=fpath)[0])
else:
out += "{0:s} : Exists\n".format(file)
-            filepaths.append([obs_dir,file])
-        prodpaths.append(np.array(filepaths,dtype=str))
+            filepaths.append([obs_dir, file])
+        prodpaths.append(np.array(filepaths, dtype=str))
return target, prodpaths
@@ -183,12 +186,12 @@ if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description='Query MAST for target products')
-    parser.add_argument('-t','--target', metavar='targetname', required=False,
+    parser.add_argument('-t', '--target', metavar='targetname', required=False,
help='the name of the target', type=str, default=None)
-    parser.add_argument('-p','--proposal_id', metavar='proposal_id', required=False,
+    parser.add_argument('-p', '--proposal_id', metavar='proposal_id', required=False,
help='the proposal id of the data products', type=int, default=None)
-    parser.add_argument('-o','--output_dir', metavar='directory_path', required=False,
+    parser.add_argument('-o', '--output_dir', metavar='directory_path', required=False,
help='output directory path for the data products', type=str, default="./data")
args = parser.parse_args()
prodpaths = retrieve_products(target=args.target, proposal_id=args.proposal_id)
-    print(prodpaths)
+    print(prodpaths)