"""Create a runlist of runs pointing close to a given source.

Reads the night-wise DL1 datacheck summary files and writes
``<source>_runlist.txt`` into the ``CONFIG_FOLDER`` directory.
"""
import glob
import os
import os.path
from datetime import datetime, timedelta, timezone

import astropy.units as u
import numpy as np
import tables
from astropy.coordinates import SkyCoord
from astropy.table import vstack
from ctapipe.io import read_table

from utils import (
    parse_arguments,
    get_coordinates,
    print_runs,
)
# Parse command-line arguments (source name, optional ra/dec, nights, verbosity).
args = parse_arguments(description="Runlist creation",
                       create_runlist=True,
                       )
user = False
source = args.source_name
if args.verbose:
    print(f'Selected source: {source}')
# Overwrite the name-based lookup if the user explicitly gave coordinates.
# Compare against None (not truthiness) so that ra=0.0 or dec=0.0 is accepted.
if args.ra is not None and args.dec is not None:
    ra = args.ra
    dec = args.dec
    user = True
else:
    ra, dec = get_coordinates(args)
source_coordinates = SkyCoord(ra=ra * u.deg, dec=dec * u.deg)
if user and args.verbose:
    print(f'Coordinates in deg of selected source given by the user: {source_coordinates}')
# Load all the available night-wise DL1_datacheck_YYYYMMDD.h5 files (or at
# least those of the nights which contain the data you intend to analyze).
# The files are available in the IT cluster under the directories:
# /fefs/aswg/data/real/OSA/DL1DataCheck_LongTerm/v0.9/YYYYMMDD/
datacheck_files = []
if args.night:
    # For every requested night also add the preceding calendar date
    # (presumably so runs filed under the previous date are not missed —
    # TODO confirm the night/date convention with the data layout).
    add_date = []
    for night in args.night:
        sub_day = datetime.strptime(night, '%Y%m%d').date() - timedelta(days=1)
        add_date.append(sub_day.strftime('%Y%m%d'))
    for date in add_date:
        if date not in args.night:
            args.night.append(date)
    args.night.sort(key=lambda x: datetime.strptime(x, '%Y%m%d'))
    if args.verbose:
        print(f"Nights selected: {args.night}.")
    for night in args.night:
        datacheck_file = glob.glob(f"/fefs/aswg/data/real/OSA/DL1DataCheck_LongTerm/v0.9/{night}/DL1_datacheck_20*.h5")
        # Guard against nights without a datacheck file: an empty glob result
        # would previously crash with an IndexError on datacheck_file[0].
        if not datacheck_file:
            if args.verbose:
                print(f"No datacheck file found for night {night}... Skipping!")
            continue
        datacheck_files.append(datacheck_file[0])
else:
    # No nights given: take every available night.
    datacheck_files = glob.glob("/fefs/aswg/data/real/OSA/DL1DataCheck_LongTerm/v0.9/20*/DL1_datacheck_20*.h5")
datacheck_files.sort()
if args.verbose:
    print(f'A total of {len(datacheck_files)} files will be read')
# Read the per-run summary table of every datacheck file into one big table.
summary_tables = []
for datacheck_path in datacheck_files:
    # First make sure the file contains the interleaved-calibration tables
    # the later selection relies on; record their presence, then close it.
    with tables.open_file(datacheck_path) as h5file:
        has_pedestals = "pedestals" in h5file.root
        has_flatfield = "flatfield" in h5file.root
    if not has_pedestals:
        if args.verbose:
            print(f"file {datacheck_path} does not contain the interleaved pedestals table... Skipping!!")
        continue
    if not has_flatfield:
        if args.verbose:
            print(f"file {datacheck_path} does not contain the interleaved flatfield table... Skipping!!")
        continue
    run_table = read_table(datacheck_path, "/runsummary/table")
    # A nan number of flatfield events means none was found: store 0 instead.
    run_table['num_flatfield'] = np.where(np.isnan(run_table['num_flatfield']), 0, run_table['num_flatfield'])
    summary_tables.append(run_table)
# Ignore metadata conflicts below, i.e. metadata_conflicts='silent':
# columns which contain some nan are float64, while those which do not are float32.
dcheck_runsummary = vstack(summary_tables, metadata_conflicts='silent')
# Exclude runs with issues in interleaved pedestals: a non-finite
# num_pedestals marks a problematic run.
ped_ok_mask = np.isfinite(dcheck_runsummary['num_pedestals'])
print('Removed runs:', np.array(dcheck_runsummary['runnumber'][~ped_ok_mask]))
# Find the runs with pointing close to the source of interest
telescope_pointing = SkyCoord(ra=dcheck_runsummary['mean_ra'] * u.deg, dec=dcheck_runsummary['mean_dec'] * u.deg)
angular_distance = source_coordinates.separation(telescope_pointing)
# Select wobble pointings at ~0.4 deg from the source, keeping only runs
# with healthy interleaved pedestals (previously ped_ok_mask was computed
# but never applied to the selection).
source_mask = (ped_ok_mask
               & (angular_distance > 0.35 * u.deg)
               & (angular_distance < 0.45 * u.deg))
print_runs(dcheck_runsummary, source_mask, by_date=True)
# Convert each run's unix timestamp to a YYYYMMDD string. The half-day shift
# presumably maps runs taken after midnight (UTC) onto the date the night
# started — TODO confirm against the runlist consumers.
dates = [
    datetime.fromtimestamp(t - 0.5 * 86400, tz=timezone.utc).strftime('%Y%m%d')
    for t in dcheck_runsummary['time'][source_mask]
]
dcheck_runsummary_masked = dcheck_runsummary['runnumber', 'time'][source_mask]
dcheck_runsummary_masked['date'] = dates
del dcheck_runsummary_masked['time']
if args.verbose:
    print(dcheck_runsummary_masked)
# Write the runlist as space-separated "runnumber date" rows, no header.
# Fail with a clear message if CONFIG_FOLDER is unset (previously this
# raised a cryptic TypeError on None + str).
config_folder = os.environ.get('CONFIG_FOLDER')
if config_folder is None:
    raise RuntimeError('CONFIG_FOLDER environment variable is not set.')
runlist_path = os.path.join(config_folder, f'{source}_runlist.txt')
dcheck_runsummary_masked.write(runlist_path, format='pandas.csv', header=False, sep=' ')
if args.verbose:
    if os.path.exists(runlist_path):
        print('')
        print(f'Runlist file {source}_runlist.txt created in {config_folder}')
    else:
        print('ERROR: Not able to create runlist file.')