Merge pull request #880 from cta-observatory/merge_run_summaries
Create script to merge run summaries with drive logs into a single file
rlopezcoto authored Oct 31, 2023
2 parents 89762bf + 469de7c commit 644e43b
Showing 2 changed files with 216 additions and 16 deletions.
187 changes: 187 additions & 0 deletions lstchain/scripts/lstchain_merge_run_summaries.py
@@ -0,0 +1,187 @@
"""
Create a summary of all runs from daily run summaries,
adding pointing information.
It is also possible to append a single night to an already
existing merged summary file when date is especified.
"""

import logging
from argparse import ArgumentParser
from datetime import datetime
from pathlib import Path

import astropy.units as u
import numpy as np
from astropy.table import Table, vstack
from astropy.time import Time
from ctapipe_io_lst import LSTEventSource
from ctapipe_io_lst.event_time import read_run_summary
from ctapipe_io_lst.pointing import PointingSource

log = logging.getLogger('create_run_overview')

RUN_SUMMARY_COLUMNS = [
    'run_id',
    'n_subruns',
    'run_type',
    'run_start',
]

base_path = Path('/fefs/aswg/data/real/monitoring')

parser = ArgumentParser()
parser.add_argument('output_file', type=Path)
parser.add_argument('-m', '--monitoring-path', type=Path, default=base_path)
parser.add_argument('-v', '--verbose', action='store_true')
# Option to only append a single night to an existing merged summary file.
# By default, the whole directory with nightly summaries is processed.
parser.add_argument(
    '-d',
    '--date',
    type=str,
    default=None,
    help='Date in YYYYMMDD format. When a date is given, only the summary '
         'of that night is appended to an existing merged summary file',
)


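# LST subarray description, passed to PointingSource in get_pointing_info below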
SUBARRAY = LSTEventSource.create_subarray()


def get_pointing_info(times, drive_report):
    pointing_source = PointingSource(
        SUBARRAY,
        drive_report_path=drive_report,
    )
    try:
        pointing_source._read_drive_report_for_tel(1)
        valid = pointing_source.drive_log[1]['unix_time'] != 0
        pointing_source.drive_log[1] = pointing_source.drive_log[1][valid]
        if np.count_nonzero(valid) < 2:
            raise ValueError('Not enough values')

    except Exception:
        # Drive report missing or unusable: fall back to NaN pointing
        return {
            'ra': np.full(len(times), np.nan) * u.deg,
            'dec': np.full(len(times), np.nan) * u.deg,
            'alt': np.full(len(times), np.nan) * u.rad,
            'az': np.full(len(times), np.nan) * u.rad,
        }

    pointing_info = {k: [] for k in ('ra', 'dec', 'alt', 'az')}

    for time in times:
        try:
            ra, dec = pointing_source.get_pointing_position_icrs(tel_id=1, time=time)
        except ValueError:
            ra = dec = np.nan * u.deg

        pointing_info['ra'].append(ra)
        pointing_info['dec'].append(dec)

        try:
            altaz = pointing_source.get_pointing_position_altaz(tel_id=1, time=time)
            alt = altaz.altitude
            az = altaz.azimuth
        except ValueError:
            alt = az = np.nan * u.rad

        pointing_info['alt'].append(alt)
        pointing_info['az'].append(az)

    return pointing_info
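# Minimal usage sketch for get_pointing_info (the time value and file name are
# illustrative; real drive reports follow the DrivePosition_log_YYYYMMDD.txt
# pattern used in main below):
#
#   times = Time([1698790000], format='unix_tai')
#   drive_report = Path('DrivePositioning/DrivePosition_log_20231031.txt')
#   pointing = get_pointing_info(times, drive_report)
#   pointing['ra'][0]  # NaN deg if the drive report could not be read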


def merge_run_summary_with_pointing(run_summary, drive_report):
    table = read_run_summary(run_summary)[RUN_SUMMARY_COLUMNS]

    if len(table) == 0:
        return None

    table['run_start'] = Time(table['run_start'] / 1e9, format='unix_tai', scale='utc')

    pointing_info = get_pointing_info(table['run_start'], drive_report)
    for k, v in pointing_info.items():
        table[k] = u.Quantity(v)

    # add the date as a column and remove it from the table metadata
    # so that merging the nightly tables does not complain
    table['date'] = table.meta['date']
    del table.meta['date']
    del table.meta['lstchain_version']

    table['run_start'].format = 'isot'
    # reorder columns
    table = table[
        [
            'date',
            'run_id',
            'run_type',
            'n_subruns',
            'run_start',
            'ra',
            'dec',
            'alt',
            'az',
        ]
    ]
    return table
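# Note: runs whose drive report exists but cannot be interpolated are kept
# with NaN pointing values (see the fallback in get_pointing_info) rather
# than being dropped from the merged summary.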


def main():
    args = parser.parse_args()

    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)

    run_summary_dir = args.monitoring_path / 'RunSummary'
    drive_report_dir = args.monitoring_path / 'DrivePositioning'

    if args.date is not None:
        # Append only the run summary corresponding to this date
        datetime.strptime(args.date, '%Y%m%d')  # validates the YYYYMMDD format
        if not args.output_file.is_file():
            raise ValueError(f'Output file {args.output_file} does not exist.')

        log.info(f'Appending {args.date} to {args.output_file}')
        run_summary = run_summary_dir / f'RunSummary_{args.date}.ecsv'
        drive_report = drive_report_dir / f'DrivePosition_log_{args.date}.txt'
        summary = merge_run_summary_with_pointing(run_summary, drive_report)
        if summary is not None:
            old_summary = Table.read(args.output_file)
            new_summary = vstack([old_summary, summary])
            new_summary.write(args.output_file, overwrite=True)
        return

    # Otherwise, merge all summary files found
    run_summaries = sorted(run_summary_dir.glob('RunSummary*.ecsv'))
    log.info('Found %d run summaries', len(run_summaries))

    summaries = []

    for run_summary in run_summaries:
        log.debug('Processing %s', run_summary)
        night = datetime.strptime(run_summary.stem, 'RunSummary_%Y%m%d')

        drive_report = drive_report_dir / f'DrivePosition_log_{night:%Y%m%d}.txt'

        if not drive_report.is_file():
            log.error(f'No drive report found for {night:%Y-%m-%d}')
            continue

        summary = merge_run_summary_with_pointing(
            run_summary,
            drive_report,
        )
        if summary is not None:
            summaries.append(summary)

    # Per-night metadata (date, lstchain_version) was stripped above,
    # so vstack can merge the tables without meta conflicts.
    vstack(summaries).write(args.output_file, overwrite=True)


if __name__ == '__main__':
    main()
45 changes: 29 additions & 16 deletions lstchain/scripts/tests/test_lstchain_scripts.py
@@ -1,5 +1,8 @@
import json
import os
import shutil
import subprocess as sp
from pathlib import Path

import numpy as np
import pandas as pd
@@ -8,24 +11,17 @@
import tables
from importlib.resources import files
from astropy import units as u
from astropy.table import Table
from astropy.time import Time
from ctapipe.instrument import SubarrayDescription

from ctapipe.io import read_table

from lstchain.io.io import (
    dl1_params_lstcam_key,
    dl2_params_lstcam_key,
    dl1_images_lstcam_key,
    get_dataset_keys,
    get_srcdep_params,
    dl1_params_tel_mon_ped_key,
    dl1_params_tel_mon_cal_key,
    dl1_params_tel_mon_flat_key,
)

from lstchain.io.config import get_standard_config, get_srcdep_config
import json
from lstchain.io.config import get_srcdep_config, get_standard_config
from lstchain.io.io import (dl1_images_lstcam_key, dl1_params_lstcam_key,
                            dl1_params_tel_mon_cal_key,
                            dl1_params_tel_mon_flat_key,
                            dl1_params_tel_mon_ped_key, dl2_params_lstcam_key,
                            get_dataset_keys, get_srcdep_params)


def find_entry_points(package_name):
@@ -493,9 +489,10 @@ def test_mc_r0_to_dl2(tmp_path, rf_models, mc_gamma_testfile):


def test_read_mc_dl2_to_QTable(simulated_dl2_file):
    from lstchain.io.io import read_mc_dl2_to_QTable
    import astropy.units as u

    from lstchain.io.io import read_mc_dl2_to_QTable

    events, sim_info, simu_geomag = read_mc_dl2_to_QTable(simulated_dl2_file)
    assert "true_energy" in events.colnames
    assert sim_info.energy_max == 5 * u.TeV
@@ -516,9 +513,10 @@ def test_read_data_dl2_to_QTable(temp_dir_observed_files, observed_dl1_files):

@pytest.mark.private_data
def test_run_summary(run_summary_path):
    from astropy.table import Table
    from datetime import datetime

    from astropy.table import Table

    date = "20200218"

    assert run_summary_path.is_file()
@@ -539,3 +537,18 @@ def test_run_summary(run_summary_path):
assert "dragon_reference_source" in run_summary_table.columns

assert (run_summary_table["run_type"] == ["DATA", "PEDCALIB", "DATA"]).all()


@pytest.mark.private_data
def test_merge_run_summaries(tmp_path):
    test_data = Path(os.getenv('LSTCHAIN_TEST_DATA', 'test_data'))
    monitoring_path = test_data / "real/monitoring"
    merged_file = tmp_path / "merged_summaries.ecsv"
    run_program(
        "lstchain_merge_run_summaries", "-m", monitoring_path, merged_file
    )
    assert merged_file.exists()
    table = Table.read(merged_file)
    # Since a drive log is only present for the first night in the test
    # sample, only the runs of the first summary appear
    assert 2005 in table["run_id"]
