Implement routine testing, similar to Eventdisplay release testing #17


Merged
merged 11 commits into from
Oct 3, 2023
4 changes: 1 addition & 3 deletions README.md
@@ -1,14 +1,12 @@
# V2DL5 - high-level analysis for VERITAS with gammapy



[![DOI](https://zenodo.org/badge/673002313.svg)](https://zenodo.org/badge/latestdoi/673002313)
[![gammapy](https://img.shields.io/badge/powered%20by-gammapy-orange.svg?style=flat)](https://www.gammapy.org/)
[![GitHub Super-Linter](https://github.com/GernotMaier/V2DL5/actions/workflows/linter.yml/badge.svg)](https://github.com/marketplace/actions/super-linter)

This is a collection of simple scripts for the high-level analysis of VERITAS data with gammapy.

This includes for a given list of runs or for a cone search around a given direction or named source.
Allows running analysis scripts for a given list of runs or for a cone search around the given on\_region direction.

- source detection analysis including integral flux (or flux upper limits), reflection region model
- spectral analysis, reflected region model
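
The end-to-end flow behind these scripts can be sketched with the `Analysis` class touched by this PR. A minimal usage sketch; the preparation of `config`, `sky_regions`, and `v2dl5_data` is assumed and not shown in this diff:

```python
# Minimal usage sketch of v2dl5.analysis.Analysis as changed in this PR.
# `config`, `sky_regions`, and `v2dl5_data` are assumed to be prepared elsewhere.
from v2dl5.analysis import Analysis

analysis = Analysis(args_dict=config, sky_regions=sky_regions, v2dl5_data=v2dl5_data)
analysis.run()    # data reduction, spectral fits, flux points, light curves
analysis.plot()   # figures are written to <output_dir>/plots
analysis.write()  # datasets and models are written to <output_dir>/data
```
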
22 changes: 8 additions & 14 deletions examples/reflected_region.yml
@@ -1,18 +1,11 @@
---
# Configuration for a reflected region analysis
# Provides:
# - significances (TS) (overall; per run)
# - fluxes above given energy threshold
# - energy spectra

observations:
datastore: ../../../VTS/DL3/v490/point-like/
obs_cone:
target: Crab
frame: icrs
lon: 83.628700 deg
lat: 22.014700 deg
radius: 5 deg
datastore: ../../../VTS/DL3/v490/dl3_pointlike_moderate2tel/
# search for observations around the on_region direction
# within a maximum offset of obs_cone_radius
obs_cone_radius: 1.5 deg
required_irf: ['aeff', 'edisp']

datasets:
@@ -23,14 +16,15 @@ datasets:
energy: {min: 0.1 TeV, max: 40 TeV, nbins: 10}
energy_true: {min: 0.05 TeV, max: 100 TeV, nbins: 40}
on_region:
target: Crab
frame: icrs
lon: 83.633 deg
lat: 22.014 deg
lon: 83.628700 deg
lat: 22.014700 deg
exclusion_region:
on_radius: 0.5 deg
magnitude_B: 7
star_exclusion_radius: 0.30 deg
fov: 4.5 deg
fov: 3.5 deg
star_file: ./data/hip_mag9.fits.gz
containment_correction: false
safe_mask:
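
The new `obs_cone_radius` entry replaces the explicit `obs_cone` block: observations are now selected around the `on_region` direction. A sketch of how such a cone search can be done with gammapy; the datastore path and coordinates are taken from the example above, while the column handling is an assumption (the actual v2dl5 selection code is not part of this diff):

```python
import astropy.units as u
from astropy.coordinates import SkyCoord
from gammapy.data import DataStore

# datastore path and on_region coordinates from examples/reflected_region.yml
data_store = DataStore.from_dir("../../../VTS/DL3/v490/dl3_pointlike_moderate2tel/")
on_region = SkyCoord(83.628700 * u.deg, 22.014700 * u.deg, frame="icrs")

# pointing directions of all runs in the observation index table
pointings = SkyCoord(
    data_store.obs_table["RA_PNT"], data_store.obs_table["DEC_PNT"], unit="deg"
)

# keep runs pointing within obs_cone_radius (1.5 deg) of the on_region direction
mask = on_region.separation(pointings) < 1.5 * u.deg
obs_ids = list(data_store.obs_table["OBS_ID"][mask])
observations = data_store.get_observations(obs_ids, required_irf=["aeff", "edisp"])
```
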
75 changes: 47 additions & 28 deletions v2dl5/analysis.py
@@ -3,11 +3,12 @@
"""

import logging
from pathlib import Path

import astropy.units as u
import numpy as np
import yaml
from gammapy.datasets import Datasets, FluxPointsDataset, SpectrumDataset
from gammapy.datasets import Datasets, SpectrumDataset
from gammapy.estimators import FluxPointsEstimator, LightCurveEstimator
from gammapy.makers import (
ReflectedRegionsBackgroundMaker,
@@ -44,19 +45,19 @@ class Analysis:

"""

def __init__(self, args_dict=None, sky_regions=None, data=None):
def __init__(self, args_dict=None, sky_regions=None, v2dl5_data=None):
self._logger = logging.getLogger(__name__)

self.args_dict = args_dict
self._output_dir = args_dict.get("output_dir", None)
self.sky_regions = sky_regions
self._data = data
self.v2dl5_data = v2dl5_data

self.datasets = None
self.spectral_model = None
self.flux_points = None
self.lightcurve_per_obs = None
self.lightcurve_per_night = None
self.light_curve_per_obs = None
self.light_curve_per_night = None

def run(self):
"""
@@ -72,25 +73,35 @@ def run(self):
self._define_spectral_models(model=self.args_dict["fit"]["model"])
self._spectral_fits(datasets=_data_sets)
self._flux_points(_data_sets)
self.lightcurve_per_obs = self._light_curve(_data_sets, None)
self.lightcurve_per_night = self._nightly_light_curve(_data_sets)
self.light_curve_per_obs = self._light_curve(_data_sets, None)
self.light_curve_per_night = self._nightly_light_curve(_data_sets)

def plot(self):
"""
Plot all results.

"""

for dataset in self.datasets:
v2dl5_plot.plot_fit(dataset, self._output_dir)
_plot_dir = Path(f"{self._output_dir}/plots")

v2dl5_plot.plot_flux_points(self.flux_points, self._output_dir)
v2dl5_plot.plot_sed(
FluxPointsDataset(data=self.flux_points, models=self.spectral_model.copy()),
self._output_dir,
plotter = v2dl5_plot.Plot(
v2dl5_data=self.v2dl5_data,
data_set=self.datasets,
on_region=self.sky_regions.on_region,
output_dir=_plot_dir,
)
plotter.plot_event_histograms()
plotter.plot_irfs()
plotter.plot_maps(exclusion_mask=self.sky_regions.exclusion_mask)
plotter.plot_source_statistics()
plotter.plot_spectra(
flux_points=self.flux_points,
model=self.spectral_model,
)
plotter.plot_light_curves(
light_curve_per_obs=self.light_curve_per_obs,
light_curve_per_night=self.light_curve_per_night,
)
v2dl5_plot.plot_light_curve(self.lightcurve_per_obs, "per observation", self._output_dir)
v2dl5_plot.plot_light_curve(self.lightcurve_per_night, "per night", self._output_dir)

def write(self):
"""
@@ -101,8 +112,8 @@ def write(self):
for dataset in self.datasets:
self._write_datasets(dataset, f"{dataset.name}.fits.gz")
self._write_datasets(self.flux_points, "flux_points.ecsv", "gadf-sed")
self._write_datasets(self.lightcurve_per_obs, "lightcurve_per_obs.ecsv", "lightcurve")
self._write_datasets(self.lightcurve_per_night, "lightcurve_per_night.ecsv", "lightcurve")
self._write_datasets(self.light_curve_per_obs, "light_curve_per_obs.ecsv", "lightcurve")
self._write_datasets(self.light_curve_per_night, "light_curve_per_night.ecsv", "lightcurve")
if self.spectral_model:
self._write_yaml(self.spectral_model.to_dict(), "spectral_model.yaml")

@@ -124,12 +135,12 @@ def _write_datasets(self, datasets, filename, file_format=None):
if datasets is None:
return

_ofile = f"{self._output_dir}/{filename}"
self._logger.info("Writing datasets to %s", _ofile)
_out_file = f"{self._output_dir}/data/{filename}"
self._logger.info("Writing datasets to %s", _out_file)
if file_format is not None:
datasets.write(_ofile, overwrite=True, format=file_format)
datasets.write(_out_file, overwrite=True, format=file_format)
else:
datasets.write(_ofile, overwrite=True)
datasets.write(_out_file, overwrite=True)

def _write_yaml(self, data_dict, filename):
"""
@@ -144,9 +155,11 @@ def _write_yaml(self, data_dict, filename):

"""

_ofile = f"{self._output_dir}/{filename}"
self._logger.info("Writing dataset to %s", _ofile)
with open(_ofile, "w", encoding="utf-8") as outfile:
_data_dir = Path(f"{self._output_dir}/data")
_data_dir.mkdir(parents=True, exist_ok=True)
_out_file = f"{_data_dir}/{filename}"
self._logger.info("Writing dataset to %s", _out_file)
with open(_out_file, "w", encoding="utf-8") as outfile:
yaml.dump(data_dict, outfile, default_flow_style=False)

def _data_reduction(self):
@@ -178,13 +191,16 @@ def _data_reduction(self):

self.datasets = Datasets()

for obs_id, observation in zip(self._data.runs, self._data.get_observations()):
for obs_id, observation in zip(self.v2dl5_data.runs, self.v2dl5_data.get_observations()):
dataset = dataset_maker.run(dataset_empty.copy(name=str(obs_id)), observation)
dataset_on_off = bkg_maker.run(dataset, observation)
dataset_on_off = safe_mask_masker.run(dataset_on_off, observation)
self.datasets.append(dataset_on_off)

print("Run-wise results:")
self._print_results(self.datasets.info_table(cumulative=False))
print("Cumulative results:")
self._print_results(self.datasets.info_table(cumulative=True))
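
The makers used in this loop (`dataset_empty`, `dataset_maker`, `bkg_maker`, `safe_mask_masker`) are built in the collapsed part of `_data_reduction`. A rough sketch of a typical gammapy setup consistent with the example YAML; the safe-mask settings are an assumption, since that block is collapsed above:

```python
from gammapy.datasets import SpectrumDataset
from gammapy.makers import (
    ReflectedRegionsBackgroundMaker,
    SafeMaskMaker,
    SpectrumDatasetMaker,
)
from gammapy.maps import MapAxis, RegionGeom

# energy axes with values from examples/reflected_region.yml
energy_axis = MapAxis.from_energy_bounds("0.1 TeV", "40 TeV", nbin=10, name="energy")
energy_axis_true = MapAxis.from_energy_bounds(
    "0.05 TeV", "100 TeV", nbin=40, name="energy_true"
)

# `on_region` and `exclusion_mask` come from the sky-region setup (assumed here)
geom = RegionGeom.create(region=on_region, axes=[energy_axis])
dataset_empty = SpectrumDataset.create(geom=geom, energy_axis_true=energy_axis_true)

dataset_maker = SpectrumDatasetMaker(
    containment_correction=False, selection=["counts", "exposure", "edisp"]
)
bkg_maker = ReflectedRegionsBackgroundMaker(exclusion_mask=exclusion_mask)
# safe-mask settings are an assumption; the YAML safe_mask block is collapsed
safe_mask_masker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10)
```
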

def _print_results(self, info_table):
"""
@@ -214,10 +230,11 @@ def _print_results(self, info_table):
"sqrt_ts",
]
)
print()

def _spectral_fits(self, datasets=None):
"""
Perform spectral fits.
Spectral fitting.

"""

@@ -251,8 +268,6 @@ def _define_spectral_models(self, model=None):

self.spectral_model = SkyModel(spectral_model=_spectral_model, name=model)

return [self.spectral_model]

def _flux_points(self, datasets):
"""
Calculate flux points.
@@ -314,6 +329,7 @@ def _light_curve(self, datasets, time_intervals=None):
def _nightly_light_curve(self, _data_sets):
"""
Combine observations per night and calculate light curve.
Not applicable for stacked analysis.

Parameters
----------
@@ -322,6 +338,9 @@

"""

if self.args_dict["datasets"]["stack"]:
return None

self._logger.info("Estimating daily light curve")

time_intervals = v2dl5_time.get_list_of_nights(
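
`v2dl5_time.get_list_of_nights` is called above but not part of this diff. A purely illustrative sketch of nightly time intervals in the form expected by gammapy's `LightCurveEstimator` (the real v2dl5_time implementation may differ):

```python
import numpy as np
from astropy.time import Time


def get_list_of_nights(obs_times):
    """Group observation times into nightly [start, stop] intervals.

    Purely illustrative; the real v2dl5_time implementation is not in this diff.
    """
    # shift by 12 h so that each astronomical night maps onto one integer MJD
    night_ids = np.unique(np.floor(obs_times.mjd - 0.5))
    return [
        [Time(night + 0.5, format="mjd"), Time(night + 1.5, format="mjd")]
        for night in night_ids
    ]
```
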
14 changes: 3 additions & 11 deletions v2dl5/configuration.py
@@ -1,7 +1,7 @@
"""
V2DL5 configuration

Includes reading of configuration from file and default parameters.
Read configuration from file (or use default parameters).

"""

@@ -27,9 +27,6 @@ def configuration(args):
if args.config is not None:
args_dict.update(_read_config_from_file(args.config))

# args_dict["target"] = args.target
# args_dict["ra"] = args.ra
# args_dict["dec"] = args.dec
args_dict["run_list"] = args.run_list
args_dict["output_dir"] = args.output_dir

@@ -69,13 +66,7 @@ def _default_config():
return {
"observations": {
"datastore": "../../../VTS/DL3/v490/point-like/",
"target": None,
"obs_cone": {
"frame": "icrs",
"lon": "83.628700 deg",
"lat": "22.014700 deg",
"radius": "5 deg",
},
"obs_cone_radius": "5. deg",
"required_irf": ["aeff", "edisp"],
},
"datasets": {
@@ -88,6 +79,7 @@
}
},
"on_region": {
"target": None,
"frame": "icrs",
"lon": "83.633 deg",
"lat": "22.014 deg",
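
With `target` moved from `observations` into `on_region`, a named source can be resolved instead of explicit coordinates. A sketch of how such a configuration might be turned into a sky position; this helper is hypothetical and not part of the diff:

```python
from astropy.coordinates import SkyCoord


def resolve_on_region_center(args_dict):
    """Hypothetical helper: resolve the on_region center from the configuration."""
    on_region = args_dict["datasets"]["on_region"]
    if on_region.get("target"):
        # resolve a named source (e.g. "Crab") via a CDS name lookup
        return SkyCoord.from_name(on_region["target"])
    return SkyCoord(on_region["lon"], on_region["lat"], frame=on_region["frame"])
```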