
Bug fixes for bulk processing #23

Merged: 13 commits, Oct 22, 2023
10 changes: 6 additions & 4 deletions examples/reflected_region.yml
@@ -13,13 +13,13 @@ datasets:
stack: false
geom:
axes:
energy: {min: 0.1 TeV, max: 40 TeV, nbins: 10}
energy: {min: 0.1 TeV, max: 100 TeV, nbins: 30}
energy_true: {min: 0.05 TeV, max: 100 TeV, nbins: 40}
on_region:
target: Crab
frame: icrs
lon: 83.628700 deg
lat: 22.014700 deg
# frame: icrs
# lon: 83.628700 deg
# lat: 22.014700 deg
exclusion_region:
on_radius: 0.5 deg
magnitude_B: 7
@@ -35,6 +35,8 @@ datasets:
fit:
fit_range: {min: 0.1 TeV, max: 20 TeV}
model: pl
index: 2.5
reference_energy: 1 TeV

flux_points:
energy: {min: 0.1 TeV, max: 20 TeV, nbins: 10}
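With these changes the fit section now carries the spectral-model starting values; analysis.py below passes the whole block to _define_spectral_models instead of only the model name. A minimal sketch of what the parsed block looks like, assuming fit is read as a top-level key (as self.args_dict["fit"] in analysis.py suggests):

import yaml

config = """
fit:
  fit_range: {min: 0.1 TeV, max: 20 TeV}
  model: pl
  index: 2.5
  reference_energy: 1 TeV
"""

fit_config = yaml.safe_load(config)["fit"]
print(fit_config["model"])             # 'pl'
print(fit_config["index"])             # 2.5 (parsed as a float)
print(fit_config["reference_energy"])  # '1 TeV' (a plain string, converted to a Quantity later)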
87 changes: 56 additions & 31 deletions v2dl5/analysis.py
@@ -22,7 +22,6 @@
PowerLawSpectralModel,
SkyModel,
)
from IPython.display import display

import v2dl5.plot as v2dl5_plot
import v2dl5.time as v2dl5_time
@@ -55,6 +54,7 @@ def __init__(self, args_dict=None, sky_regions=None, v2dl5_data=None):

self.datasets = None
self.spectral_model = None
self.fit_results = None
self.flux_points = None
self.light_curve_per_obs = None
self.light_curve_per_night = None
@@ -70,7 +70,7 @@ def run(self):
_data_sets = Datasets(self.datasets).stack_reduce()
else:
_data_sets = self.datasets
self._define_spectral_models(model=self.args_dict["fit"]["model"])
self._define_spectral_models(model=self.args_dict["fit"])
self._spectral_fits(datasets=_data_sets)
self._flux_points(_data_sets)
self.light_curve_per_obs = self._light_curve(_data_sets, None)
@@ -94,10 +94,13 @@ def plot(self):
plotter.plot_irfs()
plotter.plot_maps(exclusion_mask=self.sky_regions.exclusion_mask)
plotter.plot_source_statistics()
plotter.plot_spectra(
flux_points=self.flux_points,
model=self.spectral_model,
)
if self.fit_results.success:
plotter.plot_spectra(
flux_points=self.flux_points,
model=self.spectral_model,
)
else:
self._logger.warning("Skipping spectral plots because fit failed")
plotter.plot_light_curves(
light_curve_per_obs=self.light_curve_per_obs,
light_curve_per_night=self.light_curve_per_night,
@@ -112,12 +115,16 @@ def write(self):
for dataset in self.datasets:
self._write_datasets(dataset, f"{dataset.name}.fits.gz")
self._write_datasets(self.flux_points, "flux_points.ecsv", "gadf-sed")
self._write_datasets(self.light_curve_per_obs, "light_curve_per_obs.ecsv", "lightcurve")
self._write_datasets(self.light_curve_per_night, "light_curve_per_night.ecsv", "lightcurve")
self._write_datasets(
self.light_curve_per_obs, "light_curve_per_obs.ecsv", "lightcurve", "flux"
)
self._write_datasets(
self.light_curve_per_night, "light_curve_per_night.ecsv", "lightcurve", "flux"
)
if self.spectral_model:
self._write_yaml(self.spectral_model.to_dict(), "spectral_model.yaml")

def _write_datasets(self, datasets, filename, file_format=None):
def _write_datasets(self, datasets, filename, file_format=None, sed_type=None):
"""
Write datasets to disk.

@@ -138,7 +145,10 @@ def _write_datasets(self, datasets, filename, file_format=None):
_out_file = f"{self._output_dir}/data/{filename}"
self._logger.info("Writing datasets to %s", _out_file)
if file_format is not None:
datasets.write(_out_file, overwrite=True, format=file_format)
if sed_type is not None:
datasets.write(_out_file, overwrite=True, format=file_format, sed_type=sed_type)
else:
datasets.write(_out_file, overwrite=True, format=file_format)
else:
datasets.write(_out_file, overwrite=True)

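The nested conditionals over file_format and sed_type above could equally be collapsed by assembling the keyword arguments once. A sketch of that alternative (not part of this diff):

def _write_datasets(self, datasets, filename, file_format=None, sed_type=None):
    # sketch: build the write() kwargs once instead of nesting if-blocks
    _out_file = f"{self._output_dir}/data/{filename}"
    self._logger.info("Writing datasets to %s", _out_file)
    kwargs = {}
    if file_format is not None:
        kwargs["format"] = file_format
    if sed_type is not None:
        kwargs["sed_type"] = sed_type
    datasets.write(_out_file, overwrite=True, **kwargs)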
@@ -197,9 +207,9 @@ def _data_reduction(self):
dataset_on_off = safe_mask_masker.run(dataset_on_off, observation)
self.datasets.append(dataset_on_off)

print("Run-wise results:")
self._logger.info("Run-wise results:")
self._print_results(self.datasets.info_table(cumulative=False))
print("Cumulative results:")
self._logger.info("Cumulative results:")
self._print_results(self.datasets.info_table(cumulative=True))

def _print_results(self, info_table):
@@ -241,32 +251,34 @@ def _spectral_fits(self, datasets=None):
datasets.models = self.spectral_model

_fit = Fit()
result_joint = _fit.run(datasets=datasets)
print(result_joint)
display(result_joint.models.to_parameters_table())
self.fit_results = _fit.run(datasets=datasets)
self._logger.info(self.fit_results)
self.fit_results.models.to_parameters_table().pprint()

def _define_spectral_models(self, model=None):
""" "
def _define_spectral_models(self, model):
"""
Spectral models

"""

_spectral_model = None
if model == "pl":
if model.get("model", "pl") == "pl":
_spectral_model = PowerLawSpectralModel(
amplitude=1e-12 * u.Unit("cm-2 s-1 TeV-1"),
index=2,
reference=1 * u.TeV,
index=model.get("index", 2.0),
reference=u.Quantity(model.get("reference_energy", "1.0 TeV")),
)
elif model == "ecpl":
elif model["model"] == "ecpl":
_spectral_model = ExpCutoffPowerLawSpectralModel(
amplitude=1e-12 * u.Unit("cm-2 s-1 TeV-1"),
index=2,
lambda_=0.1 * u.Unit("TeV-1"),
reference=1 * u.TeV,
index=model.get("index", 2.0),
lambda_=u.Quantity(model.get("lambda", "0.1 TeV-1")),
reference=u.Quantity(model.get("reference_energy", "1.0 TeV")),
)

self.spectral_model = SkyModel(spectral_model=_spectral_model, name=model)
self.spectral_model = SkyModel(
spectral_model=_spectral_model, name=model.get("model", "pl")
)

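The ecpl branch follows the same pattern of reading parameters from the fit block with defaults. A minimal sketch of the model it builds; the config values here are hypothetical, not taken from this repository:

import astropy.units as u
from gammapy.modeling.models import ExpCutoffPowerLawSpectralModel

model_config = {"model": "ecpl", "index": 2.2, "lambda": "0.05 TeV-1"}  # hypothetical values

spectral_model = ExpCutoffPowerLawSpectralModel(
    amplitude=1e-12 * u.Unit("cm-2 s-1 TeV-1"),
    index=model_config.get("index", 2.0),
    lambda_=u.Quantity(model_config.get("lambda", "0.1 TeV-1")),
    reference=u.Quantity(model_config.get("reference_energy", "1.0 TeV")),
)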
def _flux_points(self, datasets):
"""
@@ -287,8 +299,10 @@ def _flux_points(self, datasets):
fpe = FluxPointsEstimator(energy_edges=energy_edges, selection_optional="all")
self.flux_points = fpe.run(datasets=datasets)

display(self.flux_points.to_table(sed_type="dnde", formatted=True))
display(self.flux_points.to_table(sed_type="e2dnde", formatted=True))
self.flux_points.to_table(sed_type="dnde", formatted=True).pprint_all()
self.flux_points.to_table(sed_type="e2dnde", formatted=True).pprint_all()

def _light_curve(self, datasets, time_intervals=None):
"""
@@ -318,11 +332,24 @@
* u.TeV,
time_intervals=time_intervals,
reoptimize=False,
selection_optional="all",
selection_optional=["all"],
)
_light_curve = lc_maker_1d.run(datasets)

display(_light_curve.to_table(sed_type="flux", format="lightcurve"))
_table = _light_curve.to_table(sed_type="flux", format="lightcurve")
print(
_table[
"time_min",
"time_max",
"e_min",
"e_max",
"flux",
"flux_err",
"flux_ul",
"sqrt_ts",
]
)
_table.pprint_all()

return _light_curve

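The column subset printed above gives a readable summary before pprint_all dumps the full table. Reading a written light curve back in is then a one-liner; a sketch, assuming "output" as the output directory passed to the analysis:

from astropy.table import Table

# read back the per-observation light curve written by write()
table = Table.read("output/data/light_curve_per_obs.ecsv", format="ascii.ecsv")
table["time_min", "time_max", "flux", "flux_err"].pprint()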
@@ -347,8 +374,6 @@ def _nightly_light_curve(self, _data_sets):
_data_sets, time_zone=self.args_dict["light_curve"]["time_zone"]
)

print(time_intervals, type(time_intervals))

return self._light_curve(_data_sets, time_intervals=time_intervals)

def _get_energy_axis(self, name="energy"):
5 changes: 3 additions & 2 deletions v2dl5/configuration.py
@@ -51,9 +51,10 @@ def _read_config_from_file(config):
try:
args_dict = yaml.safe_load(stream)
except yaml.YAMLError as exc:
print(exc)
_logger.error(exc)
raise

print(args_dict)
_logger.info(args_dict)
return args_dict


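Routing the parser error and the parsed configuration through the module logger keeps them subject to the configured log level rather than always printing to stdout. A minimal sketch of the updated function, with the surrounding open() call assumed:

import logging
import yaml

_logger = logging.getLogger(__name__)

def _read_config_from_file(config):
    # sketch: read a YAML configuration file, logging rather than printing errors
    with open(config, encoding="utf-8") as stream:
        try:
            args_dict = yaml.safe_load(stream)
        except yaml.YAMLError as exc:
            _logger.error(exc)
            raise
    _logger.info(args_dict)
    return args_dict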
33 changes: 26 additions & 7 deletions v2dl5/data.py
@@ -63,21 +63,32 @@ def get_observations(self):

return self._data_store

def get_observations(self, reflected_region=True):
def get_observations(self, reflected_region=True, skip_missing=False):
"""
Return observations.

Reflected region analysis requires effective area and energy dispersion only.
Return list of observations.

Parameters
----------
reflected_region : bool
Reflected region analysis.
skip_missing : bool
Skip missing observations.

Returns
-------
observations : list of `~gammapy.data.Observation`
List of observations.

"""

required_irf = "point-like"
return self._data_store.get_observations(self.runs, required_irf=required_irf)
required_irf = "full-enclosure"
if reflected_region:
required_irf = "point-like"
return self._data_store.get_observations(
self.runs,
required_irf=required_irf,
skip_missing=skip_missing,
)

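Callers running a full-enclosure (3D) analysis can now request the matching IRFs and tolerate runs with missing files. A hypothetical usage sketch, where data is an instance of this class:

# hypothetical usage: full-enclosure IRFs, skipping runs whose files are absent
observations = data.get_observations(reflected_region=False, skip_missing=True)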
def _from_run_list(self, run_list):
"""
@@ -94,12 +105,17 @@ def _from_run_list(self, run_list):
return None

try:
_runs = np.loadtxt(run_list, dtype=int)
_runs = np.loadtxt(run_list, dtype=int, usecols=0)
except OSError:
self._logger.error("Run list %s not found.", run_list)
raise

_runs = [_runs] if _runs.ndim == 0 else np.ndarray.tolist(_runs)

self._logger.info("Reading run list with %d observations from %s", len(_runs), run_list)
if len(_runs) == 0:
self._logger.error("Run list is empty.")
raise ValueError
return _runs

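usecols=0 lets the loader ignore trailing columns in the run list, and the ndim check covers single-run files, for which np.loadtxt returns a 0-d array. A short sketch with hypothetical run numbers:

import io
import numpy as np

# run list with a trailing comment column; only column 0 is read
runs = np.loadtxt(io.StringIO("64080 good\n64081 marginal\n"), dtype=int, usecols=0)
print(runs.tolist())  # [64080, 64081]

# a single-run list yields a 0-d array, hence the ndim == 0 guard
single = np.loadtxt(io.StringIO("64080\n"), dtype=int, usecols=0)
print(single.ndim)  # 0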
def _from_target(self, obs_cone_radius):
@@ -185,6 +201,9 @@ def _update_gti(self, bti):

"""

if bti is None:
return

for obs in self.get_observations():
bti_pairs = [
(item["bti_start"], item["bti_start"] + item["bti_length"])