
Commit 2592d2c

Merge pull request #33 from GernotMaier/package-data
Add stars to package data
2 parents 57e7181 + 240eb81, commit 2592d2c

9 files changed: +123 -44 lines

README.md (+22 -2)

````diff
@@ -57,8 +57,7 @@ python v2dl5/scripts/generate_runlist.py \

 ```console
 python v2dl5/scripts/reflected_region_analysis.py \
-    --obs_table ../../../VTS/DL3/v490/dl3_pointlike_moderate2tel/obs-index.fits.gz \
-    --runlist my_output_dir/runlist.txt \
+    --run_list my_output_dir/runlist.txt \
     --config examples/reflected_region_analysis.yml \
     --output_dir my_output_dir
 ```
@@ -70,3 +69,24 @@ python v2dl5/scripts/plot_binary_light_curves.py \
     --instrument VERITAS \
     --configuration examples/binary_lightcurve_plotting.yml
 ```
+
+## Auxiliary data
+
+Auxiliary data is stored in v2dl5/data and available at run time. This includes:
+
+### Star catalogues
+
+Hipparcos catalogue for stars with magnitude < 9: [v2dl5/data/hip_mag9.fits.gz](v2dl5/data/hip_mag9.fits.gz).
+Star catalogues are listed in the configuration files as
+
+```yaml
+datasets:
+  exclusion_region:
+    on_radius: 0.5 deg
+    magnitude_B: 7
+    star_exclusion_radius: 0.30 deg
+    fov: 3.5 deg
+    star_file: hip_mag9.fits.gz
+```
+
+Star catalogues are expected to be in the [v2dl5/data](v2dl5/data) directory and in FITS format.
````
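The new README section documents the bundled Hipparcos catalogue and the exclusion-region keys that reference it. For reference, a minimal sketch (not part of the commit; assumes astropy and a checkout of the repository) of inspecting the bundled catalogue with the same B-magnitude cut that v2dl5/sky_regions.py applies:

```python
# Sketch only: open the packaged star catalogue and apply the B < magnitude_B
# cut used for exclusion regions (B is approximated as Vmag + (B-V), as in
# v2dl5/sky_regions.py). The file path and threshold are illustrative.
from astropy.io import fits
from astropy.table import Table

with fits.open("v2dl5/data/hip_mag9.fits.gz") as hip:
    catalogue = Table(hip[1].data)

magnitude_B = 7
bright = catalogue[catalogue["Vmag"] + catalogue["B-V"] < magnitude_B]
print(f"{len(bright)} stars brighter than B = {magnitude_B}")
```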

examples/reflected_region.yml (+1 -1)

```diff
@@ -26,7 +26,7 @@ datasets:
     magnitude_B: 7
     star_exclusion_radius: 0.30 deg
     fov: 3.5 deg
-    star_file: ./data/hip_mag9.fits.gz
+    star_file: hip_mag9.fits.gz
   containment_correction: false
   safe_mask:
     methods: ['aeff-max']
```
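Since `star_file` is now a bare file name resolved from package data, a configuration consumer only needs the name. A hedged sketch (assumes PyYAML and the key layout shown in the README snippet above, which may differ from the full analysis configuration) of reading the example file:

```python
# Sketch only: load the example configuration and print the star-catalogue
# file name that will be resolved from the packaged v2dl5/data directory.
import yaml

with open("examples/reflected_region.yml") as f:
    config = yaml.safe_load(f)

print(config["datasets"]["exclusion_region"]["star_file"])  # hip_mag9.fits.gz
```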

pyproject.toml (+4)

```diff
@@ -4,6 +4,7 @@ build-backend = "setuptools.build_meta"

 [tool.setuptools]
 packages=["v2dl5"]
+include-package-data = true

 [tool.setuptools_scm]
 write_to = "v2dl5/_version.py"
@@ -45,6 +46,9 @@ dependencies = [
 "bug tracker" = "https://github.com/GernotMaier/V2DL5/issues"

 [project.scripts]
+v2dl5-generate-runlist = "v2dl5.scripts.generate_runlist:main"
+v2dl5-print-simplified-light-curve = "v2dl5.scripts.print_simplified_light_curve:main"
+v2dl5-plot-binary-light-curves = "v2dl5.scripts.plot_binary_light_curves:main"
 v2dl5-reflected-region-analysis = "v2dl5.scripts.reflected_region_analysis:main"

 [tool.pytest.ini_options]
```
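With `include-package-data = true` and the added `[project.scripts]` entries, an installed package should expose the scripts directly on the command line. A usage sketch (the output directory and file names are illustrative; the flags are taken from the scripts and README above):

```console
pip install -e .
v2dl5-print-simplified-light-curve --file my_output_dir/light_curve.ecsv
v2dl5-plot-binary-light-curves --instrument VERITAS --configuration examples/binary_lightcurve_plotting.yml
```
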
File renamed without changes.
File renamed without changes.

v2dl5/light_curves/data_reader.py (+36 -35)

```diff
@@ -62,14 +62,11 @@ def _read_fluxes_from_file(self, data_config):
         Read flux from file.

         """
-
         try:
-            if data_config["file_name"].endswith(".csv") or data_config["file_name"].endswith(
-                ".ecsv"
-            ):
+            if data_config["file_name"].endswith((".csv", ".ecsv")):
                 return self._read_fluxes_from_ecsv_file(data_config)
         except KeyError:
-            self._logger.error("File name not found in configuration file")
+            self._logger.error(f"File name not found in configuration {data_config}")
             raise KeyError

     def _add_orbital_parameters(self, data):
@@ -131,34 +128,38 @@ def _read_fluxes_from_ecsv_file(self, data_config, TimeMinMax=True, MJD_min=-1.0
         """
         table = Table.read(data_config["file_name"])
         f = {}
-        if not TimeMinMax:
-            table["time_min"] = table["time"].data
-            table["time_max"] = table["time"].data + 0.1
-
-        # MJD filter
-        condition = np.ones(len(table), dtype=bool)
-        if MJD_min > -1:
-            condition &= table["time_min"] > MJD_min
-        if MJD_max > -1:
-            condition &= table["time_max"] < MJD_max
-        table = table[condition]
-
-        f = {}
-        f["time_min"] = table["time_min"].data.tolist()
-        f["time_max"] = table["time_max"].data.tolist()
-        f["flux"] = table["flux"].data.flatten().tolist()
-        if "flux_err" in table.colnames:
-            f["flux_err"] = table["flux_err"].data.flatten().tolist()
-        else:
-            up = table["flux_up"].data.flatten().tolist()
-            down = table["flux_down"].data.flatten().tolist()
-            f["flux_err"] = [0.5 * abs(u - d) for u, d in zip(up, down)]
-        f["MJD"] = [0.5 * (a + b) for a, b in zip(f["time_min"], f["time_max"])]
-        f["MJD_err"] = [0.5 * (b - a) for a, b in zip(f["time_min"], f["time_max"])]
-        if "flux_ul" in table.colnames:
-            flux_ul = table["flux_ul"].data.flatten().tolist()
-            is_ul = table["is_ul"].data.flatten().tolist()
-            f["flux_ul"] = [flux if is_ul else -1.0 for flux, is_ul in zip(flux_ul, is_ul)]
-        else:
-            f["flux_ul"] = [-1.0 for _ in f["flux"]]
+        try:
+            if not TimeMinMax:
+                table["time_min"] = table["time"].data
+                table["time_max"] = table["time"].data + 0.1
+
+            # MJD filter
+            condition = np.ones(len(table), dtype=bool)
+            if MJD_min > -1:
+                condition &= table["time_min"] > MJD_min
+            if MJD_max > -1:
+                condition &= table["time_max"] < MJD_max
+            table = table[condition]
+
+            f = {}
+            f["time_min"] = table["time_min"].data.tolist()
+            f["time_max"] = table["time_max"].data.tolist()
+            f["flux"] = table["flux"].data.flatten().tolist()
+            if "flux_err" in table.colnames:
+                f["flux_err"] = table["flux_err"].data.flatten().tolist()
+            else:
+                up = table["flux_up"].data.flatten().tolist()
+                down = table["flux_down"].data.flatten().tolist()
+                f["flux_err"] = [0.5 * abs(u - d) for u, d in zip(up, down)]
+            f["MJD"] = [0.5 * (a + b) for a, b in zip(f["time_min"], f["time_max"])]
+            f["MJD_err"] = [0.5 * (b - a) for a, b in zip(f["time_min"], f["time_max"])]
+            if "flux_ul" in table.colnames:
+                flux_ul = table["flux_ul"].data.flatten().tolist()
+                is_ul = table["is_ul"].data.flatten().tolist()
+                f["flux_ul"] = [flux if is_ul else -1.0 for flux, is_ul in zip(flux_ul, is_ul)]
+            else:
+                f["flux_ul"] = [-1.0 for _ in f["flux"]]
+        except KeyError:
+            self._logger.error(f"Incomplete data file; key not found in {data_config['file_name']}")
+            raise KeyError
         return f
```
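For context, the reader above expects ECSV tables with `time_min`/`time_max` (or `time`), `flux`, and either `flux_err` or `flux_up`/`flux_down` columns. A hypothetical sketch (file name and values made up) of writing a file that `_read_fluxes_from_ecsv_file()` would accept:

```python
# Hypothetical input for _read_fluxes_from_ecsv_file(): write a minimal ECSV
# table containing the columns the code above looks for.
from astropy.table import Table

flux_table = Table(
    {
        "time_min": [59000.0, 59001.0],  # MJD
        "time_max": [59000.5, 59001.5],  # MJD
        "flux": [1.2e-12, 0.8e-12],      # flux values (units as in the analysis)
        "flux_err": [0.3e-12, 0.2e-12],
    }
)
flux_table.write("my_fluxes.ecsv", format="ascii.ecsv", overwrite=True)
# With {"file_name": "my_fluxes.ecsv"} in the data configuration, the reader
# takes the "flux_err" branch and fills MJD, MJD_err and flux_ul (-1.0) itself.
```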

v2dl5/scripts/plot_binary_light_curves.py (+1 -4)

```diff
@@ -66,10 +66,7 @@ def _parse():


 def main():
-    """
-    Binary light-curve plotting
-
-    """
+    """Binary light-curve plotting."""
     logging.root.setLevel(logging.INFO)

     args = _parse()
```

v2dl5/scripts/print_simplified_light_curve.py (new file, +52)

```diff
@@ -0,0 +1,52 @@
+#!/usr/bin/python
+"""
+Read gammapy-generated light curve files and print a simplified and readable version of the data.
+
+"""
+
+import argparse
+import logging
+
+import numpy as np
+from astropy.table import Table
+
+
+def extract_single_value(column):
+    return [
+        float(value[0]) if isinstance(value, (list, np.ndarray)) else float(value)
+        for value in column
+    ]
+
+
+def main():
+    """Main function to read and print light curve data."""
+    parser = argparse.ArgumentParser(description="Read and print light curve data.")
+
+    parser.add_argument(
+        "--file",
+        type=str,
+        required=True,
+        help="File (ecsv format) containing light curve data.",
+    )
+
+    args = parser.parse_args()
+
+    logging.basicConfig(level=logging.INFO)
+
+    complicated_table = Table.read(args.file, format="ascii.ecsv")
+    data_short = {
+        "e_min": extract_single_value(complicated_table["e_min"]),
+        "time_min": complicated_table["time_min"],
+        "time_max": complicated_table["time_max"],
+        "flux": extract_single_value(complicated_table["flux"]),
+        "flux_err": extract_single_value(complicated_table["flux_err"]),
+        "flux_ul": extract_single_value(complicated_table["flux_ul"]),
+        "significance": extract_single_value(complicated_table["sqrt_ts"]),
+    }
+
+    short_table = Table(data_short)
+    print(short_table)
+
+
+if __name__ == "__main__":
+    main()
```
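A usage sketch for the new script (the light-curve file name is illustrative); the second form relies on the console-script entry point added in pyproject.toml above:

```console
python v2dl5/scripts/print_simplified_light_curve.py --file my_output_dir/light_curve.ecsv
# or, after installation:
v2dl5-print-simplified-light-curve --file my_output_dir/light_curve.ecsv
```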

v2dl5/sky_regions.py (+7 -2)

```diff
@@ -3,13 +3,13 @@
 """

 import logging
-import os

 from astropy import units as u
 from astropy.coordinates import Angle, SkyCoord, name_resolve
 from astropy.io import fits
 from astropy.table import Table
 from gammapy.maps import WcsGeom
+from importlib_resources import files
 from regions import CircleSkyRegion


@@ -155,6 +155,9 @@ def _read_bright_star_catalogue(self, exclusion_region_dict=None, max_wobble_dis
         """
         Read bright star catalogue from file.

+        Catalogue files are expected to be in the v2dl5/data directory and are distributed as part
+        of the v2dl5 package.
+
         Parameters
         ----------
         exclusion_region_dict: dict
@@ -176,7 +179,9 @@ def _read_bright_star_catalogue(self, exclusion_region_dict=None, max_wobble_dis
         self._logger.info(
             "Reading bright star catalogue from %s", exclusion_region_dict["star_file"]
         )
-        hip = fits.open(os.path.expandvars(exclusion_region_dict["star_file"]))
+        star_file = files("v2dl5.data").joinpath("data/" + exclusion_region_dict["star_file"])
+        hip = fits.open(star_file)
+
         catalogue = Table(hip[1].data)
         catalogue = catalogue[
             catalogue["Vmag"] + catalogue["B-V"] < exclusion_region_dict["magnitude_B"]
```
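The switch from `os.path.expandvars` to `importlib_resources.files` means the catalogue is looked up inside the installed package rather than relative to the working directory. A minimal sketch of that lookup (assumes the `importlib_resources` backport is installed; the commit itself anchors on `v2dl5.data` with a `data/` prefix, while this illustration uses the plain `v2dl5` anchor):

```python
# Sketch only: resolve a FITS file shipped as package data and open it.
# files() returns a path-like Traversable that fits.open() accepts when the
# package is installed as regular files on disk.
from importlib_resources import files

from astropy.io import fits
from astropy.table import Table

star_file = files("v2dl5") / "data" / "hip_mag9.fits.gz"
with fits.open(star_file) as hip:
    catalogue = Table(hip[1].data)
print(catalogue.colnames)
```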
