Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

DL2 and DL3 semi-automatic scripts #250

Draft
wants to merge 33 commits into
base: master
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
33 commits
Select commit Hold shift + click to select a range
cea9fa6
Update README.md
Elisa-Visentin Aug 21, 2024
f9ab472
in & MC v in config
Elisa-Visentin Sep 6, 2024
6c6d32f
in v: merging MAGIC
Elisa-Visentin Sep 6, 2024
adb7abf
in v: coincidence
Elisa-Visentin Sep 6, 2024
b1660d1
in v: fix directories
Elisa-Visentin Sep 6, 2024
27ed173
minor bug
Elisa-Visentin Sep 6, 2024
9ad1904
in v: stereo
Elisa-Visentin Sep 6, 2024
52369e5
in v: merge stereo
Elisa-Visentin Sep 6, 2024
1f9db89
linter
Elisa-Visentin Sep 6, 2024
332acc1
Create wobble_db.py
Elisa-Visentin Sep 6, 2024
441531f
fix config
Elisa-Visentin Sep 10, 2024
136e8a1
Merge branch 'auto_MCP_DL2_DL3' into fix
Elisa-Visentin Sep 10, 2024
5a7d22f
lint
Elisa-Visentin Sep 10, 2024
283835f
Merge branch 'fix' of https://github.com/cta-observatory/magic-cta-pi…
Elisa-Visentin Sep 10, 2024
cc57152
wobble script (draft)
Elisa-Visentin Sep 11, 2024
8e28118
minor fixes
Elisa-Visentin Sep 11, 2024
306f4c3
black
Elisa-Visentin Sep 11, 2024
240fa4d
rename wobble col.
Elisa-Visentin Sep 11, 2024
964867e
lint
Elisa-Visentin Sep 11, 2024
9bdea51
fix db
Elisa-Visentin Sep 11, 2024
84c1a50
lint
Elisa-Visentin Sep 11, 2024
ffb05b0
minor bug fix
Elisa-Visentin Sep 11, 2024
c4fc1e6
Merge pull request #255 from cta-observatory/fix
jsitarek Sep 12, 2024
3941763
ra dec db
Elisa-Visentin Sep 12, 2024
10c55ea
minor fixes
Elisa-Visentin Sep 12, 2024
26e8d52
Update wobble_db.py
Elisa-Visentin Sep 12, 2024
f4b775e
Merge pull request #254 from cta-observatory/wobble
jsitarek Sep 12, 2024
027529b
minor fixes
Elisa-Visentin Sep 13, 2024
5c2e324
Merge branch 'auto_MCP_DL2_DL3' into db_ra_dec
Elisa-Visentin Sep 13, 2024
9f01945
bug fix
Elisa-Visentin Sep 16, 2024
40bb433
Merge branch 'auto_MCP_DL2_DL3' into v_in_out
Elisa-Visentin Sep 16, 2024
38ff6b5
Merge pull request #252 from cta-observatory/v_in_out
jsitarek Sep 17, 2024
dcc0898
Merge pull request #259 from cta-observatory/db_ra_dec
jsitarek Sep 19, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions magicctapipe/scripts/lst1_magic/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -154,11 +154,11 @@ Eventually, to merge DL1 stereo (LST) subruns into runs, we run the `merge_stere

> $ merge_stereo (-c config_auto_MCP.yaml)

### Random forest and DL1 to DL2
### DL1 to DL2

TBD.

### Instrument response function and DL3
### DL2 to DL3

TBD.

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -66,14 +66,19 @@ def configfile_coincidence(target_dir, source_name, config_file):
"event_coincidence": config_dict["event_coincidence"],
}

file_name = f"{target_dir}/v{__version__}/{source_name}/config_coincidence.yaml"
conf_dir = f"{target_dir}/v{__version__}/{source_name}"
os.makedirs(conf_dir, exist_ok=True)

file_name = f"{conf_dir}/config_coincidence.yaml"

with open(file_name, "w") as f:

yaml.dump(conf, f, default_flow_style=False)


def linking_bash_lst(target_dir, LST_runs, source_name, LST_version, env_name, cluster):
def linking_bash_lst(
target_dir, LST_runs, source_name, LST_version, env_name, cluster, version
):

"""
This function links the LST data paths to the working directory and creates bash scripts.
Expand All @@ -92,11 +97,13 @@ def linking_bash_lst(target_dir, LST_runs, source_name, LST_version, env_name, c
Name of the conda environment
cluster : str
Cluster system
version : str
Version of the input (DL1 MAGIC runs) data
"""

coincidence_DL1_dir = f"{target_dir}/v{__version__}/{source_name}"

MAGIC_DL1_dir = f"{target_dir}/v{__version__}/{source_name}/DL1"
MAGIC_DL1_dir = f"{target_dir}/v{version}/{source_name}/DL1"

dates = [os.path.basename(x) for x in glob.glob(f"{MAGIC_DL1_dir}/Merged/[0-9]*")]
if cluster != "SLURM":
Expand Down Expand Up @@ -199,6 +206,9 @@ def main():
source = config["data_selection"]["source_name_output"]

cluster = config["general"]["cluster"]
in_version = config["directories"]["real_input_version"]
if in_version == "":
in_version = __version__

if source_in is None:
source_list = joblib.load("list_sources.dat")
Expand Down Expand Up @@ -227,6 +237,7 @@ def main():
LST_version,
env_name,
cluster,
in_version,
) # linking the data paths to current working directory

print("***** Submitting processess to the cluster...")
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
directories:
workspace_dir: "/fefs/aswg/workspace/elisa.visentin/auto_MCP_PR/" # Output directory where all the data products will be saved.

MC_version: "" #MCP version used to process MC; if not provided, current MCP version will be used
real_input_version: "" #MCP version used to process input real data (to be used in case you want to run only the last steps of the analysis, using lower-level data previously processed); if not provided, current MCP version will be used

data_selection:
source_name_database: "CrabNebula" # MUST BE THE SAME AS IN THE DATABASE; Set to null to process all sources in the given time range.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,16 @@ def main():
out_h5,
key=out_key,
)
if "ra" in df_old:
df_cut["ra"] = np.nan
if "dec" in df_old:
df_cut["dec"] = np.nan
if "MC_dec" in df_old:
df_cut["MC_dec"] = np.nan
if "point_source" in df_old:
df_cut["point_source"] = np.nan
if "wobble_offset" in df_old:
df_cut["wobble_offset"] = np.nan
df_cut = pd.concat([df_old, df_cut]).drop_duplicates(
subset="LST1_run", keep="first"
)
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,164 @@
"""
Add, to the LST database, information on the coordinates and extension of each source, and the MC declination to be used to process it

Usage:
$ set_ra_dec (-b YYYYMMDD -e YYYYMMDD -s source_dict -m mc_dec)
"""

import argparse
import json

import numpy as np
import pandas as pd
import yaml
from astropy.coordinates import SkyCoord
from astropy.coordinates.name_resolve import NameResolveError

from magicctapipe.io import resource_file


def main():
    """
    Update the LST database with source coordinates (RA/Dec), point-source
    (extension) information and the declination of the MC line to be used
    for each source.

    Coordinates are resolved by source name through astropy (Sesame);
    sources that cannot be resolved are looked up in a user-provided JSON
    dictionary. The MC declination assigned to a source is the closest one
    from the user-provided list of available MC declination lines.
    """

    parser = argparse.ArgumentParser()

    parser.add_argument(
        "--begin-date",
        "-b",
        dest="begin",
        type=int,
        default=0,
        help="First date to update database (YYYYMMDD)",
    )
    parser.add_argument(
        "--end-date",
        "-e",
        dest="end",
        type=int,
        default=0,
        help="End date to update database (YYYYMMDD)",
    )
    parser.add_argument(
        "--dict-source",
        "-s",
        dest="source",
        type=str,
        default="./source_dict.json",
        help="File with dictionary of info (RA/Dec/point-source) for sources",
    )
    parser.add_argument(
        "--mc-dec",
        "-m",
        dest="dec_mc",
        type=str,
        default="./dec_mc.json",
        help="File with list of MC declinations",
    )

    args = parser.parse_args()
    config_file = resource_file("database_config.yaml")

    with open(
        config_file, "rb"
    ) as fc:  # "rb" mode opens the file in binary format for reading
        config_dict = yaml.safe_load(fc)

    LST_h5 = config_dict["database_paths"]["LST"]
    LST_key = config_dict["database_keys"]["LST"]

    df_LST = pd.read_hdf(
        LST_h5,
        key=LST_key,
    )
    # Make sure every column this script fills exists, so the np.where
    # calls below always have a column to fall back on.
    for col in ("ra", "dec", "MC_dec", "point_source"):
        if col not in df_LST:
            df_LST[col] = np.nan
    # Keep a full copy: rows outside the selected date range must be
    # restored untouched before the database is written back.
    df_LST_full = df_LST.copy(deep=True)
    if args.begin != 0:
        df_LST = df_LST[df_LST["DATE"].astype(int) >= args.begin]
    if args.end != 0:
        df_LST = df_LST[df_LST["DATE"].astype(int) <= args.end]

    sources = np.unique(df_LST["source"])
    with open(args.source) as f:
        source_dict = json.load(f)
    with open(args.dec_mc) as f:
        dec_mc = np.asarray(json.load(f)).astype(np.float64)
    print("MC declinations: \t", dec_mc)
    print("\n\nChecking RA/Dec...\n\n")
    for i, src in enumerate(sources):

        try:
            # "Crab" alone is ambiguous: resolve the nebula explicitly
            # (also avoids a wasted Sesame query, and avoids falling into
            # the except branch when "Crab" itself does not resolve).
            coord = SkyCoord.from_name("CrabNebula" if src == "Crab" else src)
            src_dec = coord.dec.degree
            src_ra = coord.ra.degree

        except NameResolveError:
            print(f"{i}: {src} not found in astropy. Looking to the dictionaries...")
            entry = source_dict.get(src)
            if entry is not None and entry[0] != "NaN" and entry[1] != "NaN":
                src_ra = float(entry[0])
                src_dec = float(entry[1])

            else:
                print(
                    f"\t {i}: {src} RA and/or Dec not in the dictionary. Please update the dictionary"
                )
                src_ra = np.nan
                src_dec = np.nan

        df_LST["ra"] = np.where(df_LST["source"] == src, src_ra, df_LST["ra"])
        df_LST["dec"] = np.where(df_LST["source"] == src, src_dec, df_LST["dec"])
        if not (np.isnan(src_dec)):
            # Assign the MC line whose declination is closest to the source's.
            df_LST["MC_dec"] = np.where(
                df_LST["source"] == src,
                float(dec_mc[np.argmin(np.abs(src_dec - dec_mc))]),
                df_LST["MC_dec"],
            )
    print("\n\nChecking if point source...\n\n")
    for i, src in enumerate(sources):
        entry = source_dict.get(src)
        if entry is not None and entry[2] != "NaN":
            df_LST["point_source"] = np.where(
                df_LST["source"] == src, str(entry[2]), df_LST["point_source"]
            )
        else:
            print(
                f"\t {i}: {src} extension information not in the dictionaries. Please add it to the dictionaries"
            )
    # Restore rows outside the selected date range; updated rows win on
    # duplicate LST1_run values.
    df_LST = pd.concat([df_LST, df_LST_full]).drop_duplicates(
        subset="LST1_run", keep="first"
    )
    df_LST.to_hdf(
        LST_h5,
        key=LST_key,
        mode="w",
        min_itemsize={
            "lstchain_versions": 20,
            "last_lstchain_file": 90,
            "processed_lstchain_file": 90,
        },
    )


if __name__ == "__main__":
    main()
Loading