diff --git a/environment.yml b/environment.yml index 296e8e17..229de5e2 100644 --- a/environment.yml +++ b/environment.yml @@ -1,5 +1,5 @@ # A conda environment with all useful package for ctapipe developers -name: magic-lst1 +name: magic-lst channels: - conda-forge dependencies: diff --git a/magicctapipe/conftest.py b/magicctapipe/conftest.py index c8c2c51e..d9e15156 100644 --- a/magicctapipe/conftest.py +++ b/magicctapipe/conftest.py @@ -7,6 +7,7 @@ from math import trunc from ctapipe.utils.download import download_file_cached from magicctapipe.utils import resource_file +import yaml maxjoint = 13000 maxmonly = 500 @@ -42,11 +43,11 @@ "20201216_M2_05093711.014_Y_CrabNebula-W0.40+035.root", ] DL1_LST_data = ["dl1_LST-1.Run03265.0094.h5"] + """ Temporary paths """ - @pytest.fixture(scope="session") def temp_DL1_gamma(tmp_path_factory): return tmp_path_factory.mktemp("DL1_gammas") @@ -231,12 +232,10 @@ def temp_DL2_real_monly(tmp_path_factory): def temp_DL3_monly(tmp_path_factory): return tmp_path_factory.mktemp("DL3_monly") - """ Custom data """ - @pytest.fixture(scope="session") def dl2_test(temp_DL2_test): """ @@ -270,12 +269,10 @@ def pd_test(): df = pd.DataFrame(np.array([[1, 2], [3, 4], [5, 6]]), columns=["a", "b"]) return df - """ Remote paths (to download test files) """ - @pytest.fixture(scope="session") def base_url(): return "http://www.magic.iac.es/mcp-testdata" @@ -286,12 +283,10 @@ def env_prefix(): # ENVIRONMENT VARIABLES TO BE CREATED return "MAGIC_CTA_DATA_" - """ Downloads: files """ - @pytest.fixture(scope="session") def dl0_gamma(base_url, env_prefix): gamma_dl0 = [] @@ -375,21 +370,52 @@ def dl1_lst(base_url, env_prefix): @pytest.fixture(scope="session") def config(): - config_path = resource_file("config.yaml") + config_path = resource_file("test_config.yaml") return config_path @pytest.fixture(scope="session") def config_monly(): - config_path = resource_file("config_monly.yaml") - return config_path + config_path = resource_file("test_config_monly.yaml") + return config_path + + +@pytest.fixture(scope="session") +def config_gen(): + config_path = resource_file("test_config_general.yaml") + with open(config_path, "rb") as f: + config = yaml.safe_load(f) + return config + + +@pytest.fixture(scope="session") +def config_gen_4lst(): + config_path = resource_file("test_config_general_4LST.yaml") + with open(config_path, "rb") as f: + config = yaml.safe_load(f) + return config + + +@pytest.fixture(scope="session") +def config_calib(): + config_path = resource_file("test_config_calib.yaml") + with open(config_path, "rb") as f: + config = yaml.safe_load(f) + return config + + +@pytest.fixture(scope="session") +def config_check(): + config_path = resource_file("test_check_list.yaml") + with open(config_path, "rb") as f: + config = yaml.safe_load(f) + return config """ Data processing """ - @pytest.fixture(scope="session") def gamma_l1(temp_DL1_gamma, dl0_gamma, config): """ diff --git a/magicctapipe/image/__init__.py b/magicctapipe/image/__init__.py index 34fa8194..d7ec912f 100644 --- a/magicctapipe/image/__init__.py +++ b/magicctapipe/image/__init__.py @@ -4,15 +4,15 @@ get_num_islands_MAGIC, clean_image_params, ) +from .leakage import get_leakage +from .calib import calibrate -from .leakage import ( - get_leakage, -) __all__ = [ "MAGICClean", "PixelTreatment", "get_num_islands_MAGIC", + "calibrate", "clean_image_params", "get_leakage", ] diff --git a/magicctapipe/image/calib.py b/magicctapipe/image/calib.py new file mode 100644 index 00000000..95ae4410 --- /dev/null +++ 
b/magicctapipe/image/calib.py @@ -0,0 +1,140 @@ +import numpy as np +from ctapipe.image import ( + apply_time_delta_cleaning, + number_of_islands, + tailcuts_clean, +) +from ctapipe.instrument import CameraGeometry +from lstchain.image.cleaning import apply_dynamic_cleaning +from lstchain.image.modifier import ( + add_noise_in_pixels, + random_psf_smearer, + set_numba_seed +) +from magicctapipe.image import MAGICClean + + +__all__ = [ + "calibrate" +] + + +def calibrate(event, tel_id, config, calibrator, is_lst, obs_id=None, camera_geoms=None, magic_clean=None): + + """ + This function calibrates the camera image for a single event of a telescope + + Parameters + ---------- + event: event + From an EventSource + tel_id: int + Telescope ID + config: dictionary + Parameters for image extraction and calibration + calibrator: CameraCalibrator (ctapipe.calib) + ctapipe object needed to calibrate the camera + is_lst: bool + Whether the telescope is a LST + obs_id: int + Observation ID. Unsed in case of LST telescope + camera_geoms: telescope.camera.geometry + Camera geometry. Used in case of LST telescope + magic_clean: dictionary (1 entry per MAGIC telescope) + Each entry is a MAGICClean object using the telescope camera geometry. Used in case of MAGIC telescope + + + Returns + ------- + signal_pixels: boolean mask + Mask of the pixels selected by the cleaning + image: numpy array + Array of number of p.e. in the camera pixels + peak_time: numpy array + Array of the signal peak time in the camera pixels + + """ + if (not is_lst) and (magic_clean==None): + raise ValueError("Check the provided parameters and the telescope type; MAGIC calibration not possible if magic_clean not provided") + if (is_lst) and (obs_id==None): + raise ValueError("Check the provided parameters and the telescope type; LST calibration not possible if obs_id not provided") + if (is_lst) and (camera_geoms==None): + raise ValueError("Check the provided parameters and the telescope type; LST calibration not possible if gamera_geoms not provided") + if (not is_lst) and (type(magic_clean[tel_id])!=MAGICClean): + raise ValueError("Check the provided magic_clean parameter; MAGIC calibration not possible if magic_clean not a dictionary of MAGICClean objects") + if (is_lst) and (type(camera_geoms[tel_id])!=CameraGeometry): + raise ValueError("Check the provided camera_geoms parameter; LST calibration not possible if camera_geoms not a dictionary of CameraGeometry objects") + + calibrator._calibrate_dl0(event, tel_id) + calibrator._calibrate_dl1(event, tel_id) + + image = event.dl1.tel[tel_id].image.astype(np.float64) + peak_time = event.dl1.tel[tel_id].peak_time.astype(np.float64) + + if not is_lst: + use_charge_correction = config["charge_correction"]["use"] + + if use_charge_correction: + # Scale the charges by the correction factor + image *= config["charge_correction"]["factor"] + + # Apply the image cleaning + signal_pixels, image, peak_time = magic_clean[tel_id].clean_image( + event_image=image, event_pulse_time=peak_time + ) + + else: + increase_nsb = config["increase_nsb"].pop("use") + increase_psf = config["increase_psf"]["use"] + use_time_delta_cleaning = config["time_delta_cleaning"].pop("use") + use_dynamic_cleaning = config["dynamic_cleaning"].pop("use") + use_only_main_island = config["use_only_main_island"] + + if increase_nsb: + rng = np.random.default_rng(obs_id) + # Add extra noise in pixels + image = add_noise_in_pixels(rng, image, **config["increase_nsb"]) + + if increase_psf: + set_numba_seed(obs_id) + # Smear 
the image + image = random_psf_smearer( + image=image, + fraction=config["increase_psf"]["fraction"], + indices=camera_geoms[tel_id].neighbor_matrix_sparse.indices, + indptr=camera_geoms[tel_id].neighbor_matrix_sparse.indptr, + ) + + # Apply the image cleaning + signal_pixels = tailcuts_clean( + camera_geoms[tel_id], image, **config["tailcuts_clean"] + ) + + if use_time_delta_cleaning: + signal_pixels = apply_time_delta_cleaning( + geom=camera_geoms[tel_id], + mask=signal_pixels, + arrival_times=peak_time, + **config["time_delta_cleaning"], + ) + + if use_dynamic_cleaning: + signal_pixels = apply_dynamic_cleaning( + image, signal_pixels, **config["dynamic_cleaning"] + ) + + if use_only_main_island: + _, island_labels = number_of_islands(camera_geoms[tel_id], signal_pixels) + n_pixels_on_island = np.bincount(island_labels.astype(np.int64)) + + # The first index means the pixels not surviving + # the cleaning, so should not be considered + n_pixels_on_island[0] = 0 + max_island_label = np.argmax(n_pixels_on_island) + signal_pixels[island_labels != max_island_label] = False + + config["increase_nsb"]["use"]=increase_nsb + config["time_delta_cleaning"]["use"]=use_time_delta_cleaning + config["dynamic_cleaning"]["use"]=use_dynamic_cleaning + + return signal_pixels, image, peak_time diff --git a/magicctapipe/image/tests/test_calib.py b/magicctapipe/image/tests/test_calib.py new file mode 100644 index 00000000..7229686e --- /dev/null +++ b/magicctapipe/image/tests/test_calib.py @@ -0,0 +1,347 @@ +from magicctapipe.image.calib import calibrate +import pytest + + + +from ctapipe.calib import CameraCalibrator + +from ctapipe.io import EventSource + +from magicctapipe.image import MAGICClean +from traitlets.config import Config + + +@pytest.fixture(scope="session") +def tel_id_LST(): + return 1 + + +@pytest.fixture(scope="session") +def tel_id_MAGIC(): + return 2 + + +def test_calibrate_LST(dl0_gamma, config_calib, tel_id_LST): + + assigned_tel_ids = [1,2,3] + for input_file in dl0_gamma: + event_source = EventSource( + input_file, + allowed_tels=assigned_tel_ids, + focal_length_choice='effective' + ) + + + obs_id = event_source.obs_ids[0] + + subarray = event_source.subarray + + tel_descriptions = subarray.tel + camera_geoms={} + + for tel_id, telescope in tel_descriptions.items(): + camera_geoms[tel_id]= telescope.camera.geometry + + config_lst = config_calib["LST"] + + extractor_type_lst = config_lst["image_extractor"].pop("type") + config_extractor_lst = {extractor_type_lst: config_lst["image_extractor"]} + + calibrator_lst = CameraCalibrator( + image_extractor_type=extractor_type_lst, + config=Config(config_extractor_lst), + subarray=subarray, + ) + + for event in event_source: + if (event.count <200) and (tel_id_LST in event.trigger.tels_with_trigger): + signal_pixels, image, peak_time = calibrate( + event=event, + tel_id=tel_id_LST, + obs_id=obs_id, + config=config_lst, + camera_geoms=camera_geoms, + calibrator=calibrator_lst, + is_lst=True, + ) + + assert len(signal_pixels)==1855 + assert signal_pixels.dtype==bool + assert len(image)==1855 + assert len(peak_time)==1855 + + config_lst["image_extractor"]["type"]=extractor_type_lst + + +def test_calibrate_MAGIC(dl0_gamma, config_calib, tel_id_MAGIC): + + assigned_tel_ids = [1,2,3] + for input_file in dl0_gamma: + event_source = EventSource( + input_file, + allowed_tels=assigned_tel_ids, + focal_length_choice='effective' + ) + + + + subarray = event_source.subarray + + + tel_descriptions = subarray.tel + camera_geoms={} + + for tel_id, telescope 
in tel_descriptions.items(): + camera_geoms[tel_id]= telescope.camera.geometry + + + config_magic = config_calib["MAGIC"] + config_magic["magic_clean"].update({"find_hotpixels": False}) + + extractor_type_magic = config_magic["image_extractor"].pop("type") + config_extractor_magic = {extractor_type_magic: config_magic["image_extractor"]} + magic_clean = {} + for k in [1,2]: + + magic_clean[k] = MAGICClean(camera_geoms[k], config_magic["magic_clean"]) + calibrator_magic = CameraCalibrator( + image_extractor_type=extractor_type_magic, + config=Config(config_extractor_magic), + subarray=subarray, + ) + + for event in event_source: + if (event.count <200) and (tel_id_MAGIC in event.trigger.tels_with_trigger): + signal_pixels, image, peak_time = calibrate( + event=event, + tel_id=tel_id_MAGIC, + config=config_magic, + magic_clean=magic_clean, + calibrator=calibrator_magic, + is_lst=False, + ) + + assert len(signal_pixels)==1039 + assert signal_pixels.dtype==bool + assert len(image)==1039 + assert len(peak_time)==1039 + + config_magic["image_extractor"]["type"]=extractor_type_magic + + +def test_calibrate_exc_1(dl0_gamma, config_calib, tel_id_MAGIC): + assigned_tel_ids = [1,2,3] + for input_file in dl0_gamma: + event_source = EventSource( + input_file, + allowed_tels=assigned_tel_ids, + focal_length_choice='effective' + ) + subarray = event_source.subarray + config_magic = config_calib["MAGIC"] + config_magic["magic_clean"].update({"find_hotpixels": False}) + extractor_type_magic = config_magic["image_extractor"].pop("type") + config_extractor_magic = {extractor_type_magic: config_magic["image_extractor"]} + calibrator_magic = CameraCalibrator( + image_extractor_type=extractor_type_magic, + config=Config(config_extractor_magic), + subarray=subarray, + ) + + for event in event_source: + if (event.count <200) and (tel_id_MAGIC in event.trigger.tels_with_trigger): + with pytest.raises( + ValueError, + match="Check the provided parameters and the telescope type; MAGIC calibration not possible if magic_clean not provided", + ): + _,_,_ = calibrate( + event=event, + tel_id=tel_id_MAGIC, + config=config_magic, + calibrator=calibrator_magic, + is_lst=False, + ) + config_magic["image_extractor"]["type"]=extractor_type_magic + + +def test_calibrate_exc_2(dl0_gamma, config_calib, tel_id_LST): + assigned_tel_ids = [1,2,3] + for input_file in dl0_gamma: + event_source = EventSource( + input_file, + allowed_tels=assigned_tel_ids, + focal_length_choice='effective' + ) + + + + + subarray = event_source.subarray + + tel_descriptions = subarray.tel + camera_geoms={} + + for tel_id, telescope in tel_descriptions.items(): + camera_geoms[tel_id]= telescope.camera.geometry + + config_lst = config_calib["LST"] + + extractor_type_lst = config_lst["image_extractor"].pop("type") + config_extractor_lst = {extractor_type_lst: config_lst["image_extractor"]} + + calibrator_lst = CameraCalibrator( + image_extractor_type=extractor_type_lst, + config=Config(config_extractor_lst), + subarray=subarray, + ) + + for event in event_source: + if (event.count <200) and (tel_id_LST in event.trigger.tels_with_trigger): + with pytest.raises( + ValueError, + match="Check the provided parameters and the telescope type; LST calibration not possible if obs_id not provided", + ): + _,_,_ = calibrate( + event=event, + tel_id=tel_id_LST, + config=config_lst, + camera_geoms=camera_geoms, + calibrator=calibrator_lst, + is_lst=True, + ) + config_lst["image_extractor"]["type"]=extractor_type_lst + + +def test_calibrate_exc_3(dl0_gamma, 
config_calib, tel_id_LST): + assigned_tel_ids = [1,2,3] + for input_file in dl0_gamma: + event_source = EventSource( + input_file, + allowed_tels=assigned_tel_ids, + focal_length_choice='effective' + ) + + + obs_id = event_source.obs_ids[0] + + subarray = event_source.subarray + + + + config_lst = config_calib["LST"] + + extractor_type_lst = config_lst["image_extractor"].pop("type") + config_extractor_lst = {extractor_type_lst: config_lst["image_extractor"]} + + calibrator_lst = CameraCalibrator( + image_extractor_type=extractor_type_lst, + config=Config(config_extractor_lst), + subarray=subarray, + ) + + for event in event_source: + if (event.count <200) and (tel_id_LST in event.trigger.tels_with_trigger): + with pytest.raises( + ValueError, + match="Check the provided parameters and the telescope type; LST calibration not possible if gamera_geoms not provided", + ): + signal_pixels, image, peak_time = calibrate( + event=event, + tel_id=tel_id_LST, + obs_id=obs_id, + config=config_lst, + calibrator=calibrator_lst, + is_lst=True, + ) + config_lst["image_extractor"]["type"]=extractor_type_lst + + +def test_calibrate_exc_4(dl0_gamma, config_calib, tel_id_MAGIC): + assigned_tel_ids = [1,2,3] + for input_file in dl0_gamma: + event_source = EventSource( + input_file, + allowed_tels=assigned_tel_ids, + focal_length_choice='effective' + ) + subarray = event_source.subarray + tel_descriptions = subarray.tel + magic_clean={} + + for tel_id in range(len(tel_descriptions.items())): + magic_clean[tel_id]= f"camera {tel_id}" + config_magic = config_calib["MAGIC"] + config_magic["magic_clean"].update({"find_hotpixels": False}) + extractor_type_magic = config_magic["image_extractor"].pop("type") + config_extractor_magic = {extractor_type_magic: config_magic["image_extractor"]} + calibrator_magic = CameraCalibrator( + image_extractor_type=extractor_type_magic, + config=Config(config_extractor_magic), + subarray=subarray, + ) + + for event in event_source: + if (event.count <200) and (tel_id_MAGIC in event.trigger.tels_with_trigger): + with pytest.raises( + ValueError, + match="Check the provided magic_clean parameter; MAGIC calibration not possible if magic_clean not a dictionary of MAGICClean objects", + ): + _,_,_ = calibrate( + event=event, + tel_id=tel_id_MAGIC, + config=config_magic, + calibrator=calibrator_magic, + magic_clean=magic_clean, + is_lst=False, + ) + config_magic["image_extractor"]["type"]=extractor_type_magic + + +def test_calibrate_exc_5(dl0_gamma, config_calib, tel_id_LST): + assigned_tel_ids = [1,2,3] + for input_file in dl0_gamma: + event_source = EventSource( + input_file, + allowed_tels=assigned_tel_ids, + focal_length_choice='effective' + ) + + + obs_id = event_source.obs_ids[0] + + subarray = event_source.subarray + + tel_descriptions = subarray.tel + camera_geoms={} + + for tel_id in range(len(tel_descriptions.items())): + camera_geoms[tel_id]= f"camera {tel_id}" + + config_lst = config_calib["LST"] + + extractor_type_lst = config_lst["image_extractor"].pop("type") + config_extractor_lst = {extractor_type_lst: config_lst["image_extractor"]} + + calibrator_lst = CameraCalibrator( + image_extractor_type=extractor_type_lst, + config=Config(config_extractor_lst), + subarray=subarray, + ) + + for event in event_source: + if (event.count <200) and (tel_id_LST in event.trigger.tels_with_trigger): + with pytest.raises( + ValueError, + match="Check the provided camera_geoms parameter; LST calibration not possible if camera_geoms not a dictionary of CameraGeometry objects", + ): + _,_,_ = 
calibrate( + event=event, + tel_id=tel_id_LST, + obs_id=obs_id, + config=config_lst, + camera_geoms=camera_geoms, + calibrator=calibrator_lst, + is_lst=True, + ) + config_lst["image_extractor"]["type"]=extractor_type_lst + diff --git a/magicctapipe/io/__init__.py b/magicctapipe/io/__init__.py index fa5fb40f..4931f171 100644 --- a/magicctapipe/io/__init__.py +++ b/magicctapipe/io/__init__.py @@ -6,29 +6,35 @@ RealEventInfoContainer, SimEventInfoContainer, ) -from .gadf import ( - create_event_hdu, - create_gh_cuts_hdu, - create_gti_hdu, - create_pointing_hdu, -) from .io import ( + check_input_list, format_object, get_dl2_mean, get_stereo_events, + get_stereo_events_old, load_dl2_data_file, load_irf_files, load_lst_dl1_data_file, load_magic_dl1_data_files, load_mc_dl2_data_file, load_train_data_files, + load_train_data_files_tel, save_pandas_data_in_table, + telescope_combinations, ) +from .gadf import ( + create_event_hdu, + create_gh_cuts_hdu, + create_gti_hdu, + create_pointing_hdu, +) + __all__ = [ "BaseEventInfoContainer", "RealEventInfoContainer", "SimEventInfoContainer", + "check_input_list", "create_event_hdu", "create_gh_cuts_hdu", "create_gti_hdu", @@ -36,11 +42,14 @@ "format_object", "get_dl2_mean", "get_stereo_events", + "get_stereo_events_old", "load_dl2_data_file", "load_irf_files", "load_lst_dl1_data_file", "load_magic_dl1_data_files", "load_mc_dl2_data_file", "load_train_data_files", + "load_train_data_files_tel", "save_pandas_data_in_table", + "telescope_combinations", ] diff --git a/magicctapipe/io/gadf.py b/magicctapipe/io/gadf.py index f51b8807..77b9664e 100644 --- a/magicctapipe/io/gadf.py +++ b/magicctapipe/io/gadf.py @@ -2,7 +2,6 @@ # coding: utf-8 import logging - import numpy as np from astropy import units as u from astropy.coordinates import SkyCoord @@ -10,9 +9,9 @@ from astropy.table import QTable from astropy.time import Time from magicctapipe import __version__ -from magicctapipe.io.io import TEL_COMBINATIONS from magicctapipe.utils.functions import HEIGHT_ORM, LAT_ORM, LON_ORM from pyirf.binning import split_bin_lo_hi +#from .io import telescope_combinations __all__ = [ "create_gh_cuts_hdu", @@ -125,6 +124,12 @@ def create_event_hdu( If the source name cannot be resolved and also either or both of source RA/Dec coordinate is set to None """ + TEL_COMBINATIONS = { + "M1_M2": [2, 3], # combo_type = 0 + "LST1_M1": [1, 2], # combo_type = 1 + "LST1_M2": [1, 3], # combo_type = 2 + "LST1_M1_M2": [1, 2, 3], # combo_type = 3 + } #TODO: REMOVE WHEN SWITCHING TO THE NEW RFs IMPLEMENTTATION (1 RF PER TELESCOPE) mjdreff, mjdrefi = np.modf(MJDREF.mjd) diff --git a/magicctapipe/io/io.py b/magicctapipe/io/io.py index 72d49679..66f00865 100644 --- a/magicctapipe/io/io.py +++ b/magicctapipe/io/io.py @@ -23,33 +23,26 @@ from pyirf.utils import calculate_source_fov_offset, calculate_theta __all__ = [ + "check_input_list", "format_object", + "get_dl2_mean", "get_stereo_events", - "get_dl2_mean", + "get_stereo_events_old", + "load_dl2_data_file", + "load_irf_files", "load_lst_dl1_data_file", "load_magic_dl1_data_files", - "load_train_data_files", "load_mc_dl2_data_file", - "load_dl2_data_file", - "load_irf_files", + "load_train_data_files", + "load_train_data_files_tel", "save_pandas_data_in_table", + "telescope_combinations", ] logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) logger.setLevel(logging.INFO) -# The telescope IDs and names -TEL_NAMES = {1: "LST-1", 2: "MAGIC-I", 3: "MAGIC-II"} - -# The telescope combination types -TEL_COMBINATIONS = { - 
"M1_M2": [2, 3], # combo_type = 0 - "LST1_M1": [1, 2], # combo_type = 1 - "LST1_M2": [1, 3], # combo_type = 2 - "LST1_M1_M2": [1, 2, 3], # combo_type = 3 -} - # The pandas multi index to classify the events simulated by different # telescope pointing directions but have the same observation ID GROUP_INDEX_TRAIN = ["obs_id", "event_id", "true_alt", "true_az"] @@ -66,6 +59,119 @@ DEAD_TIME_LST = 7.6 * u.us DEAD_TIME_MAGIC = 26 * u.us +def check_input_list(config): + """ + This function checks if the input telescope list is organized as follows: + 1) All 4 LSTs and 2 MAGICs must be listed + 2) All 4 LSTs must come before the MAGICs + And it raises an exception in case these rules are not satisfied. + + Below we give two examples of valid lists: + i) + mc_tel_ids: + LST-1: 1 + LST-2: 0 + LST-3: 0 + LST-4: 0 + MAGIC-I: 2 + MAGIC-II: 3 + ii) + mc_tel_ids: + LST-4: 1 + LST-2: 7 + LST-3: 9 + LST-1: 0 + MAGIC-II: 2 + MAGIC-I: 3 + + And here one example of an unvalid list: + iii) + mc_tel_ids: + LST-4: 1 + LST-1: 0 + MAGIC-II: 2 + LST-3: 9 + MAGIC-I: 3 + + Parameters + ---------- + config: dict + dictionary imported from the yaml configuration file with information about the telescope IDs. + + Returns + ------- + This function will raise an exception if the input list is not properly organized. + """ + + list_of_tel_names = list(config["mc_tel_ids"].keys()) + standard_list_of_tels = ["LST-1", "LST-2", "LST-3", "LST-4", "MAGIC-I", "MAGIC-II"] + + if len(list_of_tel_names) != 6: + raise Exception(f"Number of telescopes found in the configuration file is {len(list_of_tel_names)}. It must be 6, i.e.: LST-1, LST-2, LST-3, LST-4, MAGIC-I, and MAGIC-II.") + else: + for tel_name in list_of_tel_names[0:4]: + if tel_name in standard_list_of_tels[0:4]: + pass + else: + raise Exception(f"Entry '{tel_name}' not accepted as an LST. Please make sure that the first four telescopes are LSTs, e.g.: 'LST-1', 'LST-2', 'LST-3', and 'LST-4'") + + for tel_name in list_of_tel_names[4:6]: + if tel_name in standard_list_of_tels[4:6]: + pass + else: + raise Exception(f"Entry '{tel_name}' not accepted as a MAGIC. Please make sure that the last two telescopes are MAGICs, e.g.: 'MAGIC-I', and 'MAGIC-II'") + return + + +def telescope_combinations(config): + """ + Generates all possible telescope combinations without repetition. E.g.: "LST1_M1", "LST2_LST4_M2", "LST1_LST2_LST3_M1" and so on. + + Parameters + ---------- + config: dict + yaml file with information about the telescope IDs. + + Returns + ------- + TEL_NAMES: dict + Dictionary with telescope IDs and names. + TEL_COMBINATIONS: dict + Dictionary with all telescope combinations with no repetions. + """ + + + TEL_NAMES = {} + for k, v in config["mc_tel_ids"].items(): # Here we swap the dictionary keys and values just for convenience. 
+ if v > 0: + TEL_NAMES[v] = k + + TEL_COMBINATIONS = {} + keys = list(TEL_NAMES.keys()) + + def recursive_solution(current_tel, current_comb): + + if current_tel == len(keys): # The function stops once we reach the last telescope + return + + current_comb_name = current_comb[0] + '_' + TEL_NAMES[keys[current_tel]] # Name of the combo (at this point it can even be a single telescope) + current_comb_list = current_comb[1] + [keys[current_tel]] # List of telescopes (including individual telescopes) + + if len(current_comb_list) > 1: # We save them in the new dictionary excluding the single-telescope values + TEL_COMBINATIONS[current_comb_name[1:]] = current_comb_list; + + current_comb = [current_comb_name, current_comb_list] # We save the current results in this varible to recal the function recursively ("for" loop below) + + for i in range(1, len(keys)-current_tel): + recursive_solution(current_tel+i, current_comb) + + + for key in range(len(keys)): + recursive_solution(key, ['',[]]) + + + return TEL_NAMES, TEL_COMBINATIONS + def format_object(input_object): """ @@ -92,9 +198,9 @@ def format_object(input_object): string = string.replace("'", "").replace(",", "") return string + - -def get_stereo_events( +def get_stereo_events_old( event_data, quality_cuts=None, group_index=["obs_id", "event_id"] ): """ @@ -117,7 +223,12 @@ def get_stereo_events( event_data_stereo: pandas.core.frame.DataFrame Data frame of the stereo events surviving the quality cuts """ - + TEL_COMBINATIONS = { + "M1_M2": [2, 3], # combo_type = 0 + "LST1_M1": [1, 2], # combo_type = 1 + "LST1_M2": [1, 3], # combo_type = 2 + "LST1_M1_M2": [1, 2, 3], # combo_type = 3 + } #TODO: REMOVE WHEN SWITCHING TO THE NEW RFs IMPLEMENTTATION (1 RF PER TELESCOPE) event_data_stereo = event_data.copy() # Apply the quality cuts @@ -172,6 +283,92 @@ def get_stereo_events( return event_data_stereo + +def get_stereo_events( + event_data, config, quality_cuts=None, group_index=["obs_id", "event_id"], eval_multi_combo=True +): + """ + Gets the stereo events surviving specified quality cuts. + + It also adds the telescope multiplicity `multiplicity` and + combination types `combo_type` to the output data frame. + + Parameters + ---------- + event_data: pandas.core.frame.DataFrame + Data frame of shower events + config: dict + Read from the yaml file with information about the telescope IDs. 
+ quality_cuts: str + Quality cuts applied to the input data + group_index: list + Index to group telescope events + eval_multi_combo: bool + If True, multiplicity is recalculated, combination type is assigned to each event and the fraction of events per combination type is shown + + + Returns + ------- + event_data_stereo: pandas.core.frame.DataFrame + Data frame of the stereo events surviving the quality cuts + """ + + TEL_NAMES, TEL_COMBINATIONS = telescope_combinations(config) + + event_data_stereo = event_data.copy() + + # Apply the quality cuts + if quality_cuts is not None: + event_data_stereo.query(quality_cuts, inplace=True) + + # Extract stereo events + event_data_stereo["multiplicity"] = event_data_stereo.groupby(group_index).size() + event_data_stereo.query("multiplicity > 1", inplace=True) + if eval_multi_combo==True: + # Check the total number of events + n_events_total = len(event_data_stereo.groupby(group_index).size()) + logger.info(f"\nIn total {n_events_total} stereo events are found:") + + n_events_per_combo = {} + + # Loop over every telescope combination type + for combo_type, (tel_combo, tel_ids) in enumerate(TEL_COMBINATIONS.items()): + multiplicity = len(tel_ids) + + df_events = event_data_stereo.query( + f"(tel_id == {tel_ids}) & (multiplicity == {multiplicity})" + ).copy() + + # Here we recalculate the multiplicity and apply the cut again, + # since with the above cut the events belonging to other + # combination types are also extracted. For example, in case of + # tel_id = [1, 2], the tel 1 events of the combination [1, 3] + # and the tel 2 events of the combination [2, 3] remain in the + # data frame, whose multiplicity will be recalculated as 1 and + # so will be removed with the following cuts. + + df_events["multiplicity"] = df_events.groupby(group_index).size() + df_events.query(f"multiplicity == {multiplicity}", inplace=True) + + # Assign the combination type + event_data_stereo.loc[df_events.index, "combo_type"] = combo_type + + n_events = len(df_events.groupby(group_index).size()) + percentage = 100 * n_events / n_events_total + + key = f"{tel_combo} (type {combo_type})" + value = f"{n_events:.0f} events ({percentage:.1f}%)" + + n_events_per_combo[key] = value + + event_data_stereo = event_data_stereo.astype({"combo_type": int}) + + # Show the number of events per combination type + logger.info(format_object(n_events_per_combo)) + + return event_data_stereo + + def get_dl2_mean(event_data, weight_type="simple", group_index=["obs_id", "event_id"]): """ Gets mean DL2 parameters per shower event. @@ -373,7 +570,7 @@ def load_lst_dl1_data_file(input_file): return event_data, subarray -def load_magic_dl1_data_files(input_dir): +def load_magic_dl1_data_files(input_dir, config): """ Loads MAGIC DL1 data files for the event coincidence with LST-1. @@ -381,6 +578,8 @@ def load_magic_dl1_data_files(input_dir): ---------- input_dir: str Path to a directory where input MAGIC DL1 data files are stored + config: dict + yaml file with information about the telescope IDs. 
Returns ------- @@ -394,7 +593,9 @@ def load_magic_dl1_data_files(input_dir): FileNotFoundError If any DL1 data files are not found in the input directory """ - + + TEL_NAMES, _ = telescope_combinations(config) + # Find the input files file_mask = f"{input_dir}/dl1_*.h5" @@ -477,6 +678,12 @@ def load_train_data_files( If any DL1-stereo data files are not found in the input directory """ + TEL_COMBINATIONS = { + "M1_M2": [2, 3], # combo_type = 0 + "LST1_M1": [1, 2], # combo_type = 1 + "LST1_M2": [1, 3], # combo_type = 2 + "LST1_M1_M2": [1, 2, 3], # combo_type = 3 + } #TODO: REMOVE WHEN SWITCHING TO THE NEW RFs IMPLEMENTTATION (1 RF PER TELESCOPE) # Find the input files file_mask = f"{input_dir}/dl1_stereo_*.h5" @@ -515,7 +722,7 @@ def load_train_data_files( if true_event_class is not None: event_data["true_event_class"] = true_event_class - event_data = get_stereo_events(event_data, group_index=GROUP_INDEX_TRAIN) + event_data = get_stereo_events_old(event_data, group_index=GROUP_INDEX_TRAIN) data_train = {} @@ -528,6 +735,92 @@ def load_train_data_files( return data_train +def load_train_data_files_tel(input_dir, config, offaxis_min=None, offaxis_max=None, true_event_class=None): + """ + Loads DL1-stereo data files and separates the shower events per + telescope combination type for training RFs. + + Parameters + ---------- + input_dir: str + Path to a directory where input DL1-stereo files are stored + config: dict + yaml file with information about the telescope IDs. + offaxis_min: str + Minimum shower off-axis angle allowed, whose format should be + acceptable by `astropy.units.quantity.Quantity` + offaxis_max: str + Maximum shower off-axis angle allowed, whose format should be + acceptable by `astropy.units.quantity.Quantity` + true_event_class: int + True event class of the input events + + + Returns + ------- + data_train: dict + Data frames of the shower events separated telescope-wise + + + Raises + ------ + FileNotFoundError + If any DL1-stereo data files are not found in the input + directory + """ + + TEL_NAMES, _ = telescope_combinations(config) + + # Find the input files + file_mask = f"{input_dir}/dl1_stereo_*.h5" + + input_files = glob.glob(file_mask) + input_files.sort() + + if len(input_files) == 0: + raise FileNotFoundError( + "Could not find any DL1-stereo data files in the input directory." 
+ ) + + # Load the input files + logger.info("\nThe following DL1-stereo data files are found:") + + data_list = [] + + for input_file in input_files: + logger.info(input_file) + + df_events = pd.read_hdf(input_file, key="events/parameters") + data_list.append(df_events) + + event_data = pd.concat(data_list) + event_data.set_index(GROUP_INDEX_TRAIN, inplace=True) + event_data.sort_index(inplace=True) + + if offaxis_min is not None: + offaxis_min = u.Quantity(offaxis_min).to_value("deg") + event_data.query(f"off_axis >= {offaxis_min}", inplace=True) + + if offaxis_max is not None: + offaxis_max = u.Quantity(offaxis_max).to_value("deg") + event_data.query(f"off_axis <= {offaxis_max}", inplace=True) + + if true_event_class is not None: + event_data["true_event_class"] = true_event_class + + event_data = get_stereo_events(event_data, config, group_index=GROUP_INDEX_TRAIN) + + data_train = {} + + # Loop over every telescope + for tel_id in TEL_NAMES.keys(): + df_events = event_data.query(f"tel_id == {tel_id}") + + if not df_events.empty: + data_train[tel_id] = df_events + + return data_train + def load_mc_dl2_data_file(input_file, quality_cuts, event_type, weight_type_dl2): """ @@ -563,13 +856,13 @@ def load_mc_dl2_data_file(input_file, quality_cuts, event_type, weight_type_dl2) ValueError If the input event type is not known """ - + # Load the input file df_events = pd.read_hdf(input_file, key="events/parameters") df_events.set_index(["obs_id", "event_id", "tel_id"], inplace=True) df_events.sort_index(inplace=True) - df_events = get_stereo_events(df_events, quality_cuts) + df_events = get_stereo_events_old(df_events, quality_cuts) logger.info(f"\nExtracting the events of the '{event_type}' type...") @@ -687,13 +980,14 @@ def load_dl2_data_file(input_file, quality_cuts, event_type, weight_type_dl2): ValueError If the input event type is not known """ + # Load the input file event_data = pd.read_hdf(input_file, key="events/parameters") event_data.set_index(["obs_id", "event_id", "tel_id"], inplace=True) event_data.sort_index(inplace=True) - event_data = get_stereo_events(event_data, quality_cuts) + event_data = get_stereo_events_old(event_data, quality_cuts) logger.info(f"\nExtracting the events of the '{event_type}' type...") @@ -783,7 +1077,6 @@ def load_dl2_data_file(input_file, quality_cuts, event_type, weight_type_dl2): return event_table, on_time, deadc - def load_irf_files(input_dir_irf): """ Loads input IRF data files for the IRF interpolation and checks the @@ -863,6 +1156,7 @@ def load_irf_files(input_dir_irf): logger.info(input_file) irf_hdus = fits.open(input_file) irf_data["file_names"].append(input_file) + # Read the header header = irf_hdus["EFFECTIVE AREA"].header @@ -891,6 +1185,7 @@ def load_irf_files(input_dir_irf): irf_data["energy_bins"].append(energy_bins) irf_data["fov_offset_bins"].append(fov_offset_bins) irf_data["migration_bins"].append(migration_bins) + irf_data["file_names"] = np.array(irf_data["file_names"]) # Read additional IRF data and bins if they exist if "PSF" in irf_hdus: @@ -980,7 +1275,6 @@ def load_irf_files(input_dir_irf): irf_data["grid_points"] = np.array(irf_data["grid_points"]) irf_data["energy_dispersion"] = np.array(irf_data["energy_dispersion"]) irf_data["migration_bins"] = np.array(irf_data["migration_bins"]) - irf_data["file_names"] = np.array(irf_data["file_names"]) if "gh_cuts" in irf_data: irf_data["gh_cuts"] = np.array(irf_data["gh_cuts"]) diff --git a/magicctapipe/io/tests/test_io.py b/magicctapipe/io/tests/test_io.py index e6cdfe9a..4951b93a 
100644 --- a/magicctapipe/io/tests/test_io.py +++ b/magicctapipe/io/tests/test_io.py @@ -1,14 +1,17 @@ from magicctapipe.io.io import ( + check_input_list, format_object, get_dl2_mean, get_stereo_events, load_train_data_files, + load_train_data_files_tel, load_mc_dl2_data_file, load_irf_files, save_pandas_data_in_table, load_magic_dl1_data_files, load_lst_dl1_data_file, load_dl2_data_file, + telescope_combinations, ) import pytest @@ -16,6 +19,66 @@ import pandas as pd +def test_check_input_list(config_check): + """ + Test on different dictionaries + """ + + check_input_list({'mc_tel_ids':{'LST-1':1, 'LST-2':2, 'LST-3':3, 'LST-4':4, 'MAGIC-I':5, 'MAGIC-II':6}}) + + check_input_list({'mc_tel_ids':{'LST-1':1, 'LST-2':3, 'LST-3':0, 'LST-4':0, 'MAGIC-I':2, 'MAGIC-II':6}}) + + check_input_list({'mc_tel_ids':{'LST-2':1, 'LST-1':3, 'LST-4':0, 'LST-3':0, 'MAGIC-II':2, 'MAGIC-I':6}}) + + with pytest.raises( + Exception, + match="Number of telescopes found in the configuration file is 5. It must be 6, i.e.: LST-1, LST-2, LST-3, LST-4, MAGIC-I, and MAGIC-II.", + ): + check_input_list(config_check) + + with pytest.raises( + Exception, + match="Number of telescopes found in the configuration file is 5. It must be 6, i.e.: LST-1, LST-2, LST-3, LST-4, MAGIC-I, and MAGIC-II.", + ): + check_input_list({'mc_tel_ids':{'LST-1':1, 'LST-2':2, 'LST-3':3, 'MAGIC-I':4, 'MAGIC-II':5}}) + + with pytest.raises( + Exception, + match="Number of telescopes found in the configuration file is 7. It must be 6, i.e.: LST-1, LST-2, LST-3, LST-4, MAGIC-I, and MAGIC-II.", + ): + check_input_list({'mc_tel_ids':{'LST-1':1, 'LST-2':2, 'LST-3':3, 'LST-4':6, 'LST-5':7, 'MAGIC-I':4, 'MAGIC-II':5}}) + + with pytest.raises( + Exception, + match="Entry 'LSTT-1' not accepted as an LST. Please make sure that the first four telescopes are LSTs, e.g.: 'LST-1', 'LST-2', 'LST-3', and 'LST-4'", + ): + check_input_list({'mc_tel_ids':{'LSTT-1':1, 'LST-2':2, 'LST-3':3, 'LST-4':6, 'MAGIC-I':4, 'MAGIC-II':5}}) + + with pytest.raises( + Exception, + match="Entry 'MAGIC-III' not accepted as a MAGIC. Please make sure that the last two telescopes are MAGICs, e.g.: 'MAGIC-I', and 'MAGIC-II'", + ): + check_input_list({'mc_tel_ids':{'LST-1':1, 'LST-2':2, 'LST-3':3, 'LST-4':6, 'MAGIC-I':4, 'MAGIC-III':5}}) + + with pytest.raises( + Exception, + match="Entry 'MAGIC-I' not accepted as an LST. 
Please make sure that the first four telescopes are LSTs, e.g.: 'LST-1', 'LST-2', 'LST-3', and 'LST-4'", + ): + check_input_list({'mc_tel_ids':{'LST-1':1, 'LST-2':2, 'MAGIC-I':4, 'LST-3':3, 'LST-4':6, 'MAGIC-II':5}}) + + +def test_telescope_combinations(config_gen, config_gen_4lst): + """ + Simple check on telescope combinations + """ + M_LST, M_LST_comb = telescope_combinations(config_gen) + LSTs, LSTs_comb = telescope_combinations(config_gen_4lst) + assert M_LST == {1: 'LST-1', 2: 'MAGIC-I', 3: 'MAGIC-II'} + assert M_LST_comb == {'LST-1_MAGIC-I': [1, 2], 'LST-1_MAGIC-I_MAGIC-II': [1, 2, 3], 'LST-1_MAGIC-II': [1, 3], 'MAGIC-I_MAGIC-II': [2, 3]} + assert LSTs == {1: 'LST-1', 3: 'LST-2', 2: 'LST-3', 5: 'LST-4'} + assert LSTs_comb == {'LST-1_LST-2': [1, 3], 'LST-1_LST-2_LST-3': [1, 3, 2], 'LST-1_LST-2_LST-3_LST-4': [1, 3, 2, 5], 'LST-1_LST-2_LST-4': [1, 3, 5], 'LST-1_LST-3': [1, 2], 'LST-1_LST-3_LST-4': [1, 2, 5], 'LST-1_LST-4': [1, 5], 'LST-2_LST-3': [3, 2], 'LST-2_LST-3_LST-4': [3, 2, 5], 'LST-2_LST-4': [3, 5], 'LST-3_LST-4': [2, 5]} + + def test_format_object(): """ Simple check on a string @@ -36,7 +99,7 @@ def test_save_pandas_data_in_table(temp_pandas, pd_test): assert df.equals(df1) -def test_get_stereo_events_mc(gamma_stereo, p_stereo): +def test_get_stereo_events_mc(gamma_stereo, p_stereo, config_gen): """ Check on stereo data reading """ @@ -52,12 +115,12 @@ def test_get_stereo_events_mc(gamma_stereo, p_stereo): event_data = pd.read_hdf(str(file), key="events/parameters") event_data.set_index(["obs_id", "event_id", "tel_id"], inplace=True) event_data.sort_index(inplace=True) - data = get_stereo_events(event_data) + data = get_stereo_events(event_data, config_gen) assert np.all(data["multiplicity"] > 1) assert np.all(data["combo_type"] >= 0) -def test_get_stereo_events_mc_cut(gamma_stereo, p_stereo): +def test_get_stereo_events_mc_cut(gamma_stereo, p_stereo, config_gen): """ Check on quality cuts """ @@ -71,7 +134,7 @@ def test_get_stereo_events_mc_cut(gamma_stereo, p_stereo): event_data = pd.read_hdf(str(file), key="events/parameters") event_data.set_index(["obs_id", "event_id", "tel_id"], inplace=True) event_data.sort_index(inplace=True) - data = get_stereo_events(event_data, "intensity>50") + data = get_stereo_events(event_data, config_gen, "intensity>50") assert np.all(data["intensity"] > 50) @@ -94,7 +157,7 @@ def test_load_train_data_files_g(gamma_stereo): """ events = load_train_data_files(str(gamma_stereo[0])) - assert list(events.keys()) == ["M1_M2", "LST1_M1", "LST1_M2", "LST1_M1_M2"] + assert list(events.keys()) == ["LST1_M1", "LST1_M2", "LST1_M1_M2"] data = events["LST1_M1"] assert np.all(data["combo_type"]) == 1 assert "off_axis" in data.columns @@ -108,7 +171,7 @@ def test_load_train_data_files_off(gamma_stereo): events = load_train_data_files( str(gamma_stereo[0]), offaxis_min="0.2 deg", offaxis_max="0.5 deg" ) - data = events["LST1_M2"] + data = events["LST1_M1"] assert np.all(data["off_axis"] >= 0.2) assert np.all(data["off_axis"] <= 0.5) @@ -124,6 +187,53 @@ def test_load_train_data_files_exc(temp_train_exc): _ = load_train_data_files(str(temp_train_exc)) +def test_load_train_data_files_tel_p(p_stereo, config_gen): + """ + Check dictionary + """ + + events = load_train_data_files_tel(str(p_stereo[0]),config_gen) + assert list(events.keys()) == [1,2,3] + data = events[2] + assert "off_axis" in data.columns + assert "true_event_class" not in data.columns + + +def test_load_train_data_files_tel_g(gamma_stereo, config_gen): + """ + Check dictionary + """ + + events = 
load_train_data_files_tel(str(gamma_stereo[0]), config_gen) + assert list(events.keys()) == [1,2,3] + data = events[1] + assert "off_axis" in data.columns + assert "true_event_class" not in data.columns + + +def test_load_train_data_files_tel_off(gamma_stereo, config_gen): + """ + Check off-axis cut + """ + events = load_train_data_files_tel( + str(gamma_stereo[0]), config=config_gen, offaxis_min="0.2 deg", offaxis_max="0.5 deg" + ) + data = events[1] + assert np.all(data["off_axis"] >= 0.2) + assert np.all(data["off_axis"] <= 0.5) + + +def test_load_train_data_files_tel_exc(temp_train_exc, config_gen): + """ + Check on exceptions + """ + with pytest.raises( + FileNotFoundError, + match="Could not find any DL1-stereo data files in the input directory.", + ): + _ = load_train_data_files(str(temp_train_exc), config_gen) + + def test_load_mc_dl2_data_file(p_dl2, gamma_dl2): """ Checks on default loading @@ -221,7 +331,7 @@ def test_load_irf_files(IRF): """ Check on IRF dictionaries """ - + irf, header = load_irf_files(str(IRF)) assert set(list(irf.keys())).issubset( set( @@ -296,12 +406,12 @@ def test_load_lst_dl1_data_file(dl1_lst): assert s1.all() -def test_load_magic_dl1_data_files(merge_magic): +def test_load_magic_dl1_data_files(merge_magic, config_gen): """ Check on MAGIC DL1 """ - events, _ = load_magic_dl1_data_files(str(merge_magic)) + events, _ = load_magic_dl1_data_files(str(merge_magic), config_gen) assert list(events.index.names) == ["obs_id_magic", "event_id_magic", "tel_id"] assert "event_id" not in events.columns events = events.reset_index() @@ -310,7 +420,7 @@ def test_load_magic_dl1_data_files(merge_magic): assert s1.all() -def test_load_magic_dl1_data_files_exc(temp_DL1_M_exc): +def test_load_magic_dl1_data_files_exc(temp_DL1_M_exc, config_gen): """ Check on MAGIC DL1: exceptions (no DL1 files) """ @@ -318,10 +428,10 @@ def test_load_magic_dl1_data_files_exc(temp_DL1_M_exc): FileNotFoundError, match="Could not find any DL1 data files in the input directory.", ): - _, _ = load_magic_dl1_data_files(str(temp_DL1_M_exc)) + _, _ = load_magic_dl1_data_files(str(temp_DL1_M_exc), config_gen) -def test_get_stereo_events_data(coincidence_stereo): +def test_get_stereo_events_data(coincidence_stereo, config_gen): """ Check on stereo data reading """ @@ -330,12 +440,12 @@ def test_get_stereo_events_data(coincidence_stereo): event_data = pd.read_hdf(str(file), key="events/parameters") event_data.set_index(["obs_id", "event_id", "tel_id"], inplace=True) event_data.sort_index(inplace=True) - data = get_stereo_events(event_data) + data = get_stereo_events(event_data, config_gen) assert np.all(data["multiplicity"] > 1) assert np.all(data["combo_type"] >= 0) -def test_get_stereo_events_data_cut(coincidence_stereo): +def test_get_stereo_events_data_cut(coincidence_stereo, config_gen): """ Check on quality cuts """ @@ -344,7 +454,7 @@ def test_get_stereo_events_data_cut(coincidence_stereo): event_data = pd.read_hdf(str(file), key="events/parameters") event_data.set_index(["obs_id", "event_id", "tel_id"], inplace=True) event_data.sort_index(inplace=True) - data = get_stereo_events(event_data, "intensity>50") + data = get_stereo_events(event_data, config_gen, "intensity>50") assert np.all(data["intensity"] > 50) diff --git a/magicctapipe/io/tests/test_io_monly.py b/magicctapipe/io/tests/test_io_monly.py index c498b87c..135b782d 100644 --- a/magicctapipe/io/tests/test_io_monly.py +++ b/magicctapipe/io/tests/test_io_monly.py @@ -3,6 +3,7 @@ get_dl2_mean, get_stereo_events, load_train_data_files, + 
load_train_data_files_tel, load_mc_dl2_data_file, load_irf_files, save_pandas_data_in_table, @@ -36,7 +37,7 @@ def test_save_pandas_data_in_table(temp_pandas, pd_test): assert df.equals(df1) -def test_get_stereo_events_mc(gamma_stereo_monly, p_stereo_monly): +def test_get_stereo_events_mc(gamma_stereo_monly, p_stereo_monly, config_gen): """ Check on stereo data reading """ @@ -52,12 +53,12 @@ def test_get_stereo_events_mc(gamma_stereo_monly, p_stereo_monly): event_data = pd.read_hdf(str(file), key="events/parameters") event_data.set_index(["obs_id", "event_id", "tel_id"], inplace=True) event_data.sort_index(inplace=True) - data = get_stereo_events(event_data) + data = get_stereo_events(event_data, config_gen) assert np.all(data["multiplicity"] == 2) - assert np.all(data["combo_type"] == 0) + assert np.all(data["combo_type"] == 3) -def test_get_stereo_events_mc_cut(gamma_stereo_monly, p_stereo_monly): +def test_get_stereo_events_mc_cut(gamma_stereo_monly, p_stereo_monly, config_gen): """ Check on quality cuts """ @@ -71,7 +72,7 @@ def test_get_stereo_events_mc_cut(gamma_stereo_monly, p_stereo_monly): event_data = pd.read_hdf(str(file), key="events/parameters") event_data.set_index(["obs_id", "event_id", "tel_id"], inplace=True) event_data.sort_index(inplace=True) - data = get_stereo_events(event_data, "intensity>50") + data = get_stereo_events(event_data, config_gen, "intensity>50") assert np.all(data["intensity"] > 50) @@ -124,6 +125,53 @@ def test_load_train_data_files_exc(temp_train_exc): _ = load_train_data_files(str(temp_train_exc)) +def test_load_train_data_files_tel_p(p_stereo_monly, config_gen): + """ + Check dictionary + """ + + events = load_train_data_files_tel(str(p_stereo_monly[0]),config_gen) + assert list(events.keys()) == [2,3] + data = events[2] + assert "off_axis" in data.columns + assert "true_event_class" not in data.columns + + +def test_load_train_data_files_tel_g(gamma_stereo_monly, config_gen): + """ + Check dictionary + """ + + events = load_train_data_files_tel(str(gamma_stereo_monly[0]), config_gen) + assert list(events.keys()) == [2,3] + data = events[3] + assert "off_axis" in data.columns + assert "true_event_class" not in data.columns + + +def test_load_train_data_files_tel_off(gamma_stereo_monly, config_gen): + """ + Check off-axis cut + """ + events = load_train_data_files_tel( + str(gamma_stereo_monly[0]), config=config_gen, offaxis_min="0.2 deg", offaxis_max="0.5 deg" + ) + data = events[2] + assert np.all(data["off_axis"] >= 0.2) + assert np.all(data["off_axis"] <= 0.5) + + +def test_load_train_data_files_tel_exc(temp_train_exc, config_gen): + """ + Check on exceptions + """ + with pytest.raises( + FileNotFoundError, + match="Could not find any DL1-stereo data files in the input directory.", + ): + _ = load_train_data_files(str(temp_train_exc), config_gen) + + def test_load_mc_dl2_data_file(p_dl2_monly, gamma_dl2_monly): """ Checks on default loading @@ -299,12 +347,12 @@ def test_load_lst_dl1_data_file(dl1_lst): assert s1.all() -def test_load_magic_dl1_data_files(merge_magic_monly): +def test_load_magic_dl1_data_files(merge_magic_monly, config_gen): """ Check on MAGIC DL1 """ - events, _ = load_magic_dl1_data_files(str(merge_magic_monly)) + events, _ = load_magic_dl1_data_files(str(merge_magic_monly), config_gen) assert list(events.index.names) == ["obs_id_magic", "event_id_magic", "tel_id"] assert "event_id" not in events.columns events = events.reset_index() @@ -313,7 +361,7 @@ def test_load_magic_dl1_data_files(merge_magic_monly): assert s1.all() -def 
test_load_magic_dl1_data_files_exc(temp_DL1_M_exc): +def test_load_magic_dl1_data_files_exc(temp_DL1_M_exc, config_gen): """ Check on MAGIC DL1: exceptions (no DL1 files) """ @@ -321,10 +369,10 @@ def test_load_magic_dl1_data_files_exc(temp_DL1_M_exc): FileNotFoundError, match="Could not find any DL1 data files in the input directory.", ): - _, _ = load_magic_dl1_data_files(str(temp_DL1_M_exc)) + _, _ = load_magic_dl1_data_files(str(temp_DL1_M_exc), config_gen) -def test_get_stereo_events_data(stereo_monly): +def test_get_stereo_events_data(stereo_monly, config_gen): """ Check on stereo data reading """ @@ -333,12 +381,12 @@ def test_get_stereo_events_data(stereo_monly): event_data = pd.read_hdf(str(file), key="events/parameters") event_data.set_index(["obs_id", "event_id", "tel_id"], inplace=True) event_data.sort_index(inplace=True) - data = get_stereo_events(event_data) - assert np.all(data["multiplicity"] == 2) - assert np.all(data["combo_type"] == 0) + data = get_stereo_events(event_data, config_gen) + assert np.all(data["multiplicity"] == 2) + assert np.all(data["combo_type"] == 3) -def test_get_stereo_events_data_cut(stereo_monly): +def test_get_stereo_events_data_cut(stereo_monly, config_gen): """ Check on quality cuts """ @@ -347,7 +395,7 @@ def test_get_stereo_events_data_cut(stereo_monly): event_data = pd.read_hdf(str(file), key="events/parameters") event_data.set_index(["obs_id", "event_id", "tel_id"], inplace=True) event_data.sort_index(inplace=True) - data = get_stereo_events(event_data, "intensity>50") + data = get_stereo_events(event_data, config_gen, "intensity>50") assert np.all(data["intensity"] > 50) diff --git a/magicctapipe/reco/estimators.py b/magicctapipe/reco/estimators.py index 5bd02a4c..97caed90 100644 --- a/magicctapipe/reco/estimators.py +++ b/magicctapipe/reco/estimators.py @@ -7,13 +7,14 @@ import numpy as np import pandas as pd import sklearn.ensemble -from magicctapipe.io.io import TEL_NAMES + __all__ = ["EnergyRegressor", "DispRegressor", "EventClassifier"] logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) logger.setLevel(logging.INFO) +TEL_NAMES = {1: "LST-1", 2: "MAGIC-I", 3: "MAGIC-II"} #TODO: REMOVE WHEN SWITCHING TO THE NEW RFs IMPLEMENTTATION (1 RF PER TELESCOPE) class EnergyRegressor: @@ -60,7 +61,7 @@ def fit(self, event_data): event_data: pandas.core.frame.DataFrame Data frame of shower events """ - + self.telescope_rfs.clear() # Loop over every telescope diff --git a/magicctapipe/resources/LST_runs.txt b/magicctapipe/resources/LST_runs.txt new file mode 100644 index 00000000..53b010ff --- /dev/null +++ b/magicctapipe/resources/LST_runs.txt @@ -0,0 +1,10 @@ +2020_11_18,2923 +2020_11_18,2924 +2020_12_07,3093 +2020_12_07,3094 +2020_12_07,3095 +2020_12_07,3096 +2020_12_15,3265 +2020_12_15,3266 +2020_12_15,3267 +2020_12_15,3268 diff --git a/magicctapipe/resources/MAGIC_runs.txt b/magicctapipe/resources/MAGIC_runs.txt new file mode 100644 index 00000000..ce49672b --- /dev/null +++ b/magicctapipe/resources/MAGIC_runs.txt @@ -0,0 +1,30 @@ +2020_11_19,5093174 +2020_11_19,5093175 +2020_12_08,5093491 +2020_12_08,5093492 +2020_12_16,5093711 +2020_12_16,5093712 +2020_12_16,5093713 +2020_12_16,5093714 +2021_02_14,5094483 +2021_02_14,5094484 +2021_02_14,5094485 +2021_02_14,5094486 +2021_02_14,5094487 +2021_02_14,5094488 +2021_03_16,5095265 +2021_03_16,5095266 +2021_03_16,5095267 +2021_03_16,5095268 +2021_03_16,5095271 +2021_03_16,5095272 +2021_03_16,5095273 +2021_03_16,5095277 +2021_03_16,5095278 +2021_03_16,5095281 
+2021_03_18,5095376 +2021_03_18,5095377 +2021_03_18,5095380 +2021_03_18,5095381 +2021_03_18,5095382 +2021_03_18,5095383 diff --git a/magicctapipe/resources/test_check_list.yaml b/magicctapipe/resources/test_check_list.yaml new file mode 100644 index 00000000..15b4296d --- /dev/null +++ b/magicctapipe/resources/test_check_list.yaml @@ -0,0 +1,7 @@ +mc_tel_ids: + LST-2: 1 + LST-2: 3 + LST-4: 0 + LST-3: 0 + MAGIC-II: 2 + MAGIC-I: 6 \ No newline at end of file diff --git a/magicctapipe/resources/config.yaml b/magicctapipe/resources/test_config.yaml similarity index 96% rename from magicctapipe/resources/config.yaml rename to magicctapipe/resources/test_config.yaml index f6067ee2..8394a031 100644 --- a/magicctapipe/resources/config.yaml +++ b/magicctapipe/resources/test_config.yaml @@ -1,9 +1,11 @@ mc_tel_ids: LST-1: 1 + LST-2: 0 + LST-3: 0 + LST-4: 0 MAGIC-I: 2 MAGIC-II: 3 - LST: image_extractor: type: "LocalPeakWindowSum" @@ -240,6 +242,8 @@ create_irf: dl2_to_dl3: interpolation_method: "nearest" # select "nearest", "linear" or "cubic" + interpolation_scheme: "cosZdAz" # select "cosZdAz" or "cosZd" + max_distance: "45. deg" # angle type Quantity, or comment out to remove the cut source_name: "Crab" source_ra: null # used when the source name cannot be resolved source_dec: null # used when the source name cannot be resolved diff --git a/magicctapipe/resources/test_config_calib.yaml b/magicctapipe/resources/test_config_calib.yaml new file mode 100644 index 00000000..22d8b395 --- /dev/null +++ b/magicctapipe/resources/test_config_calib.yaml @@ -0,0 +1,60 @@ +LST: + image_extractor: + type: "LocalPeakWindowSum" + window_shift: 4 + window_width: 8 + + increase_nsb: + use: true + extra_noise_in_dim_pixels: 1.27 + extra_bias_in_dim_pixels: 0.665 + transition_charge: 8 + extra_noise_in_bright_pixels: 2.08 + + increase_psf: + use: false + fraction: null + + tailcuts_clean: + picture_thresh: 8 + boundary_thresh: 4 + keep_isolated_pixels: false + min_number_picture_neighbors: 2 + + time_delta_cleaning: + use: true + min_number_neighbors: 1 + time_limit: 2 + + dynamic_cleaning: + use: true + threshold: 267 + fraction: 0.03 + + use_only_main_island: false + + +MAGIC: + image_extractor: + type: "SlidingWindowMaxSum" + window_width: 5 + apply_integration_correction: false + + charge_correction: + use: true + factor: 1.143 + + magic_clean: + use_time: true + use_sum: true + picture_thresh: 6 + boundary_thresh: 3.5 + max_time_off: 4.5 + max_time_diff: 1.5 + find_hotpixels: true + pedestal_type: "from_extractor_rndm" + + muon_ring: + thr_low: 25 + tailcut: [12, 8] + ring_completeness_threshold: 25 \ No newline at end of file diff --git a/magicctapipe/resources/test_config_general.yaml b/magicctapipe/resources/test_config_general.yaml new file mode 100644 index 00000000..0a15083c --- /dev/null +++ b/magicctapipe/resources/test_config_general.yaml @@ -0,0 +1,11 @@ +mc_tel_ids: + LST-1: 1 + LST-2: 0 + LST-3: 0 + LST-4: 0 + MAGIC-I: 2 + MAGIC-II: 3 + +general: + focal_length : "effective" #effective #nominal + diff --git a/magicctapipe/resources/test_config_general_4LST.yaml b/magicctapipe/resources/test_config_general_4LST.yaml new file mode 100644 index 00000000..bdca6480 --- /dev/null +++ b/magicctapipe/resources/test_config_general_4LST.yaml @@ -0,0 +1,11 @@ +mc_tel_ids: + LST-1: 1 + LST-2: 3 + LST-3: 2 + LST-4: 5 + MAGIC-I: 0 + MAGIC-II: 0 + +general: + focal_length : "effective" #effective #nominal + diff --git a/magicctapipe/resources/config_monly.yaml b/magicctapipe/resources/test_config_monly.yaml 
similarity index 96% rename from magicctapipe/resources/config_monly.yaml rename to magicctapipe/resources/test_config_monly.yaml index 200673f2..5cc7a085 100644 --- a/magicctapipe/resources/config_monly.yaml +++ b/magicctapipe/resources/test_config_monly.yaml @@ -1,5 +1,8 @@ mc_tel_ids: LST-1: 1 + LST-2: 0 + LST-3: 0 + LST-4: 0 MAGIC-I: 2 MAGIC-II: 3 @@ -240,6 +243,8 @@ create_irf: dl2_to_dl3: interpolation_method: "nearest" # select "nearest", "linear" or "cubic" + interpolation_scheme: "cosZdAz" # select "cosZdAz" or "cosZd" + max_distance: "45. deg" # angle type Quantity, or comment out to remove the cut source_name: "Crab" source_ra: null # used when the source name cannot be resolved source_dec: null # used when the source name cannot be resolved diff --git a/magicctapipe/scripts/lst1_magic/README.md b/magicctapipe/scripts/lst1_magic/README.md index da9c695c..6259c02f 100644 --- a/magicctapipe/scripts/lst1_magic/README.md +++ b/magicctapipe/scripts/lst1_magic/README.md @@ -1,12 +1,12 @@ -# Script for MAGIC and MAGIC+LST-1 analysis +# Script for MAGIC and MAGIC+LST analysis -This folder contains scripts to perform MAGIC-only or MAGIC+LST-1 analysis. +This folder contains scripts to perform MAGIC-only or MAGIC+LST analysis. Each script can be called from the command line from anywhere in your system (some console scripts are created during installation). Please run them with `-h` option for the first time to check what are the options available. ## MAGIC-only analysis -MAGIC-only analysis starts from MAGIC calibrated data (\_Y\_ files). The analysis flow is as following: +MAGIC-only analysis starts from MAGIC-calibrated data (\_Y\_ files). The analysis flow is as follows: - `magic_calib_to_dl1.py` on real and MC data (if you use MCs produced with MMCS), to convert them into DL1 format - if you use SimTelArray MCs, run `lst1_magic_mc_dl0_to_dl1.py` over them to convert them into DL1 format @@ -17,9 +17,9 @@ MAGIC-only analysis starts from MAGIC calibrated data (\_Y\_ files). The analysi - `lst1_magic_create_irf.py` to create the IRF (use `magic_stereo` as `irf_type` in the configuration file) - `lst1_magic_dl2_to_dl3.py` to create DL3 files, and `create_dl3_index_files.py` to create DL3 HDU and index files -## MAGIC+LST-1 analysis +## MAGIC+LST analysis: overview -MAGIC+LST-1 analysis starts from MAGIC calibrated data (\_Y\_ files), LST-1 DL1 data and SimTelArray DL0 data. The analysis flow is as following: +MAGIC+LST analysis starts from MAGIC calibrated data (\_Y\_ files), LST DL1 data and SimTelArray DL0 data. 
The analysis flow is as follows: - `magic_calib_to_dl1.py` on real MAGIC data, to convert them into DL1 format - `lst1_magic_mc_dl0_to_dl1.py` over SimTelArray MCs to convert them into DL1 format diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_dl1_stereo_to_dl2.py b/magicctapipe/scripts/lst1_magic/lst1_magic_dl1_stereo_to_dl2.py index d3d129ec..70d21c2f 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_dl1_stereo_to_dl2.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_dl1_stereo_to_dl2.py @@ -29,8 +29,7 @@ from astropy.coordinates import AltAz, SkyCoord, angular_separation from ctapipe.coordinates import TelescopeFrame from ctapipe.instrument import SubarrayDescription -from magicctapipe.io import get_stereo_events, save_pandas_data_in_table -from magicctapipe.io.io import TEL_COMBINATIONS +from magicctapipe.io import get_stereo_events_old, save_pandas_data_in_table from magicctapipe.reco import DispRegressor, EnergyRegressor, EventClassifier __all__ = ["apply_rfs", "reconstruct_arrival_direction", "dl1_stereo_to_dl2"] @@ -38,7 +37,12 @@ logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) logger.setLevel(logging.INFO) - +TEL_COMBINATIONS = { + "M1_M2": [2, 3], # combo_type = 0 + "LST1_M1": [1, 2], # combo_type = 1 + "LST1_M2": [1, 3], # combo_type = 2 + "LST1_M1_M2": [1, 2, 3], # combo_type = 3 +} #TODO: REMOVE WHEN SWITCHING TO THE NEW RFs IMPLEMENTATION (1 RF PER TELESCOPE) def apply_rfs(event_data, estimator): """ @@ -262,7 +266,7 @@ def dl1_stereo_to_dl2(input_file_dl1, input_dir_rfs, output_dir): is_simulation = "true_energy" in event_data.columns logger.info(f"\nIs simulation: {is_simulation}") - event_data = get_stereo_events(event_data) + event_data = get_stereo_events_old(event_data) subarray = SubarrayDescription.from_hdf(input_file_dl1) tel_descriptions = subarray.tel diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py index 8f3fbf77..62c67d56 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py @@ -2,24 +2,24 @@ # coding: utf-8 """ -This script searches for coincident events from LST-1 and MAGIC joint +This script searches for coincident events from LST and MAGIC joint observation data offline using their timestamps. It applies the -coincidence window to LST-1 events, and checks the coincidence within +coincidence window to LST events, and checks the coincidence within the time offset region specified in the configuration file. Since the optimal time offset changes depending on the telescope distance along the pointing direction, it is recommended to input one subrun file for -LST-1 data, whose observation time is usually around 10 seconds so the +LST data, whose observation time is usually around 10 seconds so the change of the distance is negligible. The MAGIC standard stereo analysis discards the events when one of the telescope images cannot survive the cleaning or fail to compute the DL1 parameters. However, it's possible to perform the stereo analysis if -LST-1 sees these events. Thus, it checks the coincidence for each -telescope combination (i.e., LST1 + M1 and LST1 + M2) and keeps the +LST sees these events. Thus, it checks the coincidence for each +telescope combination (e.g., LST1 + M1 and LST1 + M2) and keeps the MAGIC events even if they do not have their MAGIC-stereo counterparts.
-The MAGIC-stereo events, observed during the LST-1 observation time -period but not coincident with any LST-1 events, are also saved in the +The MAGIC-stereo events, observed during the LST observation time +period but not coincident with any LST events, are also saved in the output file, but they are not yet used for the high level analysis. Unless there is any particular reason, please use the default half width @@ -40,12 +40,16 @@ around the possible offset found by the pre offset search. Instead of that, you can also define the offset scan range in the configuration file. -Usage: +Usage per single LST data file (recommended if you want to do tests): $ python lst1_magic_event_coincidence.py ---input-file-lst dl1/LST-1/dl1_LST-1.Run03265.0040.h5 +--input-file-lst dl1/LST/dl1_LST.Run03265.0040.h5 --input-dir-magic dl1/MAGIC (--output-dir dl1_coincidence) (--config-file config.yaml) + +Broader usage: +This script is called automatically from the script "coincident_events.py". +If you want to analyse a target, this is the way to go. See this other script for more details. """ import argparse @@ -54,21 +58,21 @@ import time from decimal import Decimal from pathlib import Path - import numpy as np import pandas as pd import yaml from astropy import units as u from ctapipe.instrument import SubarrayDescription -from magicctapipe.io import ( +from magicctapipe.io import ( get_stereo_events, load_lst_dl1_data_file, load_magic_dl1_data_files, save_pandas_data_in_table, + telescope_combinations, + check_input_list, ) -from magicctapipe.io.io import TEL_NAMES -__all__ = ["event_coincidence"] +__all__ = ["event_coincidence", "telescope_positions"] logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) @@ -80,44 +84,86 @@ # The final digit of timestamps TIME_ACCURACY = 100 * u.ns -# The telescope positions used in a simulation -TEL_POSITIONS = { - 1: [-8.09, 77.13, 0.78] * u.m, # LST-1 - 2: [39.3, -62.55, -0.97] * u.m, # MAGIC-I - 3: [-31.21, -14.57, 0.2] * u.m, # MAGIC-II -} + +def telescope_positions(config): + """ + This function computes the telescope positions with respect to the array barycenter. + The array can have any configuration, e.g.: M1+M2+LST1+LST2, the full MAGIC+LST array, etc. + + Parameters + ---------- + config: dict + Dictionary generated from a yaml file with information about the telescope IDs. + + Returns + ------- + TEL_POSITIONS: dict + Dictionary with telescope positions in the barycenter reference frame of the adopted array. + """ + + #Telescope positions in meters in a generic reference frame: + RELATIVE_POSITIONS = { + "LST-1" : [-70.930, -52.070, 43.00], + "LST-2" : [-35.270, 66.140, 32.00], + "LST-3" : [75.280 , 50.490, 28.70], + "LST-4" : [30.910 , -64.540, 32.00], + "MAGIC-I" : [-23.540, -191.750, 41.25], + "MAGIC-II" : [-94.05, -143.770, 42.42] + } + + telescopes_in_use = {} + tel_cp=config["mc_tel_ids"].copy() + for k, v in tel_cp.copy().items(): + if v <= 0: + # Here we remove the telescopes with ID (i.e. "v") <= 0 from the dictionary: + tel_cp.pop(k) + else: + # Here we check if the telescopes "k" listed in the configuration file are indeed LSTs or MAGICs: + if k in RELATIVE_POSITIONS.keys(): + telescopes_in_use[v] = RELATIVE_POSITIONS[k] + else: + raise Exception(f"Telescope {k} not allowed in analysis.
The telescopes allowed are LST-1, LST-2, LST-3, LST-4, MAGIC-I, and MAGIC-II.") + + average_xyz=np.array([RELATIVE_POSITIONS[k] for k in tel_cp.keys()]).mean(axis=0) + TEL_POSITIONS = {} + for k, v in telescopes_in_use.items(): + TEL_POSITIONS[k] = list(np.round(np.asarray(v)-average_xyz,2)) * u.m + return TEL_POSITIONS def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): """ - Searches for coincident events from LST-1 and MAGIC joint + Searches for coincident events from LST and MAGIC joint observation data offline using their timestamps. Parameters ---------- input_file_lst: str - Path to an input LST-1 DL1 data file + Path to an input LST DL1 data file input_dir_magic: str Path to a directory where input MAGIC DL1 data files are stored output_dir: str Path to a directory where to save an output DL1 data file config: dict - Configuration for the LST-1 + MAGIC combined analysis + Configuration for the LST + MAGIC combined analysis """ config_coinc = config["event_coincidence"] - # Load the input LST-1 DL1 data file - logger.info(f"\nInput LST-1 DL1 data file: {input_file_lst}") + TEL_NAMES, _ = telescope_combinations(config) + + TEL_POSITIONS = telescope_positions(config) + # Load the input LST DL1 data file + logger.info(f"\nInput LST DL1 data file: {input_file_lst}") event_data_lst, subarray_lst = load_lst_dl1_data_file(input_file_lst) # Load the input MAGIC DL1 data files logger.info(f"\nInput MAGIC directory: {input_dir_magic}") - event_data_magic, subarray_magic = load_magic_dl1_data_files(input_dir_magic) + event_data_magic, subarray_magic = load_magic_dl1_data_files(input_dir_magic, config) - # Exclude the parameters non-common to LST-1 and MAGIC data + # Exclude the parameters non-common to LST and MAGIC data timestamp_type_lst = config_coinc["timestamp_type_lst"] logger.info(f"\nLST timestamp type: {timestamp_type_lst}") @@ -136,21 +182,21 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): window_half_width = u.Quantity(window_half_width).to("ns") window_half_width = u.Quantity(window_half_width.round(), dtype=int) - pre_offset_search = False - if "pre_offset_search" in config_coinc: + if "pre_offset_search" in config_coinc: #looking for the boolean value of pre_offset_search in the configuration file pre_offset_search = config_coinc["pre_offset_search"] if pre_offset_search: logger.info("\nPre offset search will be performed.") - n_pre_offset_search_events = config_coinc["n_pre_offset_search_events"] + n_pre_offset_search_events = config_coinc["n_pre_offset_search_events"] #n_pre_offset_search_events is the number of events used to estimate the time offset. Around 100 events may be enough else: logger.info("\noffset scan range defined in the config file will be used.") offset_start = u.Quantity(config_coinc["time_offset"]["start"]) offset_stop = u.Quantity(config_coinc["time_offset"]["stop"]) - + event_data = pd.DataFrame() features = pd.DataFrame() + profiles = pd.DataFrame(data={"time_offset": []}) # Arrange the LST timestamps. 
They are stored in the UNIX format in @@ -171,10 +217,11 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): tel_ids = np.unique(event_data_magic.index.get_level_values("tel_id")) for tel_id in tel_ids: + tel_name = TEL_NAMES[tel_id] df_magic = event_data_magic.query(f"tel_id == {tel_id}").copy() - # Arrange the MAGIC timestamps as same as the LST-1 timestamps + # Arrange the MAGIC timestamps as same as the LST timestamps seconds = np.array([Decimal(str(time)) for time in df_magic["time_sec"]]) nseconds = np.array([Decimal(str(time)) for time in df_magic["time_nanosec"]]) @@ -194,12 +241,11 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): ) logger.info( - f"\nExtracting the {tel_name} events taken when LST-1 observed for pre offset search..." + f"\nExtracting the {tel_name} events taken when LST observed for pre offset search..." ) time_lolim = timestamps_lst[0] - window_half_width time_uplim = timestamps_lst[-1] + window_half_width - cond_lolim = timestamps_magic >= time_lolim cond_uplim = timestamps_magic <= time_uplim @@ -299,12 +345,11 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): time_offsets = u.Quantity(time_offsets.round(), unit="ns", dtype=int) - # Extract the MAGIC events taken when LST-1 observed - logger.info(f"\nExtracting the {tel_name} events taken when LST-1 observed...") + # Extract the MAGIC events taken when LST observed + logger.info(f"\nExtracting the {tel_name} events taken when LST observed...") time_lolim = timestamps_lst[0] + time_offsets[0] - window_half_width time_uplim = timestamps_lst[-1] + time_offsets[-1] + window_half_width - cond_lolim = timestamps_magic >= time_lolim cond_uplim = timestamps_magic <= time_uplim @@ -321,7 +366,7 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): timestamps_magic = timestamps_magic[mask] # Start checking the event coincidence. The time offsets and the - # coincidence window are applied to the LST-1 events, and the + # coincidence window are applied to the LST events, and the # MAGIC events existing in the window, including the edges, are # recognized as the coincident events. At first, we scan the # number of coincident events in each time offset and find the @@ -349,12 +394,16 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): ) n_coincidences.append(n_coincidence) + + + if not any(n_coincidences): logger.info("\nNo coincident events are found. 
Skipping...") continue n_coincidences = np.array(n_coincidences) + # Sometimes there are more than one time offset maximizing the # number of coincidences, so here we calculate the mean of them @@ -392,7 +441,7 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): logger.info(f"--> Number of coincident events: {n_events_at_avg}") logger.info(f"--> Fraction over the {tel_name} events: {percentage:.1f}%") - # Keep only the LST-1 events coincident with the MAGIC events, + # Keep only the LST events coincident with the MAGIC events, # and assign the MAGIC observation and event IDs to them indices_lst, indices_magic = np.where(mask) @@ -406,8 +455,8 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): df_lst.reset_index(inplace=True) df_lst.set_index(["obs_id_magic", "event_id_magic", "tel_id"], inplace=True) - # Assign also the LST-1 observation and event IDs to the MAGIC - # events coincident with the LST-1 events + # Assign also the LST observation and event IDs to the MAGIC + # events coincident with the LST events obs_ids_lst = df_lst["obs_id_lst"].to_numpy() event_ids_lst = df_lst["event_id_lst"].to_numpy() @@ -452,11 +501,11 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): event_data.drop_duplicates(inplace=True) # It sometimes happen that even if it is a MAGIC-stereo event, only - # M1 or M2 event is coincident with a LST-1 event. In that case we + # M1 or M2 event is coincident with a LST event. In that case we # keep both M1 and M2 events, since they are recognized as the same # shower event by the MAGIC-stereo hardware trigger. - # We also keep the MAGIC-stereo events not coincident with any LST-1 + # We also keep the MAGIC-stereo events not coincident with any LST # events, since the stereo reconstruction is still feasible, but not # yet used for the high level analysis. @@ -474,7 +523,7 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): event_data.set_index(["obs_id", "event_id", "tel_id"], inplace=True) event_data.sort_index(inplace=True) - event_data = get_stereo_events(event_data) + event_data = get_stereo_events(event_data, config) event_data.reset_index(inplace=True) event_data = event_data.astype({"obs_id": int, "event_id": int}) @@ -484,7 +533,7 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): input_file_name = Path(input_file_lst).name - output_file_name = input_file_name.replace("LST-1", "LST-1_MAGIC") + output_file_name = input_file_name.replace("LST", "MAGIC_LST") output_file = f"{output_dir}/{output_file_name}" save_pandas_data_in_table( @@ -500,24 +549,28 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): ) # Create the subarray description with the telescope coordinates - # relative to the center of the LST-1 and MAGIC positions - tel_descriptions = { - 1: subarray_lst.tel[1], # LST-1 - 2: subarray_magic.tel[2], # MAGIC-I - 3: subarray_magic.tel[3], # MAGIC-II - } - - subarray_lst1_magic = SubarrayDescription( - "LST1-MAGIC-Array", TEL_POSITIONS, tel_descriptions + # relative to the center of the LST and MAGIC positions + tel_descriptions = {} + for k, v in TEL_NAMES.items(): + if v[:3] == "LST": + tel_descriptions[k] = subarray_lst.tel[k] + elif v[:5] == "MAGIC": + tel_descriptions[k] = subarray_magic.tel[k] + else: + raise Exception(f"{v} is not a valid telescope name (check the config file). 
Only MAGIC and LST telescopes can be analyzed --> Valid telescope names are LST-[1-4] and MAGIC-[I-II] ") + + subarray_lst_magic = SubarrayDescription( + "LST-MAGIC-Array", TEL_POSITIONS, tel_descriptions ) # Save the subarray description - subarray_lst1_magic.to_hdf(output_file) + subarray_lst_magic.to_hdf(output_file) logger.info(f"\nOutput file: {output_file}") def main(): + start_time = time.time() parser = argparse.ArgumentParser() @@ -528,7 +581,7 @@ def main(): dest="input_file_lst", type=str, required=True, - help="Path to an input LST-1 DL1 data file", + help="Path to an input LST DL1 data file", ) parser.add_argument( @@ -563,6 +616,9 @@ def main(): with open(args.config_file, "rb") as f: config = yaml.safe_load(f) + # Checking if the input telescope list is properly organized: + check_input_list(config) + # Check the event coincidence event_coincidence( args.input_file_lst, args.input_dir_magic, args.output_dir, config diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py index 41cf519e..e4e0ba00 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py @@ -2,32 +2,27 @@ # coding: utf-8 """ -This script processes LST-1 and MAGIC events of simtel MC DL0 data +This script processes LST and MAGIC events of simtel MC DL0 data (*.simtel.gz) and computes the DL1 parameters, i.e., Hillas, timing and -leakage parameters. It saves only the events that all the DL1 parameters +leakage parameters. It saves only the events where all the DL1 parameters are successfully reconstructed. -Since it cannot identify the telescopes of the input file, please assign +Since it cannot identify the telescopes from the input file, please assign the correct telescope ID to each telescope in the configuration file. -When saving data to an output file, the telescope IDs will be reset to -the following ones to match with those of real data: - -LST-1: tel_id = 1, MAGIC-I: tel_id = 2, MAGIC-II: tel_id = 3 - -In addition, the telescope coordinate will be converted to the one -relative to the center of the LST-1 and MAGIC positions (including the +The telescope coordinates will be converted to those +relative to the center of the LST and MAGIC positions (including the altitude) for the convenience of the geometrical stereo reconstruction. 
Usage: $ python lst1_magic_mc_dl0_to_dl1.py --input-file dl0/gamma_40deg_90deg_run1.simtel.gz (--output-dir dl1) -(--config-file config.yaml) +(--config-file config_step1.yaml) """ -import argparse -import logging +import argparse #Parser for command-line options, arguments etc +import logging #Used to manage the log file import re import time from pathlib import Path @@ -38,23 +33,17 @@ from astropy.coordinates import Angle, angular_separation from ctapipe.calib import CameraCalibrator from ctapipe.image import ( - apply_time_delta_cleaning, hillas_parameters, leakage_parameters, number_of_islands, - tailcuts_clean, timing_parameters, ) from ctapipe.instrument import SubarrayDescription from ctapipe.io import EventSource, HDF5TableWriter -from lstchain.image.cleaning import apply_dynamic_cleaning -from lstchain.image.modifier import ( - add_noise_in_pixels, - random_psf_smearer, - set_numba_seed, -) + from magicctapipe.image import MAGICClean -from magicctapipe.io import SimEventInfoContainer, format_object +from magicctapipe.image.calib import calibrate +from magicctapipe.io import SimEventInfoContainer, format_object, check_input_list from magicctapipe.utils import calculate_disp, calculate_impact from traitlets.config import Config @@ -64,11 +53,14 @@ logger.addHandler(logging.StreamHandler()) logger.setLevel(logging.INFO) -# The CORSIKA particle types +# The CORSIKA particle types #CORSIKA simulates Cherenkov light PARTICLE_TYPES = {1: "gamma", 3: "electron", 14: "proton", 402: "helium"} -def mc_dl0_to_dl1(input_file, output_dir, config): + + + +def mc_dl0_to_dl1(input_file, output_dir, config, focal_length): """ Processes LST-1 and MAGIC events of simtel MC DL0 data and computes the DL1 parameters. @@ -83,22 +75,18 @@ def mc_dl0_to_dl1(input_file, output_dir, config): Configuration for the LST-1 + MAGIC analysis """ - assigned_tel_ids = config["mc_tel_ids"] + assigned_tel_ids = config["mc_tel_ids"] #This variable becomes the dictionary {'LST-1': 1, 'MAGIC-I': 2, 'MAGIC-II': 3} - logger.info("\nAssigned telescope IDs:") + logger.info("\nAssigned telescope IDs:") #Here we are just adding infos to the log file logger.info(format_object(assigned_tel_ids)) - tel_id_lst1 = assigned_tel_ids["LST-1"] - tel_id_m1 = assigned_tel_ids["MAGIC-I"] - tel_id_m2 = assigned_tel_ids["MAGIC-II"] - # Load the input file logger.info(f"\nInput file: {input_file}") event_source = EventSource( input_file, - allowed_tels=list(assigned_tel_ids.values()), - focal_length_choice="effective", + allowed_tels=list(filter(lambda check_id: check_id > 0, assigned_tel_ids.values())), #Here we load the events for all telescopes with ID > 0. 
+ focal_length_choice=focal_length, ) obs_id = event_source.obs_ids[0] @@ -135,14 +123,7 @@ def mc_dl0_to_dl1(input_file, output_dir, config): logger.info("\nLST PSF modifier:") logger.info(format_object(config_lst["increase_psf"])) - increase_nsb = config_lst["increase_nsb"].pop("use") - increase_psf = config_lst["increase_psf"].pop("use") - - if increase_nsb: - rng = np.random.default_rng(obs_id) - - if increase_psf: - set_numba_seed(obs_id) + logger.info("\nLST tailcuts cleaning:") logger.info(format_object(config_lst["tailcuts_clean"])) @@ -153,8 +134,7 @@ def mc_dl0_to_dl1(input_file, output_dir, config): logger.info("\nLST dynamic cleaning:") logger.info(format_object(config_lst["dynamic_cleaning"])) - use_time_delta_cleaning = config_lst["time_delta_cleaning"].pop("use") - use_dynamic_cleaning = config_lst["dynamic_cleaning"].pop("use") + use_only_main_island = config_lst["use_only_main_island"] logger.info(f"\nLST use only main island: {use_only_main_island}") @@ -177,8 +157,6 @@ def mc_dl0_to_dl1(input_file, output_dir, config): logger.info("\nMAGIC charge correction:") logger.info(format_object(config_magic["charge_correction"])) - use_charge_correction = config_magic["charge_correction"].pop("use") - if config_magic["magic_clean"]["find_hotpixels"]: logger.warning( "\nWARNING: Hot pixels do not exist in a simulation. " @@ -189,11 +167,6 @@ def mc_dl0_to_dl1(input_file, output_dir, config): logger.info("\nMAGIC image cleaning:") logger.info(format_object(config_magic["magic_clean"])) - magic_clean = { - tel_id_m1: MAGICClean(camera_geoms[tel_id_m1], config_magic["magic_clean"]), - tel_id_m2: MAGICClean(camera_geoms[tel_id_m2], config_magic["magic_clean"]), - } - # Prepare for saving data to an output file Path(output_dir).mkdir(exist_ok=True, parents=True) @@ -207,11 +180,31 @@ def mc_dl0_to_dl1(input_file, output_dir, config): zenith = 90 - sim_config["max_alt"].to_value("deg") azimuth = Angle(sim_config["max_az"]).wrap_at("360 deg").degree - + logger.info(np.asarray(list(assigned_tel_ids.values()))) + LSTs_IDs = np.asarray(list(assigned_tel_ids.values())[0:4]) + LSTs_in_use = np.where(LSTs_IDs > 0)[0] + 1 #Here we select which LSTs are/is in use + + if len(LSTs_in_use) == 0: + LSTs_in_use = ''.join(str(k) for k in LSTs_in_use) + elif len(LSTs_in_use) > 0: + LSTs_in_use = 'LST'+'_LST'.join(str(k) for k in LSTs_in_use) + + MAGICs_IDs = np.asarray(list(assigned_tel_ids.values())[4:6]) + MAGICs_in_use = np.where(MAGICs_IDs > 0)[0] + 1 #Here we select which MAGICs are/is in use + + if len(MAGICs_in_use) == 0: + MAGICs_in_use = ''.join(str(k) for k in MAGICs_in_use) + elif len(MAGICs_in_use) > 0: + MAGICs_in_use = 'MAGIC'+'_MAGIC'.join(str(k) for k in MAGICs_in_use) + magic_clean = {} + for k in MAGICs_IDs: + if k > 0: + magic_clean[k] = MAGICClean(camera_geoms[k], config_magic["magic_clean"]) + output_file = ( f"{output_dir}/dl1_{particle_type}_zd_{zenith.round(3)}deg_" - f"az_{azimuth.round(3)}deg_LST-1_MAGIC_run{obs_id}.h5" - ) + f"az_{azimuth.round(3)}deg_{LSTs_in_use}_{MAGICs_in_use}_run{obs_id}.h5" + ) #The files are saved with the names of all telescopes involved # Loop over every shower event logger.info("\nProcessing the events...") @@ -224,83 +217,21 @@ def mc_dl0_to_dl1(input_file, output_dir, config): tels_with_trigger = event.trigger.tels_with_trigger # Check if the event triggers both M1 and M2 or not - trigger_m1 = tel_id_m1 in tels_with_trigger - trigger_m2 = tel_id_m2 in tels_with_trigger - - magic_stereo = trigger_m1 and trigger_m2 + 
magic_stereo=(set(MAGICs_IDs).issubset(set(tels_with_trigger))) and (MAGICs_in_use=="MAGIC1_MAGIC2") #If both have trigger, then magic_stereo = True + + for tel_id in tels_with_trigger: - for tel_id in tels_with_trigger: - if tel_id == tel_id_lst1: + if tel_id in LSTs_IDs: ##If the ID is in the LST list, we call calibrate on the LST() # Calibrate the LST-1 event - calibrator_lst._calibrate_dl0(event, tel_id) - calibrator_lst._calibrate_dl1(event, tel_id) - - image = event.dl1.tel[tel_id].image.astype(np.float64) - peak_time = event.dl1.tel[tel_id].peak_time.astype(np.float64) - - if increase_nsb: - # Add extra noise in pixels - image = add_noise_in_pixels( - rng, image, **config_lst["increase_nsb"] - ) - - if increase_psf: - # Smear the image - image = random_psf_smearer( - image=image, - fraction=config_lst["increase_psf"]["fraction"], - indices=camera_geoms[tel_id].neighbor_matrix_sparse.indices, - indptr=camera_geoms[tel_id].neighbor_matrix_sparse.indptr, - ) - - # Apply the image cleaning - signal_pixels = tailcuts_clean( - camera_geoms[tel_id], image, **config_lst["tailcuts_clean"] - ) - - if use_time_delta_cleaning: - signal_pixels = apply_time_delta_cleaning( - geom=camera_geoms[tel_id], - mask=signal_pixels, - arrival_times=peak_time, - **config_lst["time_delta_cleaning"], - ) - - if use_dynamic_cleaning: - signal_pixels = apply_dynamic_cleaning( - image, signal_pixels, **config_lst["dynamic_cleaning"] - ) - - if use_only_main_island: - _, island_labels = number_of_islands( - camera_geoms[tel_id], signal_pixels - ) - n_pixels_on_island = np.bincount(island_labels.astype(np.int64)) - - # The first index means the pixels not surviving - # the cleaning, so should not be considered - n_pixels_on_island[0] = 0 - max_island_label = np.argmax(n_pixels_on_island) - signal_pixels[island_labels != max_island_label] = False - - else: + signal_pixels, image, peak_time = calibrate(event=event, tel_id=tel_id, obs_id=obs_id, config=config_lst, camera_geoms=camera_geoms, calibrator=calibrator_lst, is_lst=True) + elif tel_id in MAGICs_IDs: # Calibrate the MAGIC event - calibrator_magic._calibrate_dl0(event, tel_id) - calibrator_magic._calibrate_dl1(event, tel_id) - - image = event.dl1.tel[tel_id].image.astype(np.float64) - peak_time = event.dl1.tel[tel_id].peak_time.astype(np.float64) - - if use_charge_correction: - # Scale the charges by the correction factor - image *= config_magic["charge_correction"]["factor"] - - # Apply the image cleaning - signal_pixels, image, peak_time = magic_clean[tel_id].clean_image( - event_image=image, event_pulse_time=peak_time + signal_pixels, image, peak_time = calibrate(event=event, tel_id=tel_id, config=config_magic, magic_clean=magic_clean, calibrator=calibrator_magic, is_lst=False) + else: + logger.info( + f"--> Telescope ID {tel_id} not in LST list or MAGIC list. Please check if the IDs are OK in the configuration file" ) - - if not any(signal_pixels): + if not any(signal_pixels): #So: if there is no event, we skip it and go back to the loop in the next event logger.info( f"--> {event.count} event (event ID: {event.index.event_id}, " f"telescope {tel_id}) could not survive the image cleaning. " @@ -326,6 +257,15 @@ def mc_dl0_to_dl1(input_file, output_dir, config): # Parametrize the image hillas_params = hillas_parameters(camera_geom_masked, image_masked) + # + if any(np.isnan(value) for value in hillas_params.values()): + logger.info( + f"--> {event.count} event (event ID: {event.index.event_id}, " + f"telescope {tel_id}): non-valid Hillas parameters. 
Skipping..." + ) + continue + + timing_params = timing_parameters( camera_geom_masked, image_masked, peak_time_masked, hillas_params ) @@ -388,17 +328,10 @@ def mc_dl0_to_dl1(input_file, output_dir, config): n_islands=n_islands, magic_stereo=magic_stereo, ) - + # Reset the telescope IDs - if tel_id == tel_id_lst1: - event_info.tel_id = 1 - - elif tel_id == tel_id_m1: - event_info.tel_id = 2 - - elif tel_id == tel_id_m2: - event_info.tel_id = 3 - + event_info.tel_id = tel_id + # Save the parameters to the output file writer.write( "parameters", @@ -409,28 +342,29 @@ def mc_dl0_to_dl1(input_file, output_dir, config): logger.info(f"\nIn total {n_events_processed} events are processed.") # Convert the telescope coordinate to the one relative to the center - # of the LST-1 and MAGIC positions, and reset the telescope IDs + # of the LST and MAGIC positions, and reset the telescope IDs position_mean = u.Quantity(list(tel_positions.values())).mean(axis=0) - tel_positions_lst1_magic = { - 1: tel_positions[tel_id_lst1] - position_mean, # LST-1 - 2: tel_positions[tel_id_m1] - position_mean, # MAGIC-I - 3: tel_positions[tel_id_m2] - position_mean, # MAGIC-II - } - - tel_descriptions_lst1_magic = { - 1: tel_descriptions[tel_id_lst1], # LST-1 - 2: tel_descriptions[tel_id_m1], # MAGIC-I - 3: tel_descriptions[tel_id_m2], # MAGIC-II - } - - subarray_lst1_magic = SubarrayDescription( - "LST1-MAGIC-Array", tel_positions_lst1_magic, tel_descriptions_lst1_magic + tel_positions_lst_magic = {} + tel_descriptions_lst_magic = {} + IDs_in_use = np.asarray(list(assigned_tel_ids.values())) + IDs_in_use = IDs_in_use[IDs_in_use > 0] + for k in IDs_in_use: + tel_positions_lst_magic[k] = tel_positions[k] - position_mean + tel_descriptions_lst_magic[k] = tel_descriptions[k] + + + subarray_lst_magic = SubarrayDescription( + "LST-MAGIC-Array", tel_positions_lst_magic, tel_descriptions_lst_magic ) + tel_positions = subarray_lst_magic.positions + logger.info("\nTelescope positions:") + logger.info(format_object(tel_positions)) + # Save the subarray description - subarray_lst1_magic.to_hdf(output_file) - + subarray_lst_magic.to_hdf(output_file) + # Save the simulation configuration with HDF5TableWriter(output_file, group_name="simulation", mode="a") as writer: writer.write("config", sim_config) @@ -439,10 +373,15 @@ def mc_dl0_to_dl1(input_file, output_dir, config): def main(): + + """ Here we collect the input parameters from the command line, load the configuration file and run mc_dl0_to_dl1()""" + start_time = time.time() parser = argparse.ArgumentParser() - + + + #Here we are simply collecting the parameters from the command line, as input file, output directory, and configuration file parser.add_argument( "--input-file", "-i", @@ -469,14 +408,29 @@ def main(): default="./config.yaml", help="Path to a configuration file", ) + + parser.add_argument( + "--focal_length_choice", + "-f", + dest="focal_length_choice", + type=str, + choices = ["nominal", "effective"], + default="effective", + help='Choice of focal length, either "effective" or "nominal". 
The default (and standard) value is "effective"', + ) + + args = parser.parse_args() #Here we select all 3 parameters collected above + + with open(args.config_file, "rb") as f: # "rb" mode opens the file in binary format for reading + config = yaml.safe_load(f) #Here we collect the inputs from the configuration file - args = parser.parse_args() + # Checking if the input telescope list is properly organized: + check_input_list(config) - with open(args.config_file, "rb") as f: - config = yaml.safe_load(f) + config['mc_tel_ids']=dict(sorted(config['mc_tel_ids'].items())) #Sorting needed to correctly name the output file # Process the input data - mc_dl0_to_dl1(args.input_file, args.output_dir, config) + mc_dl0_to_dl1(args.input_file, args.output_dir, config, args.focal_length_choice) logger.info("\nDone.") @@ -485,4 +439,4 @@ def main(): if __name__ == "__main__": - main() + main() \ No newline at end of file diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py b/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py index accec1ce..277de8d4 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py @@ -7,7 +7,7 @@ specified in the configuration file are applied to the events before the reconstruction. -When the input is real data containing LST-1 and MAGIC events, it checks +When the input is real data containing LST and MAGIC events, it checks the angular distances of their pointing directions and excludes the events taken with larger distances than the limit specified in the configuration file. This is in principle to avoid the reconstruction of @@ -44,7 +44,7 @@ ) from ctapipe.instrument import SubarrayDescription from ctapipe.reco import HillasReconstructor -from magicctapipe.io import format_object, get_stereo_events, save_pandas_data_in_table +from magicctapipe.io import format_object, get_stereo_events, save_pandas_data_in_table, check_input_list from magicctapipe.utils import calculate_impact, calculate_mean_direction __all__ = ["calculate_pointing_separation", "stereo_reconstruction"] @@ -54,39 +54,45 @@ logger.setLevel(logging.INFO) -def calculate_pointing_separation(event_data): +def calculate_pointing_separation(event_data, config): """ - Calculates the angular distance of the LST-1 and MAGIC pointing + Calculates the angular distance of the LST and MAGIC pointing directions. 
Parameters ---------- event_data: pandas.core.frame.DataFrame - Data frame of LST-1 and MAGIC events - + Data frame of LST and MAGIC events + config: dict + Configuration for the LST + MAGIC analysis Returns ------- theta: pandas.core.series.Series - Angular distance of the LST-1 and MAGIC pointing directions - in the unit of degree + Angular distance of the LST array and MAGIC pointing directions + in units of degree """ - - # Extract LST-1 events - df_lst = event_data.query("tel_id == 1") - - # Extract the MAGIC events seen by also LST-1 - df_magic = event_data.query("tel_id == [2, 3]") + + assigned_tel_ids = config["mc_tel_ids"] #This variable becomes a dictionary, e.g.: {'LST-1': 1, 'LST-2': 0, 'LST-3': 0, 'LST-4': 0, 'MAGIC-I': 2, 'MAGIC-II': 3} + LSTs_IDs = np.asarray(list(assigned_tel_ids.values())[0:4]) + LSTs_IDs = list(LSTs_IDs[LSTs_IDs > 0]) #Here we list only the LSTs in use + MAGICs_IDs = np.asarray(list(assigned_tel_ids.values())[4:6]) + MAGICs_IDs = list(MAGICs_IDs[MAGICs_IDs > 0]) #Here we list only the MAGICs in use + + # Extract LST events + df_lst = event_data.query(f"tel_id == {LSTs_IDs}") + + # Extract the coincident events observed by both MAGICs and LSTs + df_magic = event_data.query(f"tel_id == {MAGICs_IDs}") df_magic = df_magic.loc[df_lst.index] - # Calculate the mean of the M1 and M2 pointing directions - pnt_az_magic, pnt_alt_magic = calculate_mean_direction( - lon=df_magic["pointing_az"], lat=df_magic["pointing_alt"], unit="rad" - ) + # Calculate the mean of the LSTs, and also of the M1 and M2 pointing directions + pnt_az_LST, pnt_alt_LST = calculate_mean_direction(lon=df_lst["pointing_az"], lat=df_lst["pointing_alt"], unit="rad") + pnt_az_magic, pnt_alt_magic = calculate_mean_direction(lon=df_magic["pointing_az"], lat=df_magic["pointing_alt"], unit="rad") # Calculate the angular distance of their pointing directions theta = angular_separation( - lon1=u.Quantity(df_lst["pointing_az"], unit="rad"), - lat1=u.Quantity(df_lst["pointing_alt"], unit="rad"), + lon1=u.Quantity(pnt_az_LST, unit="rad"), + lat1=u.Quantity(pnt_alt_LST, unit="rad"), lon2=u.Quantity(pnt_az_magic, unit="rad"), lat2=u.Quantity(pnt_alt_magic, unit="rad"), ) @@ -108,14 +114,15 @@ def stereo_reconstruction(input_file, output_dir, config, magic_only_analysis=Fa output_dir: str Path to a directory where to save an output DL1-stereo data file config: dict - Configuration for the LST-1 + MAGIC analysis + Configuration file for the stereo LST + MAGIC analysis, i.e. 
config_stereo.yaml magic_only_analysis: bool If `True`, it reconstructs the stereo parameters using only MAGIC events """ config_stereo = config["stereo_reco"] - + assigned_tel_ids = config["mc_tel_ids"] #This variable becomes a dictionary, e.g.: {'LST-1': 1, 'LST-2': 0, 'LST-3': 0, 'LST-4': 0, 'MAGIC-I': 2, 'MAGIC-II': 3} + # Load the input file logger.info(f"\nInput file: {input_file}") @@ -142,25 +149,36 @@ def stereo_reconstruction(input_file, output_dir, config, magic_only_analysis=Fa # Apply the event cuts logger.info(f"\nMAGIC-only analysis: {magic_only_analysis}") + LSTs_IDs = np.asarray(list(assigned_tel_ids.values())[0:4]) + if magic_only_analysis: - event_data.query("tel_id > 1", inplace=True) + tel_id=np.asarray(list(assigned_tel_ids.values())[:]) + used_id=tel_id[tel_id!=0] + magic_ids=[item for item in used_id if item not in LSTs_IDs] + event_data.query(f"tel_id in {magic_ids}", inplace=True) # Here we select only the events with the MAGIC tel_ids logger.info(f"\nQuality cuts: {config_stereo['quality_cuts']}") - event_data = get_stereo_events(event_data, config_stereo["quality_cuts"]) + event_data = get_stereo_events(event_data, config=config, quality_cuts=config_stereo["quality_cuts"]) - # Check the angular distance of the LST-1 and MAGIC pointing directions + # Check the angular distance of the LST and MAGIC pointing directions tel_ids = np.unique(event_data.index.get_level_values("tel_id")).tolist() - if (not is_simulation) and (tel_ids != [2, 3]): + Number_of_LSTs_in_use = len(LSTs_IDs[LSTs_IDs > 0]) + MAGICs_IDs = np.asarray(list(assigned_tel_ids.values())[4:6]) + Number_of_MAGICs_in_use = len(MAGICs_IDs[MAGICs_IDs > 0]) + Two_arrays_are_used = (Number_of_LSTs_in_use*Number_of_MAGICs_in_use > 0) + + if (not is_simulation) and (Two_arrays_are_used): + logger.info( "\nChecking the angular distances of " - "the LST-1 and MAGIC pointing directions..." + "the LST and MAGIC pointing directions..." 
) event_data.reset_index(level="tel_id", inplace=True) # Calculate the angular distance - theta = calculate_pointing_separation(event_data) + theta = calculate_pointing_separation(event_data, config) theta_uplim = u.Quantity(config_stereo["theta_uplim"]) mask = u.Quantity(theta, unit="deg") < theta_uplim @@ -201,6 +219,7 @@ multi_indices = event_data.groupby(["obs_id", "event_id"]).size().index for i_evt, (obs_id, event_id) in enumerate(multi_indices): + if i_evt % 100 == 0: logger.info(f"{i_evt} events") @@ -218,6 +237,7 @@ tel_ids = df_evt.index.get_level_values("tel_id") for tel_id in tel_ids: + df_tel = df_evt.loc[tel_id] # Assign the telescope information @@ -321,6 +341,7 @@ def main(): + start_time = time.time() parser = argparse.ArgumentParser() @@ -363,7 +384,10 @@ with open(args.config_file, "rb") as f: config = yaml.safe_load(f) - + + # Checking if the input telescope list is properly organized: + check_input_list(config) + # Process the input data stereo_reconstruction(args.input_file, args.output_dir, config, args.magic_only) @@ -374,4 +398,4 @@ if __name__ == "__main__": - main() + main() \ No newline at end of file diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_train_rfs.py b/magicctapipe/scripts/lst1_magic/lst1_magic_train_rfs.py index 8afaaf3b..d5bc2fbf 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_train_rfs.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_train_rfs.py @@ -41,7 +41,7 @@ import pandas as pd import yaml from magicctapipe.io import format_object, load_train_data_files -from magicctapipe.io.io import GROUP_INDEX_TRAIN, TEL_NAMES +from magicctapipe.io.io import GROUP_INDEX_TRAIN from magicctapipe.reco import DispRegressor, EnergyRegressor, EventClassifier __all__ = [ @@ -54,6 +54,8 @@ logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) logger.setLevel(logging.INFO) +TEL_NAMES = {1: "LST-1", 2: "MAGIC-I", 3: "MAGIC-II"} #TODO: REMOVE WHEN SWITCHING TO THE NEW RFs IMPLEMENTATION (1 RF PER TELESCOPE) + # True event class of gamma and proton MCs EVENT_CLASS_GAMMA = 0 diff --git a/magicctapipe/scripts/lst1_magic/magic_calib_to_dl1.py b/magicctapipe/scripts/lst1_magic/magic_calib_to_dl1.py index 645c1005..ed56cd2b 100644 --- a/magicctapipe/scripts/lst1_magic/magic_calib_to_dl1.py +++ b/magicctapipe/scripts/lst1_magic/magic_calib_to_dl1.py @@ -7,11 +7,6 @@ Hillas, timing and leakage parameters. It saves only the events that all the DL1 parameters are successfully reconstructed.
-When saving data to an output file, the telescope IDs will be reset to -the following ones for the convenience of the combined analysis with -LST-1, whose telescope ID is 1: - -MAGIC-I: tel_id = 2, MAGIC-II: tel_id = 3 When the input is real data, it searches for all the subrun files with the same observation ID and stored in the same directory as the input @@ -56,7 +51,7 @@ from ctapipe.io import HDF5TableWriter from ctapipe_io_magic import MAGICEventSource from magicctapipe.image import MAGICClean -from magicctapipe.io import RealEventInfoContainer, SimEventInfoContainer, format_object +from magicctapipe.io import RealEventInfoContainer, SimEventInfoContainer, format_object, check_input_list from magicctapipe.utils import calculate_disp, calculate_impact __all__ = ["magic_calib_to_dl1"] @@ -300,10 +295,10 @@ def magic_calib_to_dl1(input_file, output_dir, config, max_events, process_run=F # Reset the telescope IDs if tel_id == 1: - event_info.tel_id = 2 # MAGIC-I + event_info.tel_id = config["mc_tel_ids"]["MAGIC-I"] # MAGIC-I elif tel_id == 2: - event_info.tel_id = 3 # MAGIC-II + event_info.tel_id = config["mc_tel_ids"]["MAGIC-II"] # MAGIC-II # Save the parameters to the output file writer.write( @@ -315,13 +310,13 @@ def magic_calib_to_dl1(input_file, output_dir, config, max_events, process_run=F # Reset the telescope IDs of the subarray description tel_positions_magic = { - 2: subarray.positions[1], # MAGIC-I - 3: subarray.positions[2], # MAGIC-II + config["mc_tel_ids"]["MAGIC-I"]: subarray.positions[1], # MAGIC-I + config["mc_tel_ids"]["MAGIC-II"]: subarray.positions[2], # MAGIC-II } tel_descriptions_magic = { - 2: subarray.tel[1], # MAGIC-I - 3: subarray.tel[2], # MAGIC-II + config["mc_tel_ids"]["MAGIC-I"]: subarray.tel[1], # MAGIC-I + config["mc_tel_ids"]["MAGIC-II"]: subarray.tel[2], # MAGIC-II } subarray_magic = SubarrayDescription( @@ -392,9 +387,11 @@ def main(): with open(args.config_file, "rb") as f: config = yaml.safe_load(f) + # Checking if the input telescope list is properly organized: + check_input_list(config) + # Process the input data magic_calib_to_dl1(args.input_file, args.output_dir, config, args.max_events, args.process_run) - logger.info("\nDone.") process_time = time.time() - start_time diff --git a/magicctapipe/scripts/lst1_magic/merge_hdf_files.py b/magicctapipe/scripts/lst1_magic/merge_hdf_files.py index c11c450d..7b747935 100644 --- a/magicctapipe/scripts/lst1_magic/merge_hdf_files.py +++ b/magicctapipe/scripts/lst1_magic/merge_hdf_files.py @@ -2,7 +2,7 @@ # coding: utf-8 """ -This script merges the HDF files produced by the LST-1 + MAGIC combined +This script merges the HDF files produced by the LST + MAGIC combined analysis pipeline. It parses information from the file names, so they should follow the convention, i.e., *Run*.*.h5 or *run*.h5. @@ -13,8 +13,7 @@ If the `--run-wise` argument is given, it merges input files run-wise. It is applicable only to real data since MC data are already produced run-wise. The `--subrun-wise` argument can be also used to merge MAGIC -DL1 real data subrun-wise (for example, dl1_M1.Run05093711.001.h5 -+ dl1_M2.Run05093711.001.h5 -> dl1_MAGIC.Run05093711.001.h5). +DL1 real data subrun-wise. Usage: $ python merge_hdf_files.py @@ -22,6 +21,10 @@ (--output-dir dl1_merged) (--run-wise) (--subrun-wise) + +Broader usage: +This script is called automatically from the script "merging_runs_and_spliting_training_samples.py". +If you want to analyse a target, this is the way to go. See this other script for more details. 
""" import argparse @@ -35,7 +38,7 @@ import tables from ctapipe.instrument import SubarrayDescription -__all__ = ["write_data_to_table", "merge_hdf_files"] +__all__ = ["merge_hdf_files","write_data_to_table"] logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler())