From 6845dd2bad1d3ce3f45c228926b54178f84c7e01 Mon Sep 17 00:00:00 2001 From: HailoModelZoo Date: Sun, 7 Apr 2024 01:52:42 +0300 Subject: [PATCH 01/17] update-to-version-2.11.0 --- README.rst | 8 +- docs/CHANGELOG.rst | 38 +- docs/GETTING_STARTED.rst | 38 +- docs/OPTIMIZATION.rst | 2 + docs/PUBLIC_MODELS.rst | 142 +++--- docs/images/logo.png | Bin 206098 -> 185769 bytes .../HAILO15H_classification.rst} | 313 ++++++++----- .../HAILO15H_depth_estimation.rst} | 41 +- .../HAILO15H/HAILO15H_face_attribute.rst | 57 +++ .../HAILO15H_face_detection.rst} | 67 ++- .../HAILO15H_face_recognition.rst} | 41 +- .../HAILO15H_facial_landmark_detection.rst | 57 +++ .../HAILO15H_hand_landmark_detection.rst | 52 +++ .../HAILO15H_image_denoising.rst} | 47 +- .../HAILO15H_instance_segmentation.rst} | 110 +++-- .../HAILO15H_low_light_enhancement.rst} | 41 +- .../HAILO15H_object_detection.rst} | 384 +++++++++------- .../HAILO15H/HAILO15H_person_attribute.rst | 57 +++ .../HAILO15H_person_re_id.rst} | 40 +- .../HAILO15H/HAILO15H_pose_estimation.rst | 105 +++++ .../HAILO15H_semantic_segmentation.rst} | 93 ++-- ...AILO15H_single_person_pose_estimation.rst} | 51 +- .../HAILO15H_stereo_depth_estimation.rst | 57 +++ .../HAILO15H_super_resolution.rst} | 50 +- .../HAILO15H_zero_shot_classification.rst | 57 +++ .../public_models/HAILO15H_Face_Attribute.rst | 44 -- .../HAILO15H_Facial_Landmark_Detection.rst | 44 -- .../HAILO15H_Hand_Landmark_detection.rst | 39 -- .../HAILO15H_Person_Attribute.rst | 44 -- .../HAILO15H_Pose_Estimation.rst | 66 --- .../HAILO15M_classification.rst} | 314 ++++++++----- .../HAILO15M_depth_estimation.rst} | 40 +- .../HAILO15M/HAILO15M_face_attribute.rst | 57 +++ .../HAILO15M_face_detection.rst} | 67 ++- .../HAILO15M_face_recognition.rst} | 41 +- .../HAILO15M_facial_landmark_detection.rst | 57 +++ .../HAILO15M_hand_landmark_detection.rst | 52 +++ .../HAILO15M_image_denoising.rst} | 48 +- .../HAILO15M_instance_segmentation.rst} | 109 +++-- 
.../HAILO15M_low_light_enhancement.rst} | 40 +- .../HAILO15M_object_detection.rst} | 384 +++++++++------- .../HAILO15M/HAILO15M_person_attribute.rst | 57 +++ .../HAILO15M/HAILO15M_person_re_id.rst | 69 +++ .../HAILO15M/HAILO15M_pose_estimation.rst | 105 +++++ .../HAILO15M_semantic_segmentation.rst} | 93 ++-- ...AILO15M_single_person_pose_estimation.rst} | 51 +- .../HAILO15M_stereo_depth_estimation.rst | 57 +++ .../HAILO15M_super_resolution.rst} | 50 +- .../HAILO15M_zero_shot_classification.rst | 57 +++ .../public_models/HAILO15M_Face_Attribute.rst | 44 -- .../HAILO15M_Facial_Landmark_Detection.rst | 44 -- .../HAILO15M_Hand_Landmark_detection.rst | 39 -- .../HAILO15M_Person_Attribute.rst | 45 -- .../HAILO15M_Pose_Estimation.rst | 66 --- .../HAILO8_classification.rst} | 272 +++++------ .../HAILO8_depth_estimation.rst} | 39 +- .../HAILO8/HAILO8_face_attribute.rst | 55 +++ .../HAILO8_face_detection.rst} | 63 +-- .../HAILO8_face_recognition.rst} | 39 +- .../HAILO8_facial_landmark_detection.rst | 55 +++ .../HAILO8/HAILO8_hand_landmark_detection.rst | 50 ++ .../HAILO8_image_denoising.rst} | 47 +- .../HAILO8_instance_segmentation.rst} | 101 ++-- .../HAILO8_low_light_enhancement.rst} | 39 +- .../HAILO8_object_detection.rst} | 435 ++++++++++-------- .../HAILO8_person_attribute.rst} | 32 +- .../HAILO8_person_re_id.rst} | 39 +- .../HAILO8/HAILO8_pose_estimation.rst | 99 ++++ .../HAILO8_semantic_segmentation.rst} | 89 ++-- .../HAILO8_single_person_pose_estimation.rst} | 50 +- .../HAILO8/HAILO8_stereo_depth_estimation.rst | 55 +++ .../HAILO8_super_resolution.rst} | 47 +- .../HAILO8_zero_shot_classification.rst} | 31 +- .../HAILO8l_classificaion.rst} | 271 +++++------ .../HAILO8l_depth_estimation.rst} | 39 +- .../HAILO8L/HAILO8l_face_attribute.rst | 55 +++ .../HAILO8l_face_detection.rst} | 63 +-- .../HAILO8l_face_recognition.rst} | 39 +- .../HAILO8l_facial_landmark_detection.rst | 55 +++ .../HAILO8l_hand_landmark_detection.rst | 50 ++ .../HAILO8l_image_denoising.rst} | 47 +- 
.../HAILO8l_instance_segmentation.rst} | 101 ++-- .../HAILO8l_low_light_enhancement.rst} | 39 +- .../HAILO8l_object_detection.rst} | 435 ++++++++++-------- .../HAILO8l_person_attribute.rst} | 31 +- .../HAILO8l_person_re_id.rst} | 39 +- .../HAILO8L/HAILO8l_pose_estimation.rst | 99 ++++ .../HAILO8l_semantic_segmentation.rst} | 90 ++-- ...HAILO8l_single_person_pose_estimation.rst} | 50 +- .../HAILO8l_stereo_depth_estimation.rst | 55 +++ .../HAILO8l_super_resolution.rst} | 47 +- .../HAILO8l_zero_shot_classification.rst} | 32 +- docs/public_models/HAILO8L_Face_Attribute.rst | 44 -- .../HAILO8L_Facial_Landmark_Detection.rst | 44 -- .../HAILO8L_Hand_Landmark_detection.rst | 38 -- .../public_models/HAILO8L_Pose_Estimation.rst | 66 --- docs/public_models/HAILO8_Face_Attribute.rst | 43 -- .../HAILO8_Facial_Landmark_Detection.rst | 44 -- .../HAILO8_Hand_Landmark_detection.rst | 38 -- docs/public_models/HAILO8_Pose_Estimation.rst | 66 --- .../HAILO8_Single_Person_Pose_Estimation.rst | 66 --- .../HAILO8_Stereo_Depth_Estimation.rst | 43 -- hailo_model_zoo/base_parsers.py | 170 +++++++ .../alls/generic/damoyolo_tinynasL20_T.alls | 2 +- .../alls/generic/detr_resnet_v1_18_bn.alls | 48 +- .../cfg/alls/generic/efficientdet_lite0.alls | 2 +- .../cfg/alls/generic/efficientdet_lite1.alls | 2 +- .../cfg/alls/generic/efficientdet_lite2.alls | 2 +- .../cfg/alls/generic/fast_sam_s.alls | 15 + .../cfg/alls/generic/nanodet_repvgg.alls | 1 + .../cfg/alls/generic/nanodet_repvgg_a12.alls | 3 + .../alls/generic/nanodet_repvgg_a1_640.alls | 1 + .../cfg/alls/generic/ssd_mobilenet_v1.alls | 2 +- .../generic/ssd_mobilenet_v1_no_alls.alls | 2 +- .../generic/ssd_mobilenet_v1_visdrone.alls | 2 +- .../cfg/alls/generic/ssd_mobilenet_v2.alls | 2 +- hailo_model_zoo/cfg/alls/generic/yolov5m.alls | 2 +- .../cfg/alls/generic/yolov5m6_6.1.alls | 2 +- .../cfg/alls/generic/yolov5m_6.1.alls | 2 +- .../cfg/alls/generic/yolov5m_vehicles.alls | 2 +- .../alls/generic/yolov5m_vehicles_nv12.alls | 2 +- 
.../alls/generic/yolov5m_vehicles_yuy2.alls | 2 +- .../cfg/alls/generic/yolov5m_wo_spp.alls | 2 +- .../cfg/alls/generic/yolov5m_wo_spp_60p.alls | 2 +- .../cfg/alls/generic/yolov5m_wo_spp_yuy2.alls | 2 +- hailo_model_zoo/cfg/alls/generic/yolov5s.alls | 2 +- .../cfg/alls/generic/yolov5s_c3tr.alls | 2 +- .../cfg/alls/generic/yolov5s_personface.alls | 2 +- .../alls/generic/yolov5s_personface_nv12.alls | 2 +- .../alls/generic/yolov5s_personface_rgbx.alls | 2 +- .../cfg/alls/generic/yolov5xs_wo_spp.alls | 2 +- .../generic/yolov5xs_wo_spp_nms_core.alls | 2 +- hailo_model_zoo/cfg/alls/generic/yolov6n.alls | 3 +- hailo_model_zoo/cfg/alls/generic/yolov7.alls | 2 +- .../cfg/alls/generic/yolov7_tiny.alls | 2 +- .../cfg/alls/generic/yolov7e6.alls | 2 +- hailo_model_zoo/cfg/alls/generic/yolov8l.alls | 2 +- hailo_model_zoo/cfg/alls/generic/yolov8m.alls | 2 +- .../cfg/alls/generic/yolov8m_pose.alls | 2 +- hailo_model_zoo/cfg/alls/generic/yolov8n.alls | 2 +- hailo_model_zoo/cfg/alls/generic/yolov8s.alls | 2 +- hailo_model_zoo/cfg/alls/generic/yolov8x.alls | 2 +- hailo_model_zoo/cfg/alls/generic/yolov9c.alls | 6 + .../cfg/alls/generic/yolox_l_leaky.alls | 2 +- .../cfg/alls/generic/yolox_s_leaky.alls | 2 +- .../cfg/alls/generic/yolox_s_wide_leaky.alls | 2 +- .../cfg/alls/generic/yolox_tiny.alls | 2 +- .../cfg/alls/hailo15h/base/resnet_v1_50.alls | 14 + .../alls/hailo15h/base/ssd_mobilenet_v1.alls | 12 + .../performance/deeplab_v3_mobilenet_v2.alls | 6 + .../performance/mobilenet_v2_1.0.alls | 4 + .../hailo15h/performance/regnetx_800mf.alls | 2 + .../performance/ssd_mobilenet_v1.alls | 8 + .../performance/yolov5m_wo_spp_60p.alls | 31 ++ .../cfg/alls/hailo8/base/nanodet_repvgg.alls | 1 + .../hailo8/base/nanodet_repvgg_a1_640.alls | 1 + .../alls/hailo8/base/ssd_mobilenet_v1.alls | 2 +- .../hailo8/base/ssd_mobilenet_v1_no_alls.alls | 2 +- .../base/ssd_mobilenet_v1_visdrone.alls | 2 +- .../cfg/alls/hailo8/base/yolov5m.alls | 2 +- .../alls/hailo8/base/yolov5m_wo_spp_yuy2.alls | 2 +- 
.../hailo8/base/yolov5s_personface_nv12.alls | 2 +- .../hailo8/base/yolov5xs_wo_spp_nms_core.alls | 2 +- .../cfg/alls/hailo8/base/yolov6n.alls | 4 +- .../performance/deeplab_v3_mobilenet_v2.alls | 6 + .../hailo8/performance/efficientnet_l.alls | 3 + .../hailo8/performance/efficientnet_m.alls | 3 +- .../hailo8/performance/mobilenet_v2_1.0.alls | 4 + .../alls/hailo8/performance/resnet_v1_50.alls | 10 +- .../hailo8/performance/ssd_mobilenet_v1.alls | 4 +- .../ssd_mobilenet_v1_visdrone.alls | 2 +- .../cfg/alls/hailo8/performance/stdc1.alls | 2 +- .../cfg/alls/hailo8/performance/yolov5m.alls | 2 +- .../hailo8/performance/yolov5m_vehicles.alls | 2 +- .../performance/yolov5m_vehicles_nv12.alls | 2 +- .../performance/yolov5m_vehicles_yuy2.alls | 2 +- .../hailo8/performance/yolov5m_wo_spp.alls | 2 +- .../performance/yolov5m_wo_spp_60p.alls | 2 +- .../cfg/alls/hailo8/performance/yolov5s.alls | 2 +- .../performance/yolov5s_personface.alls | 2 +- .../cfg/alls/hailo8/performance/yolov6n.alls | 4 + .../cfg/alls/hailo8/performance/yolov8n.alls | 3 +- .../cfg/alls/hailo8/performance/yolov8s.alls | 6 + .../hailo8/performance/yolox_s_leaky.alls | 2 +- hailo_model_zoo/cfg/base/base.yaml | 2 - .../cfg/base/efficientdet_lite.yaml | 1 + hailo_model_zoo/cfg/base/efficientnet.yaml | 2 +- hailo_model_zoo/cfg/base/mobilenet.yaml | 2 + hailo_model_zoo/cfg/base/nanodet.yaml | 1 + hailo_model_zoo/cfg/base/resnet.yaml | 2 +- hailo_model_zoo/cfg/base/ssd.yaml | 1 + hailo_model_zoo/cfg/base/vit.yaml | 3 + hailo_model_zoo/cfg/base/yolo.yaml | 1 + hailo_model_zoo/cfg/base/yolox.yaml | 1 + .../centernet_resnet_v1_18_postprocess.yaml | 2 +- .../centernet_resnet_v1_50_postprocess.yaml | 2 +- .../cfg/networks/detr_resnet_v1_18_bn.yaml | 4 +- hailo_model_zoo/cfg/networks/dncnn3.yaml | 3 +- .../cfg/networks/dncnn_color_blind.yaml | 3 +- .../cfg/networks/efficientdet_lite0.yaml | 4 +- .../cfg/networks/efficientdet_lite1.yaml | 4 +- .../cfg/networks/efficientdet_lite2.yaml | 4 +- 
.../cfg/networks/efficientnet_l.yaml | 2 +- .../cfg/networks/face_attr_resnet_v1_18.yaml | 4 +- .../networks/face_attr_resnet_v1_18_nv12.yaml | 1 + .../networks/face_attr_resnet_v1_18_rgbx.yaml | 1 + hailo_model_zoo/cfg/networks/fast_depth.yaml | 4 +- .../cfg/networks/fast_depth_nv12_fhd.yaml | 2 + hailo_model_zoo/cfg/networks/fast_sam_s.yaml | 44 ++ .../cfg/networks/hand_landmark_lite.yaml | 3 +- hailo_model_zoo/cfg/networks/hardnet39ds.yaml | 2 +- hailo_model_zoo/cfg/networks/hardnet68.yaml | 2 +- .../cfg/networks/inception_v1.yaml | 2 +- .../cfg/networks/lightface_slim_nv12_fhd.yaml | 2 +- .../cfg/networks/mobilenet_v1.yaml | 4 +- .../cfg/networks/mobilenet_v2_1.0.yaml | 2 - .../cfg/networks/mobilenet_v2_1.4.yaml | 4 +- .../cfg/networks/nanodet_repvgg.yaml | 11 +- .../cfg/networks/nanodet_repvgg_a12.yaml | 12 +- .../cfg/networks/nanodet_repvgg_a1_640.yaml | 11 +- hailo_model_zoo/cfg/networks/osnet_x1_0.yaml | 2 +- .../networks/person_attr_resnet_v1_18.yaml | 2 +- .../networks/repvgg_a0_person_reid_512.yaml | 2 +- hailo_model_zoo/cfg/networks/repvgg_a1.yaml | 2 +- hailo_model_zoo/cfg/networks/repvgg_a2.yaml | 2 +- .../cfg/networks/resmlp12_relu.yaml | 2 +- .../cfg/networks/resnet_v1_18.yaml | 2 +- .../cfg/networks/resnet_v1_34.yaml | 2 +- .../cfg/networks/resnet_v1_50.yaml | 2 +- .../cfg/networks/resnext26_32x4d.yaml | 2 +- .../cfg/networks/resnext50_32x4d.yaml | 2 +- hailo_model_zoo/cfg/networks/scdepthv3.yaml | 4 +- hailo_model_zoo/cfg/networks/scrfd_10g.yaml | 4 +- .../cfg/networks/scrfd_10g_nv12_fhd.yaml | 4 +- hailo_model_zoo/cfg/networks/scrfd_2.5g.yaml | 4 +- hailo_model_zoo/cfg/networks/scrfd_500m.yaml | 4 +- .../cfg/networks/segformer_b0_bn.yaml | 4 +- .../cfg/networks/ssd_mobilenet_v1.yaml | 3 +- .../networks/ssd_mobilenet_v1_visdrone.yaml | 3 +- .../cfg/networks/ssd_mobilenet_v2.yaml | 3 +- hailo_model_zoo/cfg/networks/stereonet.yaml | 4 +- hailo_model_zoo/cfg/networks/tiny_yolov3.yaml | 2 +- hailo_model_zoo/cfg/networks/vit_base_bn.yaml | 2 +- 
.../cfg/networks/vit_pose_small.yaml | 2 +- hailo_model_zoo/cfg/networks/yolov5m.yaml | 3 +- .../cfg/networks/yolov5m6_6.1.yaml | 5 +- hailo_model_zoo/cfg/networks/yolov5m_6.1.yaml | 3 +- .../cfg/networks/yolov5m_vehicles.yaml | 3 +- .../cfg/networks/yolov5m_vehicles_nv12.yaml | 3 +- .../cfg/networks/yolov5m_vehicles_yuy2.yaml | 3 +- .../cfg/networks/yolov5m_wo_spp.yaml | 3 +- .../cfg/networks/yolov5m_wo_spp_60p.yaml | 4 +- .../cfg/networks/yolov5m_wo_spp_yuy2.yaml | 2 +- hailo_model_zoo/cfg/networks/yolov5s.yaml | 3 +- .../cfg/networks/yolov5s_c3tr.yaml | 3 +- .../cfg/networks/yolov5s_personface.yaml | 3 +- .../networks/yolov5s_personface_nv12_fhd.yaml | 2 +- .../cfg/networks/yolov5xs_wo_spp.yaml | 3 +- .../networks/yolov5xs_wo_spp_nms_core.yaml | 3 +- hailo_model_zoo/cfg/networks/yolov6n.yaml | 2 + hailo_model_zoo/cfg/networks/yolov7.yaml | 3 +- hailo_model_zoo/cfg/networks/yolov7_tiny.yaml | 3 +- hailo_model_zoo/cfg/networks/yolov7e6.yaml | 3 +- hailo_model_zoo/cfg/networks/yolov8l.yaml | 3 +- hailo_model_zoo/cfg/networks/yolov8m.yaml | 3 +- .../cfg/networks/yolov8m_pose.yaml | 2 +- hailo_model_zoo/cfg/networks/yolov8n.yaml | 3 +- hailo_model_zoo/cfg/networks/yolov8s.yaml | 3 +- .../cfg/networks/yolov8s_pose.yaml | 2 +- hailo_model_zoo/cfg/networks/yolov8x.yaml | 3 +- hailo_model_zoo/cfg/networks/yolov9c.yaml | 32 ++ .../cfg/networks/yolox_l_leaky.yaml | 4 +- .../cfg/networks/yolox_s_leaky.yaml | 4 +- .../cfg/networks/yolox_s_wide_leaky.yaml | 4 +- hailo_model_zoo/cfg/networks/yolox_tiny.yaml | 4 +- ..._division_nms_for_AP_test_threshold_0.json | 18 + .../efficientdet_lite0_nms_config.json | 146 ++++++ .../efficientdet_lite1_nms_config.json | 146 ++++++ .../efficientdet_lite2_nms_config.json | 146 ++++++ ...bilenet_ssd_hd_nms_postprocess_config.json | 136 ++++++ .../mobilenet_ssd_nms_postprocess_config.json | 133 ++++++ ...t_ssd_nms_visdrone_postprocess_config.json | 132 ++++++ ...bilenet_v2_ssd_nms_postprocess_config.json | 133 ++++++ 
.../nanodet_nms_config.json | 30 ++ .../nanodet_repvgg_a1_640_nms_config.json | 30 ++ .../nms_config_nanodet_repvgg_a12.json | 34 ++ .../nms_config_yolov6n.json | 34 ++ .../nms_config_yolox_l_leaky.json | 34 ++ .../nms_config_yolox_s_leaky.json | 34 ++ .../nms_config_yolox_s_wide_leaky.json | 34 ++ .../nms_config_yolox_tiny.json | 34 ++ ...tion_only_yolov5s_vehicles_nms_config.json | 58 +++ .../yolov5l_nms_config.json | 55 +++ .../yolov5m6_leaky_nms_config.json | 73 +++ .../yolov5m6_nms_config.json | 73 +++ .../yolov5m_6.1_nms_config.json | 58 +++ .../yolov5m_nms_config.json | 55 +++ .../yolov5m_spp_nms_config.json | 58 +++ .../yolov5m_vehicles_nms_config.json | 55 +++ .../yolov5n6_6.1_leaky_nms_config.json | 73 +++ .../yolov5n6_nms_config.json | 73 +++ .../yolov5n_6.1_nms_config.json | 58 +++ .../yolov5n_seg_nms_config.json | 66 +++ .../yolov5s6_leaky_nms_config.json | 73 +++ .../yolov5s6_nms_config.json | 73 +++ .../yolov5s_6.0_leaky_focus_nms_config.json | 58 +++ .../yolov5s_6.1_nms_config.json | 58 +++ .../yolov5s_c3tr_nms_config.json | 58 +++ .../yolov5s_nms_config.json | 58 +++ .../yolov5s_personface.json | 58 +++ .../yolov5s_vehicles_nms_config.json | 58 +++ .../yolov5x_nms_config.json | 58 +++ .../yolov5xs_nms_config.json | 58 +++ .../yolov5xs_wo_spp_nms_config.json | 56 +++ .../yolov5xs_wo_spp_nms_config_10class.json | 56 +++ .../yolov5xs_wo_spp_nms_config_1class.json | 56 +++ .../yolov5xs_wo_spp_nms_config_20class.json | 56 +++ .../yolov5xs_wo_spp_nms_config_5class.json | 56 +++ .../postprocess_config/yolov7_nms_config.json | 58 +++ .../yolov7_tiny_nms_config.json | 58 +++ .../yolov7e6_nms_config.json | 73 +++ .../yolov8l_nms_config.json | 33 ++ .../yolov8m_nms_config.json | 33 ++ .../yolov8n_nms_config.json | 32 ++ ...yolov8s_bbox_decoding_only_nms_config.json | 33 ++ .../yolov8s_nms_config.json | 33 ++ .../yolov8x_nms_config.json | 33 ++ .../postprocess_config/yolox_hailo_nms.json | 30 ++ .../yolox_hailo_pas_nms_4cls_pas20k.json | 30 ++ 
.../core/datasets/dataset_factory.py | 65 +-- .../core/datasets/parse_300w_lp_tddfa.py | 3 + .../core/datasets/parse_aflw2k3d.py | 3 + .../core/datasets/parse_aflw2k3d_tddfa.py | 3 + hailo_model_zoo/core/datasets/parse_afw.py | 3 + hailo_model_zoo/core/datasets/parse_bsd100.py | 3 + hailo_model_zoo/core/datasets/parse_bsd68.py | 4 + hailo_model_zoo/core/datasets/parse_cifar.py | 3 + hailo_model_zoo/core/datasets/parse_coco.py | 41 +- hailo_model_zoo/core/datasets/parse_div2k.py | 3 + .../core/datasets/parse_facenet.py | 3 + .../core/datasets/parse_gustavosta_prompts.py | 4 + .../core/datasets/parse_imagenet.py | 3 + .../core/datasets/parse_kitti_3d.py | 2 + .../core/datasets/parse_kitti_depth.py | 2 + .../core/datasets/parse_kitti_stereo.py | 3 + .../core/datasets/parse_landmarks.py | 5 + hailo_model_zoo/core/datasets/parse_lol.py | 3 + hailo_model_zoo/core/datasets/parse_lp_ocr.py | 3 + hailo_model_zoo/core/datasets/parse_market.py | 3 + hailo_model_zoo/core/datasets/parse_mot.py | 3 + .../core/datasets/parse_nyu_depth_v2.py | 2 + hailo_model_zoo/core/datasets/parse_pascal.py | 3 + hailo_model_zoo/core/datasets/parse_peta.py | 4 + .../core/datasets/parse_tusimple.py | 3 + .../core/datasets/parse_utkfaces.py | 3 + .../core/datasets/parse_widerface.py | 3 + .../core/eval/age_gender_evaluation.py | 2 + .../core/eval/classification_evaluation.py | 6 +- .../core/eval/depth_estimation_evaluation.py | 6 +- .../core/eval/detection_3d_evaluation.py | 2 + .../core/eval/detection_evaluation.py | 4 +- hailo_model_zoo/core/eval/eval_factory.py | 76 +-- .../core/eval/face_detection_evaluation.py | 5 +- .../core/eval/face_landmark_evaluation.py | 6 +- .../core/eval/face_verification_evaluation.py | 5 +- .../core/eval/faster_rcnn_evaluation.py | 6 +- .../eval/head_pose_estimation_evaluation.py | 5 +- .../core/eval/image_denoising_evaluation.py | 5 +- .../image_generation_from_text_evaluation.py | 9 +- .../eval/instance_segmentation_evaluation.py | 17 +- 
.../core/eval/lane_detection_evaluation.py | 12 +- .../eval/low_light_enhancement_evaluation.py | 6 +- .../multiple_object_tracking_evaluation.py | 3 + hailo_model_zoo/core/eval/ocr_evaluation.py | 6 +- .../core/eval/person_attr_evaluation.py | 6 +- .../core/eval/person_reid_evaluation.py | 5 +- .../core/eval/pose_estimation_evaluation.py | 5 +- .../core/eval/segmentation_evaluation.py | 5 +- ...ingle_person_pose_estimation_evaluation.py | 5 +- hailo_model_zoo/core/eval/srgan_evaluation.py | 5 +- .../core/eval/stereo_evaluation.py | 4 + .../core/eval/super_resolution_evaluation.py | 5 +- hailo_model_zoo/core/factory/__init__.py | 8 + hailo_model_zoo/core/infer/infer_factory.py | 46 +- hailo_model_zoo/core/infer/infer_utils.py | 54 ++- hailo_model_zoo/core/infer/model_infer.py | 10 + .../core/infer/model_infer_lite.py | 52 ++- hailo_model_zoo/core/infer/runner_infer.py | 2 + .../core/infer/sd2_unet_model_infer.py | 3 + hailo_model_zoo/core/infer/tf_infer.py | 2 + .../core/infer/tf_infer_second_stage.py | 2 + hailo_model_zoo/core/info_utils.py | 8 +- hailo_model_zoo/core/main_utils.py | 123 +++-- .../age_gender_postprocessing.py | 8 +- .../centerpose_postprocessing.py | 14 +- .../classification_postprocessing.py | 16 +- .../depth_estimation_postprocessing.py | 7 +- .../postprocessing/detection/centernet.py | 6 +- .../core/postprocessing/detection/nanodet.py | 31 +- .../postprocessing/detection/ssd_mlperf_tf.py | 8 +- .../core/postprocessing/detection/yolo.py | 38 +- .../detection_3d_postprocessing.py | 8 +- .../detection_postprocessing.py | 14 +- .../face_attr_postprocessing.py | 3 + .../face_detection_postprocessing.py | 3 +- .../face_landmarks_3d_postprocessing.py | 5 + .../postprocessing/facenet_postprocessing.py | 10 +- .../head_pose_estimation_postprocessing.py | 11 +- .../image_denoising_postprocessing.py | 8 +- .../instance_segmentation_postprocessing.py | 10 +- .../landmarks_postprocessing.py | 8 +- .../lane_detection/polylanenet.py | 4 +- 
.../lane_detection_postprocessing.py | 8 +- .../low_light_enhancement_postprocessing.py | 6 +- .../postprocessing/mspn_postprocessing.py | 6 +- ...multiple_object_tracking_postprocessing.py | 12 +- .../core/postprocessing/ocr_postprocessing.py | 3 + .../person_reid_postprocessing.py | 3 + .../pose_estimation_postprocessing.py | 12 +- .../postprocessing/postprocessing_factory.py | 146 +----- .../segmentation_postprocessing.py | 9 +- .../stable_diffusion_v2_postprocessing.py | 11 +- .../stereonet_postprocessing.py | 6 +- .../super_resolution_postprocessing.py | 9 +- .../postprocessing/vit_pose_postprocessing.py | 8 +- .../preprocessing/centerpose_preprocessing.py | 11 +- .../classification_preprocessing.py | 15 +- .../depth_estimation_preprocessing.py | 6 + .../preprocessing/detection_preprocessing.py | 15 + .../face_landmarks_preprocessing.py | 4 + .../image_denoising_preprocessing.py | 3 + .../lane_detection_preprocessing.py | 4 + .../low_light_enhancement_preprocessing.py | 3 + .../core/preprocessing/mspn_preprocessing.py | 22 +- .../person_reid_preprocessing.py | 3 + .../core/preprocessing/pose_preprocessing.py | 9 +- .../preprocessing/preprocessing_factory.py | 84 +--- .../segmentation_preprocessing.py | 18 +- .../stable_diffusion_v2_preprocessing.py | 4 + .../preprocessing/stereonet_preprocessing.py | 4 + .../super_resolution_preprocessing.py | 5 + hailo_model_zoo/main.py | 210 +++------ hailo_model_zoo/main_driver.py | 89 ++-- hailo_model_zoo/multi_main.py | 5 +- hailo_model_zoo/utils/completions.py | 51 ++ hailo_model_zoo/utils/constants.py | 1 - hailo_model_zoo/utils/data.py | 19 +- hailo_model_zoo/utils/factory_utils.py | 32 ++ hailo_model_zoo/utils/hw_utils.py | 2 - hailo_model_zoo/utils/logger.py | 3 + hailo_model_zoo/utils/numpy_utils.py | 10 +- hailo_model_zoo/utils/parse_utils.py | 4 +- .../docs/TRAINING_GUIDE.rst | 50 +- .../license_plate_recognition/README.rst | 2 +- .../docs/TRAINING_GUIDE.rst | 52 +-- hailo_models/personface_detection/README.rst | 
2 +- .../docs/TRAINING_GUIDE.rst | 58 ++- hailo_models/reid/docs/TRAINING_GUIDE.rst | 48 +- .../vehicle_detection/docs/TRAINING_GUIDE.rst | 58 ++- setup.py | 14 +- training/arcface/README.rst | 46 +- training/centerpose/README.rst | 48 +- training/damoyolo/README.rst | 50 +- training/fcn/README.rst | 46 +- training/fcn_hailo/Dockerfile | 30 ++ training/fcn_hailo/README.rst | 141 ++++++ training/mspn/README.rst | 52 +-- training/nanodet/README.rst | 49 +- training/vit/README.rst | 47 +- training/yolact/README.rst | 48 +- training/yolov3/README.rst | 45 +- training/yolov4/README.rst | 48 +- training/yolov5/README.rst | 56 ++- training/yolov8/README.rst | 53 +-- training/yolov8_seg/README.rst | 52 +-- training/yolox/README.rst | 49 +- training/yolox_hailo/Dockerfile | 34 ++ training/yolox_hailo/GUIDE.md | 69 +++ training/yolox_hailo/README.rst | 113 +++++ 479 files changed, 10579 insertions(+), 4691 deletions(-) rename docs/public_models/{HAILO15H_Classification.rst => HAILO15H/HAILO15H_classification.rst} (57%) rename docs/public_models/{HAILO8L_Depth_Estimation.rst => HAILO15H/HAILO15H_depth_estimation.rst} (53%) create mode 100644 docs/public_models/HAILO15H/HAILO15H_face_attribute.rst rename docs/public_models/{HAILO15H_Face_Detection.rst => HAILO15H/HAILO15H_face_detection.rst} (59%) rename docs/public_models/{HAILO8_Face_Recognition.rst => HAILO15H/HAILO15H_face_recognition.rst} (51%) create mode 100644 docs/public_models/HAILO15H/HAILO15H_facial_landmark_detection.rst create mode 100644 docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst rename docs/public_models/{HAILO8L_Image_Denoising.rst => HAILO15H/HAILO15H_image_denoising.rst} (57%) rename docs/public_models/{HAILO8L_Instance_Segmentation.rst => HAILO15H/HAILO15H_instance_segmentation.rst} (64%) rename docs/public_models/{HAILO8_Low_Light_Enhancement.rst => HAILO15H/HAILO15H_low_light_enhancement.rst} (53%) rename docs/public_models/{HAILO8_Object_Detection.rst => 
HAILO15H/HAILO15H_object_detection.rst} (73%) create mode 100644 docs/public_models/HAILO15H/HAILO15H_person_attribute.rst rename docs/public_models/{HAILO15M_Person_Re_ID.rst => HAILO15H/HAILO15H_person_re_id.rst} (53%) create mode 100644 docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst rename docs/public_models/{HAILO15H_Semantic_Segmentation.rst => HAILO15H/HAILO15H_semantic_segmentation.rst} (58%) rename docs/public_models/{HAILO15M_Single_Person_Pose_Estimation.rst => HAILO15H/HAILO15H_single_person_pose_estimation.rst} (55%) create mode 100644 docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst rename docs/public_models/{HAILO8L_Super_Resolution.rst => HAILO15H/HAILO15H_super_resolution.rst} (57%) create mode 100644 docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst delete mode 100644 docs/public_models/HAILO15H_Face_Attribute.rst delete mode 100644 docs/public_models/HAILO15H_Facial_Landmark_Detection.rst delete mode 100644 docs/public_models/HAILO15H_Hand_Landmark_detection.rst delete mode 100644 docs/public_models/HAILO15H_Person_Attribute.rst delete mode 100644 docs/public_models/HAILO15H_Pose_Estimation.rst rename docs/public_models/{HAILO15M_Classification.rst => HAILO15M/HAILO15M_classification.rst} (57%) rename docs/public_models/{HAILO15M_Depth_Estimation.rst => HAILO15M/HAILO15M_depth_estimation.rst} (53%) create mode 100644 docs/public_models/HAILO15M/HAILO15M_face_attribute.rst rename docs/public_models/{HAILO15M_Face_Detection.rst => HAILO15M/HAILO15M_face_detection.rst} (59%) rename docs/public_models/{HAILO8L_Face_Recognition.rst => HAILO15M/HAILO15M_face_recognition.rst} (51%) create mode 100644 docs/public_models/HAILO15M/HAILO15M_facial_landmark_detection.rst create mode 100644 docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst rename docs/public_models/{HAILO8_Image_Denoising.rst => HAILO15M/HAILO15M_image_denoising.rst} (57%) rename docs/public_models/{HAILO8_Instance_Segmentation.rst => 
HAILO15M/HAILO15M_instance_segmentation.rst} (64%) rename docs/public_models/{HAILO15H_Low_Light_Enhancement.rst => HAILO15M/HAILO15M_low_light_enhancement.rst} (53%) rename docs/public_models/{HAILO8L_Object_Detection.rst => HAILO15M/HAILO15M_object_detection.rst} (73%) create mode 100644 docs/public_models/HAILO15M/HAILO15M_person_attribute.rst create mode 100644 docs/public_models/HAILO15M/HAILO15M_person_re_id.rst create mode 100644 docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst rename docs/public_models/{HAILO15M_Semantic_Segmentation.rst => HAILO15M/HAILO15M_semantic_segmentation.rst} (58%) rename docs/public_models/{HAILO15H_Single_Person_Pose_Estimation.rst => HAILO15M/HAILO15M_single_person_pose_estimation.rst} (55%) create mode 100644 docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst rename docs/public_models/{HAILO8_Super_Resolution.rst => HAILO15M/HAILO15M_super_resolution.rst} (57%) create mode 100644 docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst delete mode 100644 docs/public_models/HAILO15M_Face_Attribute.rst delete mode 100644 docs/public_models/HAILO15M_Facial_Landmark_Detection.rst delete mode 100644 docs/public_models/HAILO15M_Hand_Landmark_detection.rst delete mode 100644 docs/public_models/HAILO15M_Person_Attribute.rst delete mode 100644 docs/public_models/HAILO15M_Pose_Estimation.rst rename docs/public_models/{HAILO8_Classification.rst => HAILO8/HAILO8_classification.rst} (72%) rename docs/public_models/{HAILO15H_Depth_Estimation.rst => HAILO8/HAILO8_depth_estimation.rst} (55%) create mode 100644 docs/public_models/HAILO8/HAILO8_face_attribute.rst rename docs/public_models/{HAILO8_Face_Detection.rst => HAILO8/HAILO8_face_detection.rst} (63%) rename docs/public_models/{HAILO15H_Face_Recognition.rst => HAILO8/HAILO8_face_recognition.rst} (56%) create mode 100644 docs/public_models/HAILO8/HAILO8_facial_landmark_detection.rst create mode 100644 
docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst rename docs/public_models/{HAILO15H_Image_Denoising.rst => HAILO8/HAILO8_image_denoising.rst} (58%) rename docs/public_models/{HAILO15H_Instance_Segmentation.rst => HAILO8/HAILO8_instance_segmentation.rst} (67%) rename docs/public_models/{HAILO15M_Low_Light_Enhancement.rst => HAILO8/HAILO8_low_light_enhancement.rst} (54%) rename docs/public_models/{HAILO15H_Object_Detection.rst => HAILO8/HAILO8_object_detection.rst} (66%) rename docs/public_models/{HAILO8_Person_Attribute.rst => HAILO8/HAILO8_person_attribute.rst} (50%) rename docs/public_models/{HAILO8_Person_Re_ID.rst => HAILO8/HAILO8_person_re_id.rst} (55%) create mode 100644 docs/public_models/HAILO8/HAILO8_pose_estimation.rst rename docs/public_models/{HAILO8_Semantic_Segmentation.rst => HAILO8/HAILO8_semantic_segmentation.rst} (67%) rename docs/public_models/{HAILO8L_Person_Re_ID.rst => HAILO8/HAILO8_single_person_pose_estimation.rst} (56%) create mode 100644 docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst rename docs/public_models/{HAILO15H_Super_Resolution.rst => HAILO8/HAILO8_super_resolution.rst} (59%) rename docs/public_models/{HAILO8_Zero_shot_Classification.rst => HAILO8/HAILO8_zero_shot_classification.rst} (50%) rename docs/public_models/{HAILO8L_Classification.rst => HAILO8L/HAILO8l_classificaion.rst} (72%) rename docs/public_models/{HAILO8_Depth_Estimation.rst => HAILO8L/HAILO8l_depth_estimation.rst} (55%) create mode 100644 docs/public_models/HAILO8L/HAILO8l_face_attribute.rst rename docs/public_models/{HAILO8L_Face_Detection.rst => HAILO8L/HAILO8l_face_detection.rst} (63%) rename docs/public_models/{HAILO15M_Face_Recognition.rst => HAILO8L/HAILO8l_face_recognition.rst} (56%) create mode 100644 docs/public_models/HAILO8L/HAILO8l_facial_landmark_detection.rst create mode 100644 docs/public_models/HAILO8L/HAILO8l_hand_landmark_detection.rst rename docs/public_models/{HAILO15M_Image_Denoising.rst => 
HAILO8L/HAILO8l_image_denoising.rst} (58%) rename docs/public_models/{HAILO15M_Instance_Segmentation.rst => HAILO8L/HAILO8l_instance_segmentation.rst} (67%) rename docs/public_models/{HAILO8L_Low_Light_Enhancement.rst => HAILO8L/HAILO8l_low_light_enhancement.rst} (54%) rename docs/public_models/{HAILO15M_Object_Detection.rst => HAILO8L/HAILO8l_object_detection.rst} (66%) rename docs/public_models/{HAILO8L_Person_Attribute.rst => HAILO8L/HAILO8l_person_attribute.rst} (50%) rename docs/public_models/{HAILO15H_Person_Re_ID.rst => HAILO8L/HAILO8l_person_re_id.rst} (54%) create mode 100644 docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst rename docs/public_models/{HAILO8L_Semantic_Segmentation.rst => HAILO8L/HAILO8l_semantic_segmentation.rst} (60%) rename docs/public_models/{HAILO8L_Single_Person_Pose_Estimation.rst => HAILO8L/HAILO8l_single_person_pose_estimation.rst} (56%) create mode 100644 docs/public_models/HAILO8L/HAILO8l_stereo_depth_estimation.rst rename docs/public_models/{HAILO15M_Super_Resolution.rst => HAILO8L/HAILO8l_super_resolution.rst} (58%) rename docs/public_models/{HAILO8L_Zero_shot_Classification.rst => HAILO8L/HAILO8l_zero_shot_classification.rst} (50%) delete mode 100644 docs/public_models/HAILO8L_Face_Attribute.rst delete mode 100644 docs/public_models/HAILO8L_Facial_Landmark_Detection.rst delete mode 100644 docs/public_models/HAILO8L_Hand_Landmark_detection.rst delete mode 100644 docs/public_models/HAILO8L_Pose_Estimation.rst delete mode 100644 docs/public_models/HAILO8_Face_Attribute.rst delete mode 100644 docs/public_models/HAILO8_Facial_Landmark_Detection.rst delete mode 100644 docs/public_models/HAILO8_Hand_Landmark_detection.rst delete mode 100644 docs/public_models/HAILO8_Pose_Estimation.rst delete mode 100644 docs/public_models/HAILO8_Single_Person_Pose_Estimation.rst delete mode 100644 docs/public_models/HAILO8_Stereo_Depth_Estimation.rst create mode 100644 hailo_model_zoo/base_parsers.py create mode 100644 
hailo_model_zoo/cfg/alls/generic/fast_sam_s.alls create mode 100644 hailo_model_zoo/cfg/alls/generic/yolov9c.alls create mode 100644 hailo_model_zoo/cfg/alls/hailo15h/base/resnet_v1_50.alls create mode 100644 hailo_model_zoo/cfg/alls/hailo15h/base/ssd_mobilenet_v1.alls create mode 100644 hailo_model_zoo/cfg/alls/hailo15h/performance/deeplab_v3_mobilenet_v2.alls create mode 100644 hailo_model_zoo/cfg/alls/hailo15h/performance/mobilenet_v2_1.0.alls create mode 100644 hailo_model_zoo/cfg/alls/hailo15h/performance/regnetx_800mf.alls create mode 100644 hailo_model_zoo/cfg/alls/hailo15h/performance/ssd_mobilenet_v1.alls create mode 100644 hailo_model_zoo/cfg/alls/hailo15h/performance/yolov5m_wo_spp_60p.alls create mode 100644 hailo_model_zoo/cfg/alls/hailo8/performance/deeplab_v3_mobilenet_v2.alls create mode 100644 hailo_model_zoo/cfg/alls/hailo8/performance/efficientnet_l.alls create mode 100644 hailo_model_zoo/cfg/alls/hailo8/performance/mobilenet_v2_1.0.alls create mode 100644 hailo_model_zoo/cfg/alls/hailo8/performance/yolov6n.alls create mode 100644 hailo_model_zoo/cfg/alls/hailo8/performance/yolov8s.alls create mode 100644 hailo_model_zoo/cfg/networks/fast_sam_s.yaml create mode 100644 hailo_model_zoo/cfg/networks/yolov9c.yaml create mode 100644 hailo_model_zoo/cfg/postprocess_config/centernet_res18_with_division_nms_for_AP_test_threshold_0.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/efficientdet_lite0_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/efficientdet_lite1_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/efficientdet_lite2_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/mobilenet_ssd_hd_nms_postprocess_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/mobilenet_ssd_nms_postprocess_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/mobilenet_ssd_nms_visdrone_postprocess_config.json create mode 100644 
hailo_model_zoo/cfg/postprocess_config/mobilenet_v2_ssd_nms_postprocess_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/nanodet_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/nanodet_repvgg_a1_640_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/nms_config_nanodet_repvgg_a12.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/nms_config_yolov6n.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/nms_config_yolox_l_leaky.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/nms_config_yolox_s_leaky.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/nms_config_yolox_s_wide_leaky.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/nms_config_yolox_tiny.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/validation_only_yolov5s_vehicles_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov5l_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov5m6_leaky_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov5m6_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov5m_6.1_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov5m_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov5m_spp_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov5m_vehicles_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov5n6_6.1_leaky_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov5n6_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov5n_6.1_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov5n_seg_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov5s6_leaky_nms_config.json create mode 100644 
hailo_model_zoo/cfg/postprocess_config/yolov5s6_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov5s_6.0_leaky_focus_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov5s_6.1_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov5s_c3tr_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov5s_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov5s_personface.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov5s_vehicles_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov5x_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov5xs_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov5xs_wo_spp_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov5xs_wo_spp_nms_config_10class.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov5xs_wo_spp_nms_config_1class.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov5xs_wo_spp_nms_config_20class.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov5xs_wo_spp_nms_config_5class.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov7_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov7_tiny_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov7e6_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov8l_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov8m_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov8n_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov8s_bbox_decoding_only_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov8s_nms_config.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolov8x_nms_config.json 
create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolox_hailo_nms.json create mode 100644 hailo_model_zoo/cfg/postprocess_config/yolox_hailo_pas_nms_4cls_pas20k.json create mode 100644 hailo_model_zoo/core/factory/__init__.py create mode 100644 hailo_model_zoo/utils/completions.py create mode 100644 hailo_model_zoo/utils/factory_utils.py create mode 100644 training/fcn_hailo/Dockerfile create mode 100644 training/fcn_hailo/README.rst create mode 100644 training/yolox_hailo/Dockerfile create mode 100644 training/yolox_hailo/GUIDE.md create mode 100644 training/yolox_hailo/README.rst diff --git a/README.rst b/README.rst index e088b74f..cd2ec700 100644 --- a/README.rst +++ b/README.rst @@ -57,13 +57,13 @@ The models are divided to: * Public models - which were trained on publicly available datasets. - * For Hailo-8 - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ + * For Hailo-8 - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ - * For Hailo-8L - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ + * For Hailo-8L - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ - * For Hailo-15H - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ + * For Hailo-15H - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ - * For Hailo-15M - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ + * For Hailo-15M - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ diff --git a/docs/CHANGELOG.rst b/docs/CHANGELOG.rst index b06d8471..e879ac29 100644 --- a/docs/CHANGELOG.rst +++ b/docs/CHANGELOG.rst @@ -1,6 +1,34 @@ Changelog ========= +**v2.11** + +* Update to use Dataflow Compiler v3.27.0 (`developer-zone `_) +* Update to use HailoRT 4.17.0 (`developer-zone `_) + +* New Models: + + * FastSAM-s - Zero-shot Instance Segmentation + * Yolov9c - Latest Object Detection model of the 
YOLO family + +* Using HailoRT-pp for postprocessing of the following variants: + + * nanodet + + Postprocessing JSON configurations are now part of the cfg directory. + +* Introduced new flags for hailomz CLI: + + * ``--start-node-names`` and ``--end-node-names`` for customizing parsing behavior. + * ``--classes`` for adjusting the number of classes in post-processing configuration. + + The ``--performance`` flag, previously utilized for compiling models with their enhanced model script if available, now offers an additional functionality. + In instances where a model lacks an optimized model script, this flag triggers the compiler's Performance Mode to achieve the best performance + + These flags simplify the process of compiling models generated from our retrain dockers. + +* Bug fixes + **v2.10** * Update to use Dataflow Compiler v3.26.0 (`developer-zone `_) @@ -10,7 +38,7 @@ Changelog * yolov8 -* Porfiler change: +* Profiler change: * Removal of ``--mode`` flag from ``hailomz profile`` command, which generates a report according to provided HAR state. 
@@ -128,7 +156,7 @@ Changelog * yolov5m_seg * yolov5l_seg -* New object detecion variants for high resolution images: +* New object detection variants for high resolution images: * yolov7e6 * yolov5n6_6.1 @@ -215,9 +243,9 @@ Changelog * ReID training docker for the Hailo model repvgg_a0_person_reid_512/2048 -**NOTE:** Ubuntu 18.04 will be deprecated in Hailo Model Zoo future version +**NOTE:**\ Ubuntu 18.04 will be deprecated in Hailo Model Zoo future version -**NOTE:** Python 3.6 will be deprecated in Hailo Model Zoo future version +**NOTE:**\ Python 3.6 will be deprecated in Hailo Model Zoo future version **v2.1** @@ -270,7 +298,7 @@ Changelog * unet_mobilenet_v2 * Support Oxford-IIIT Pet Dataset -* New mutli-network example: detection_pose_estimation which combines the following networks: +* New multi-network example: detection_pose_estimation which combines the following networks: * yolov5m_wo_spp_60p * centerpose_repvgg_a0 diff --git a/docs/GETTING_STARTED.rst b/docs/GETTING_STARTED.rst index 10ad7b8a..299374fe 100644 --- a/docs/GETTING_STARTED.rst +++ b/docs/GETTING_STARTED.rst @@ -9,8 +9,8 @@ System Requirements * Ubuntu 20.04/22.04, 64 bit (supported also on Windows, under WSL2) * Python 3.8/3.9/3.10, including ``pip`` and ``virtualenv`` -* Hailo Dataflow Compiler v3.26.0 (Obtain from `hailo.ai `_\ ) -* HailoRT 4.16.0 (Obtain from `hailo.ai `_\ ) - required only for inference on Hailo-8. +* Hailo Dataflow Compiler v3.27.0 (Obtain from `hailo.ai `_\ ) +* HailoRT 4.17.0 (Obtain from `hailo.ai `_\ ) - required only for inference on Hailo-8. * The Hailo Model Zoo supports Hailo-8 connected via PCIe only. * Nvidia’s Pascal/Turing/Ampere GPU architecture (such as Titan X Pascal, GTX 1080 Ti, RTX 2080 Ti, or RTX A4000) * GPU driver version 525 @@ -24,8 +24,8 @@ Install Instructions Hailo Software Suite ^^^^^^^^^^^^^^^^^^^^ -The model requires the corresponding Dataflow Compiler version, and the optional HailoRT version. 
Therefore it is recommended to use the -`Hailo Software Suite `_, that includes all of Hailo's SW components and insures compatibility +The model requires the corresponding Dataflow Compiler version, and the optional HailoRT version. Therefore it is recommended to use the +`Hailo Software Suite `_, that includes all of Hailo's SW components and insures compatibility across products versions. The Hailo Software Suite is composed of the Dataflow Compiler, HailoRT, TAPPAS and the Model Zoo (:ref:`see diagram below `). @@ -116,6 +116,9 @@ The following scheme shows high-level view of the model-zoo evaluation process, By default, each stage executes all of its previously necessary stages according to the above diagram. The post-parsing stages also have an option to start from the product of previous stages (i.e., the Hailo Archive (HAR) file), as explained below. The operations are configured through a YAML file that exist for each model in the cfg folder. For a description of the YAML structure please see `YAML `_. +**NOTE:**\ Hailo Model Zoo provides the following functionality for Model Zoo models only. If you wish to use your custom model, use the Dataflow Compiler directly. + + Parsing ------- @@ -131,6 +134,12 @@ The pre-trained models are stored on AWS S3 and will be downloaded automatically hailomz parse --hw-arch hailo15h +* To customize the parsing behavior, use ``--start-node-names`` and\or ``--end-node-names`` flags: + +.. code-block:: + + hailomz parse --start-node-names --end-node-names + Optimization ------------ @@ -169,7 +178,7 @@ To add input conversion to the model, use the input conversion flag: hailomz optimize --input-conversion nv12_to_rgb -Do not use the flag if an input conversion already exist in the alls or in the YAML. +* Do not use the flag if an input conversion already exist in the alls or in the YAML. 
To add input resize to the model, use the resize flag: @@ -177,22 +186,25 @@ To add input resize to the model, use the resize flag: hailomz optimize --resize 1080 1920 -Do not use the flag if resize already exist in the alls or in the YAML. - -Profiling ---------- +* Do not use the flag if resize already exist in the alls or in the YAML. -To generate the model profiler report: +To adjust the number of classes in post-processing configuration, use classes flag: .. code-block:: - hailomz profile + hailomz optimize --classes 80 -To generate the model profiler report using a previously generated HAR file: +* Use this flag only if post-process exists in the alls or in the YAML. + +Profiling +--------- + +To generate the model profiler report: .. code-block:: - hailomz profile --har /path/to/model.har + hailomz parse + hailo profiler path/to/model.har * When profiling a Quantized HAR file (the result of the optimization process), the report contains information about your model and accuracy. diff --git a/docs/OPTIMIZATION.rst b/docs/OPTIMIZATION.rst index cf93e710..bd8e8219 100644 --- a/docs/OPTIMIZATION.rst +++ b/docs/OPTIMIZATION.rst @@ -36,6 +36,8 @@ Both steps may degrade the model accuracy, therefore, evaluation is needed to ve .. figure:: images/quant_flow.svg +**NOTE:**\ Hailo Model Zoo provides the following functionality for Model Zoo models only. If you wish to use your custom model, use the Dataflow Compiler directly. + #. First step includes full precision validation. This step is important to make sure parsing was successful and we built the pre/post processing and evaluation of the model correctly. 
In the Hailo Model Zoo, we can execute the following which will infer a specific model in full precision to verify that the accuracy is correct (this will be our baseline for measuring degradation): diff --git a/docs/PUBLIC_MODELS.rst b/docs/PUBLIC_MODELS.rst index fc850264..58d32f56 100644 --- a/docs/PUBLIC_MODELS.rst +++ b/docs/PUBLIC_MODELS.rst @@ -10,97 +10,97 @@ Hailo provides different pre-trained models in ONNX / TF formats and pre-compile - Hailo-15H - Hailo-15M * - Classification - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Object Detection - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Semantic Segmentation - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Pose Estimation - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Single Person Pose Estimation - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Face Detection - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Instance Segmentation - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Depth Estimation - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Facial Landmark Detection - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Person Re-ID - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Super Resolution - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Face Recognition - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * 
- Person Attribute - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Face Attribute - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Zero-shot Classification - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ - NA - NA * - Stereo Depth Estimation - - `Link `_ + - `Link `_ - NA - NA - NA * - Low Light Enhancement - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Image Denoising - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Hand Landmark detection - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ diff --git a/docs/images/logo.png b/docs/images/logo.png index 85a20fa043c5950ceaf048dc31708a7d3c8d3ba9..b8d4b34b395b52a1454cb3d73d02503dc13353b2 100644 GIT binary patch literal 185769 zcmeEvXFycD*61FpN)r)Kx}adA47~`bG!YS`cZ8V%kuoqdRB2XJniN4qKv6+KK|s2I zl_Cl%y$J|N@4dh5p@>KCz2}_!zW4ojCpvqltgNi8tgIv}*&)H#RNpS_NrT;Y#iX2eUPw_u&A_< zh%{1)3n?ZoBqS{+0q~ICMf{-eF3DOM6o>DUuK7u<0UYco5TvWH6dUV;;Qmn;#Qu$S zAwjI+KnRnpR?F+*#H$j&m>P?$A zZQew^c{4RV)fOsxMmlP0I>sFg42%p6JLsv2ZuPYi_~R2nLq$bHOS6@hcI$RpTH5XK zgLeB$7WzMfVEH9PzX_6q?vf(7Arg87DLrDj1ypj&@>_&FppqOxbbw&kkwOR(GI9#a zO`Eq+0Ri|q5do2qt|e}T5M&5a5;78UN{UV7q|_oHk)D)n*ES^iky927+)n3(DYl2) zOFPQ5n^EhPrHCl*!e<4_J)zno?V~8X7*l-uMP9`)9kkNx`vh_3FS_3_V;Ps;R5^Ey zEAxdvxcvO)+v?7Vhneqcx+e9kT&_eUWEIzbpOR3~w|4c5Ow2B+>z<~ENJs#+WJH=M zDagf$Bq z%xEQDaRTPsItl-p0=^$4EH^^bq{K4mA$e%*;n}6gYUE&P(t>N9)gJroT|rw7N^VD) z1~28OpIL^ob#?QjC#zDdMR)_ENk_)|6Cy1fGCrNYS3jjU;NfNd*L@l#q+o!w&KnQ*E{7b9F1-+bs~fO-bG4r z)^b_H6KBmc)33R^p;}N+UB1Z$ry8dXQ#Tq9)_)(we)l-BWK=l(R8OAwN%q8m3OYS5 z!x-BSHEA@&d}-1B;9&lsr9NfOt)`=?Po($00O#kTq*AuH1ZZh^Y^gu4+CxIS*YbXh z!efWSP7lY}N(N=@8tgHX`Bkn})8`n)?pM!MWsEvGHux(u3G7U((GBhk$s5+FJ=YrP 
zqMNd0RWx$_B7a)wj&BX$skvUvAXOO#nU|plu3xq0m6oAi_UZ8FL#O%%OhwwAFfS3mjL{HX`mru>(oFEYdX)-oEo z?w@qwJS!R^_c;X4p~oIryYs3@%t-|&HjCUn!Hf)D1P`BykQD?*Hr&(!7nkT;-8K3< zeE>z8Rk`zwFe$3sRAY2ADqdYmP0ea|(6Op&N>qKE%b&&`KJZTM_PBM>232(Cxm+TN zMIL2Q&D6Zt{+2$&Osl7nix%?M^Rl{I*_A|x`I<}gT941~t7@9M?Q?We02yp1?4c`U zE%hom9g+X7Xw=f@)of$py-5)VC4%O{`RmjAn46-x`OyM2Yfh8!ghjo-b*eNHK`_EYdE zF?F6or91Uyv$1?-HN?awXU{45eQh&3b-SMbyhs z>ipN%X~|{iy_9uAclLHGX}|0pZ%vF`Udb2@_c=@rE0^`Em-Iyv=5fhSw2n#CbL=?A zn)5WGz^EuyAFHuFE4wbNM}%>RBMOyl)%u`YXCN4Fi|r|}k4@~=;jDQk&|f=wBEZ^M zA*N6GLi>a-tE`4YU0PAj`5=DiWJwG5HF#fiEF+t7 z=6J47$z|u|vlvWms}p;@iRWmYXo6l|EIu#Dt0Cp|ljh!f>X)Rp%EJ{?*}eOdc53nl z`hUviLZcXVz2T%5iA<(2wsG@j0Jo;o8T)4#Wm)^+esA||hY-y%<}t!sRx+4oLu8@yau zL5=>Lhxf;xrO>(lhZYk-4?MhTBdePOOp;=PBNgsqAHT~wcMxwk;gHmqR9cezWRZQM zZlTL3-t=Na)l+Z%V-x(@4=f2U?3%?t>-so2=kg`8^1CQpuAOV}vGroks3~H(zRpL$*`)MYBN1)4>nda)E;OxQ`Z&B zoj)_L%Ph}cG|fJF@NSLm<4M-JU`?-~PeL5cmj;%5pKG6k8eUipo~KI6_JbNUS_i(8T^lN($b@cXioSydl-1b#JY zcWl+JZ`G)tRy3Zu}Z$_D2 z6iBRQ=hVXu-HYNVpnm+pKFA5*{?w=2;Ct{)}Ou5lJ~M3)v1kg%?;125Gxp@&3&4F z5c^Q$QS%9Zc{SIJFU!!S#F~?H`KR|3+1^VHWi>8u?52P2)N7xNk6VV$a%wE_B_zl_ zLC-g1A7`fB`_gXJQB*x)@P1M7OM}DvW$4zV&jbBsh^BYX?q%pUyGqr$z?+m}O$w@b zOg?{RrQH3Uy#Y%5gdGBduQT4oi+!<`6{K?7* z7w;$w=Kwn+>9FX_#!1ClPdslYom*5eD2u<-s!-clsrx){8Bz^)EvlNcWvI<>8FnqW z7gXI>J6K=hv#T)1yS0vB6AM&);C$z=Xy=%Hh+Sp=Q?kd7u6> z`?#fhF3?;TGhT)a8~WHuIa%@@?ms$(-{;cu@Xfh`T0OQ(rMe9Jz|913-SW(!amLo)_{RJWc9{a@ z8a1{`t<-saUCxSwmqN8uDw9*a&If)dp^3<>#J8y}3Tjvn*I}OwAFrIDS~OSAtIEHt zNxk&>Ugo^lkZf~tk*)EmY?mg^?4<(}{p`EL`?Now&76g-z8I>p^fhlnd@uI6nLSaQ zs-C|LDL5=c_gvEqJ4#vkG%$Xyj9d}#L*p5)UFRIyE7bJ zizcMRGN*JGYO%fY>fR#sN8I=)&Wl&}OfB-amIk{F_dQ5dpzDh?Oe?OqrxbI5GAV7sx$&TSFSQlKSG7J1zREh`)L6S#R)yd$cC|G^6 zA&ekW);%h0V95&pvyqTP4AwS;GtQRg&W;vn0>)h30w*lGl4lPw&sNgYNX`eARfhzu zjMPF}%@!JV|Q$LJ^-=zyq|!d5b9;;>jNZ4ANL zQN!{q3M5iPnh*}cf`3*J7o-hgAOhqJIl`!*EspEZAVfrS)Q;e=N)ffCvyC0W#({_u zq;DodE2tZ)t#CsIMnS?49zSZlZ9|PU90@j92Ruk;h9MOk2@Y$O0F|`Ftp~KM@#^aV 
z1)PJzkAMT=N8p5|9ezFKq&2~1J)mT7r??&feEfm>D9YA)l@Tjkg0xhQDgbI=iw2~H z=HfzQoh=Vz+u$J4so)%bi92fdE8I~WT1VG`pvRU~!5T1PizZL?ShTQMcD)go*J+ z;JF4PMvQ?ae>H}Pa|VPVxMwZ@){*wW$-hln{%_KVyns;?6&NuDo8Nwfgoq);3O~qT zs$sfeNh9)tY!fkNwGWdV2GQWR%5z27N9^CQemwkB8V5*$h~}}C_F;j8q~_qU!WB3O z!KOEg!@$D`Swl8J_nZOR#}YCp>L3dU4Pr1Lqz+jCEhP+zLi`XCK(BUI;QQe=#K9QX zje6@48vxehR;vYv9h~i84GJ@d!V>H3fX4qpGoT1?(XfC)aCcglSrGn%%tJ@l`^E|x zTI-@utSklI&IX0S>)D-vJ1^p=a>$8kAbK14Q4#~HiW|zJw8mkb9e;|Vz~XGIZPxX& zk~WN8gBS;J&{#MVuqqe_496Fp>C*aLzusDw+b~e^) zoT6K){IRuYP;*K&#>&E(sIoV^U~q)rfa|SA{|ruLX{~^@!{RnDmwpBRQI+)skOex4 zb%1%jnSgZ!Lq8s~Q73M(1IqK4SZYh6vi&8V3N-PVKY|l`_+GdmFt!76mLDPdSNb4q zXkQWGmA(x_Kr|WP0e6nO;3o&XZ4g8_4B{i99l$L8kJwnDF|Cp`+FEDpmZC4l5d2?-&&B!Ev;Tv$>{N7AQw0gCL$;#E+{O=IZJy;3^`3#aRm?rKF?;g@gr#h4}#lKi=JeVByB^ zfah7mpoqbva5h9+HW!Q$*lC>!vfO}LVl~jHb*zrS>OsVbMhRlr~ugXJz?`VO;IH25Rxpi>P zKY&hZ0-*vl18x9ZjU~NieZ-Ei>a(+fHKnwjg#*wG zZv3mZT5h@D?8qJxgq5+K4aiCKtsoKHfEq{^kTpdN&c+3UR>ooNxrmBV+U7UXCm|^$ zCN69t&M$(IMDdGa#6|g~q|ieAmO^485)zgoQj$WTxmWT7SzDJrSbEVwRQ^i-6=VM2 zlD8ECKs){mc@qTOSgH0g8$3|S?yG#&#yI`pFj3=6TcBWxmF31;0Pet51H-^?StEiJ z7ZO2>TJZ}Zg)R9-CDFqC7UIAsC5%R)F;c?f65>*8`CY+|B2g8I@C$)%C$578hqOhc zMZ^z)W81JQS}6vPwIa9zaXexTI`NvQ{d>i+k`Z+VNCpPXGz7Wfs;)@DD$+k)A#o84 z3zUT=ieF4bR1#7jqG?KSjAXpjYQ{>wWjXVBen=by%K$ zW#8(#{XfAz%{4vq%ZC7Kjq_)u;cqc-!$|ut#SFcoSJr6R*e2^`a{baeziA4%wf;;s3{gL?s35^VY9|nehck5xH+Ue@w(-D#?hIyd(ym}k5*HE@5F$F5 z&2a1)S#C)&u|GsQJJnZvKDiK7X&nx#0P>EVqUu#(@j=u4Mro8J?MH z6Tq` zN=S&Up#*C)M=ah3o&&Cq{~Krr6OENB3W!L6g_8h4@^jZ;tK~0@IEu9+4hw4le})Zz z%=6cn0P4EN1Yx3gBNHTq{>TI|NfBWwE=46hr4t&O>Pjbd{u)DmG$%hy_J8DzJI=-e zeGFrX!?^NmVXU3)EO2WD!h*EU9wHpq3gFKGu%P@V=U--$C{a?tK{mjxjVuzE`Xh_r zSuSzf0ypBHlN0sd)N`FT#E!huli-dFcigo;wi?v514{>3me%^+#+iqlg1d#ohI!lS z1Wp}ve6Z;LHNg<<{bOB!jX!D!%nuu)|Da!BvVNjtl{Rfx3j*ql3Vgcax74l7CpIqk zK}Hp6;&F_X?xDDHc7SUI6BiK`ak{+Jv>Z4Fx4_?bSFGV5cni4;4wo2rm)=H-CwDCo=PwXiJ!v z8)7$%cfSxwK~_wNOI=n}h|5q`M2JgG7UcUk90Vudz{s%y_e!h$ z!u=Iai~TEBi>@ZDEGbo_*LW@Rhxk=yZz%8I*F{nU)CGL)Xef&m19kboR})EL0bur$ 
zl3H29sz|R34N_`-Y$Z1TT1}9vWA0iF|BBcs$Zn{I=>Kv(M1QW|U#kZ!T1ZK_IfP)@ z`oGs6|4Oisk}Kq{v3WG-QLDvD5MT{aYsMlB4uxA6% zN?98eq6*H!9lzSYh|wz=khsVv%FkcZfC{oOhvB~abJzW)KmHGE!#{M;zt6}&Ks zMr{B#Tg<_-P+9ea67h@x^|2E+4p&5DMOXF)%dZqO1VlJ!}RaW`HjoEKqny zb!}z%%P|P-%y7Y5G_ZfwfpZ2^wJRTBRPi6v*sXlp_v&5rxnM@FHlic@F%S zfZxuQ;0VXV$JMu6+QNPk_*;wZI6xrq?|}W*EB=E-m=*tF*pIe%0ADtcz%)Ce?a{FR zE%0A;afZKmAUy&6S6pl`uE1Xn{JZU(?QMV`{v>C+J;nljg+T@%h2KrUpw0ll5b#su zbhN-HRb+?2B{$U88~m0V`~>jn0f2=Ru#Uhm0uJMI?M3b50%ry!fr-b}4nrXDYk~!| z1r7~PD%d+(IJiU5N}b_c(6(Q;zlhN*kAIuE0q);<;kI2#A3sHGGX&FzA8|i&{)ofI zgHLE z{MVih7y3=fu|?dAf|*2?V=>EH*5jDZRwHHif& zi1l_mu~`qC3j#-mIJ;Nm9{52z@%ht(Bp3uq@HWKnfglAf9WInJ&SeFJ41SS=kLKvW zd7_;V2gC#I1s`S!LlTe-bQn^Cjzb!d4rB5 zkV7aTP9U@phKSP$YlH*B8R3bzi10^*B5ot@As!>r5V?p~h%&@yL<6E7(Tf;E%#o0i zP?KyYVJG1w5hRf$IYOdFqC;XzVol;i;z4qWB#7h|Nj%9Dk}Q%}B;_PEByA-9B$MDG zC@NAW(p{wdq>`ixq#C3~q*kOjQg71hq!Fa?q$#9%q$Q+Pq-~@Fq_bocWDI0nWc*|@ zWXH&K$xviYWZq-}WVgv4k!6v+C95K9CmSJKBBvo|BR@bcO|D9AKyFR$N`8erf;@pd zll&cd4S5&&6a@tZ69pfI1jR85LyEH$=O_XwVklB6UQ&Ff=%ARSq@>(Qd4N)uQiIZh zl0bQdGK%sEWfA2U$}Y;8O;npWHwkZ2-ek1NVbg_85u1`W6>j>nse9A>=B=CeY?j`v zu^GMDeRJ^U2b=RYS8ndwJimp03*Q#mEjnAyZaKf@)|Qkl@3u5=8KKRojRXf!z^)~AL)C$xl)XvmF)QQwDsT-)rX=rG8X%5pE(GX~YXdctNq4`QP zL(4!bK&wiNruCtXrp==LLOVpanQjl=5jrzE54s4t47y6Xfvuaj@@`ewYO&ROYxLIK zt#w-`=^5ye^cwW`^a1oq^kwut+bFm3Zd2NZ-gar*{cUfybuf@J@GvMapcpPPJYaao z(6yanJKuKI?Ps_9Z-2VIV*3c=HbzlKJw`XiXvPA@HYQRgUM5v0JEmZ!bf#LSIc9d| zBg`1)Ys@LkUzn$Nu1+s~%Oc8={n+XuEub`EwGc1QLo_BZSU984UCInHu~a};v) zax!qraoTW(a~5&-aWQfo;j-hp#r2kJWEb16W4oMp-P={MYo42rTbKI+cPe)a&lVm@ z9xI-3p4U7hyE%8O@AllCyt{GF<~@>o&g_ZYQ?h58mycJU*N-=ww})>BpDLdRUov0w zUYfn~d!6>i@2%cPu}^ZJ?Y@|OpZ1gN7u$bkfAs#!10)B;57-=tIq>-)`9Z0J4hQ28 z*6~yGAK`c5Pv&nIU=lbXa6uqjU`TMcppjsRV2R+8keHC2&;y|+2&E~8GadCnIxH>LwtuYhaMd2kmZ)Oki93{CdVar zTJEk~oBS?$3;B5YZ-;pfqYoz>?mn{b$k`)Ljtm_YI*L1*d30JqTER=LoKtyujQp6|v5;eRs_d#L)yJwM$0d$?9e=Avt){JZ zTkYG4{U>lI^3+MxkE@5NH=o>d(&1#b2Be{?5vI|i$*1Y0nSYAnl;)}EQ{T0aTAo@Z 
z+6>y}+DY0|I!ASab((bd>N@Mb)}zxi(R-{nt*@vbs^4ZHXy9c~ZpdP2W0+$^X{2YA zU^Hp0WPH=O%S6oNib<_0pQ)Q^nHh_jomqi7ow3dB2oS5!{~5yFGdyPo{C5}DLKW^@>&E5KYD)+MDdE+DQQ`~pC|NVj9gOZ1m4<9G6B={#R zCORaxJ<@vg_OZm{$4P8S!O0}a&dI$`OrCsts`xY~g+Jw9DpRWeGw7M~v%a*`X|?HU z>8~>+Gg6-Kd44C8A@h0`BFjB%B>PNuTaIDQ=iKADuk&Q`GV=xU6JP9l5naGo5L~#W z@KWJ&kw?+wOUIW3udH8nyf%N`^v2*#_1jZ#E8eNSD=k(oep7O!q^MN3^hKF;S?+tu z_u1v*Qmn5L!S%3$bWfNrC3#5ty*1PbF$`ht!{07 zok?9=J-WW9!LDJn(WP;|$+wxj`FhLNmWZ!xU*lW%wWhX-w-vN2wtx7h{jI43)zROH z?_B7*^quB=L^o&m;~wFj7rn~8pZbjZy80da=LfC~(huGl+B@`o`0((D5rdJgQQYYA zSitzs@x%$yiPw{wlV7Lor{<@x&+MFeG%GnHgPK{3F*p7VR&)?9Pv8CmCFKu|0G$7Ci%&c{_IwB0#Go9laTzJ z_-8`PufcVOB-Rv%NfF@wzm@9@-y%-KGad?3#D;kfDH(i+AqfQ~@eV_9Rp7={Q2M6L zWFT$JRxl~rL`F(ZK~4$);cFrg^duk%Zb00+jcf~2cn8-mCgz<&nvM*TQiqRli%Xnl zWH~CbT})c^uI~*VdQdC^K}rrr?9E%K>1Z|s%A_8C37^jEO8>coonl^e!ifPAxhZ8=T~w|>U4W( z{1;SMmCkdqn=X%bj;3qlFQUVFt%_9=_Zs-#eXWRj*!_4L1g=0NC8gLzK}`urAR`4K zdNODi`Bvn%BMf1p;S``??k^%$VnX0X#_i7!YdVG;)#AYus+}oMTjDYpYsBxRT}Vg0 z*?sBq&CI(apF`snw3%LAy#L_so`>(4eQzl0JR3!?+;oTl7d(RcLNBR4z-zI8+&>)n zpWpzR@^Ztq_+HPkwDD&&*>bYQ^5RTQrx|ZfEJ<{pub_=OsPWdj+CF+D$R6RunK?4H zuu#w!w$(cmT%y!h82Cy~-00bh0K5Jh2-g?J(hHsimmtDLGppG^CddDZT4B@IgSSRg zE~koc3PLT|Z(Y|o%Fgrl@N_5h(-zCeHJ+HQ;}T$)^4?`JT^l)}UQ;ALefg93{HrHH zvqe?PTfI&#LmD@WSl_)k@qEDDpuU8QRBTaSu0TRFF6=6)wLrj=xbWJB!vBgy3bh>5 z3=Xk$UXAF(X3P}i`3Ypvck?B9BoS^L$0r-EE<+~rpB&Vi$9idR7cdXHaDL-E`?^Qo ztF2bWv$gmBCb>t;P~!X6z;9o9ja&{ru|wTJL|Y}t)JEU8E`N*2PAq<3yHnjRzYW}~ zkT>u8zR$092{z*VMTa=-!`7AfZqx@h;sCqEo@RlDV3xj*V|=Pm!>;)Db6KZV^ITKw zCQV+?3obb$)7$C@;dG-z2Gy?l3%2|zB|dR>%F-=MmmbeodFK*13rB2u3b7NPFLCPl zRCQsCN3jh2G8}s1$WrKIhHgc_`sa+UZ%|WJ2ga523{CBiiPf^IA zrc*tUbVTS7vt3Cbf#3bstQ$qY5A(reQzte{oH(C*v)W#HzNmQAu7AGZwD8XF!I~VE zgYshUmLcDQ&KAny7c8a2(Tk!Mlr@|av&N$@jNg`%2YzXJP9ByKS@3#5qRM&7!I!!& z)5I?0c3bvKVrhT(kzaeTb<)(DwObvkw@#?9F9g^eT+%x{6fX zUDA;Yhb4Xg24)E2Oca^;4(1m-VN>LHb+(t#B4v-fCAlg8aixTW46 zUxf3YRM}<#LEK?}z4L6z697Lb;TYcsaI-T5U#%33n)b!tD&RV>p(0AV0aB99`yn#z 
zUe3Fmce^+<-!sm6U!2%BI{FCWiSq>21KL3E@036qwH-pt;bjnE9Qk41nN6da6mGM& z2NT3+({!5-Y_iu)hzISkg07ZZLGqORSc>BLaEaUZJjcsYa7vu6m1~uowz^cyBK7>ziSKI{i)oe zMJF{=kk$QH<ej`#1=26^TDBL6!)eTT_+EZb%@(nlG7=bs#GVz z21olMh6_8!Y`bw)$#(WL5-pUQZ!l1F#<3o7F-$Hgy*S0q7TmZfRU$G~q*33^mhOG$ zen{yR_optT`6|6@q`)meXE-Gvw;0Da{$O&JL!=MfqY2@2=6y03UOH=3E)GjB$ip{6 zL69)wyNZ^W;e`AJCB^fF^WVk_eQqjEyW-bA=GZ!+n4eRRdGxN2XvTn>rrgky7 zyA2}CVtrv;Qy2kb_!mz8SR3FgO+ANzLCnDIh3O-wyaz`=1W6TC>4`RPAIf;=W!xK? z)Foj!?bVrTI&;qX-XbGLwO(E!i}@R!ZMfQ4%1P?NOtAix{d}{lqg!SFqLhlrP47D} z$5T#y0Kd}jM^Xi4Hn;MGly<^IPzJ#j--({$|HO#)k_i%WYPia2RP>of(nG$%H;1{w zEjZjqe(oUCAVJ_frG7Pn{zt4D-1D7UY#^+bvMrI=E5WQAFz z0CMUpn`OsKj)_NcT_(3v_I~C2hDjVQi%_mmdiTD_HXJW?y^F2Fy7xmTPX45IWvYl= zS+5faz{HTg+Y6r5>F1bwwxr9&l+fFFOHCnyvNzBBlkVquHOHAsmolnplo)(yqJ-g+ zZ25U)Y@ErIMx19b>%^h*5wB*5GQoTL!OJ(;7;nCuF5`Ohqcvrdu>y@Efi0I%Z(dil zy^L-8HdmD9L3d)I65U%AIpC|$SL>F06Kml`6@Zj}u@v?qv94fZOUQJvf2Y&Oq1(P3 zaq~;T!*Z=-APjHq@eS+94CuMC5yo-|E%~Q{H z7mxZJ`z?DBLbRt?F4vDbTo}cDVeUd<2ck|+QGI5{j2AI5oh+Ph?&qK!@>+&O#_&3W zBC~NNf|tC?j7i%(JENOAI@2m*!~5!b7QElYJxT4Ul`1lRQ?55P8^F{shr60KA2!iV zPma6l$1`yi&sZV->Qf(bAZ_a^Z{PfZw<8N0Rd3Ec@#z_Vzj&^7e@RafRPK{D++8_! zImvCj@`RJ5Lh}pHNwbcdUY1`~!eSif=mtHmCTgh8vtN{SZ`zYDx9`J|Q*!;I&7ryF zUk1&+4g_36&oTd;8IB|7b(V*JX0JOZ|LA<~1f@A62V1SdL` zyp~nziya0jCvp&1WrUv3>+rdi3ZKnE4-2PBG zY^ikkbn5kK!RpCZXDTFI=!5B~WG)sa^5>-$3?ImQ-Mb7$4d~tKh&#=AZQ`p`ZCrIu zm0Fzqd;<3(*5{~h!K8x_vCDN(pG=j1FuZSa*b73)=f8a~7}vsFS5z+O+`njhJ&Co|88mk;l?IWc%Eci<~tpk8H)5 zSAf59;N6`-FfX0DZ8w2^UIYG8!OWm+cJW>FU|cy=^sOT<81;0pki#f? 
zAu7-TpS2X3is7tW^7pE2@;1KO4Q@>Sb6@1Mtz&U?$vzG?Xt8(kK%I?-d zck`(0iDm`kvz$*CxAeMY>$>IIa9Wu+=O>3R766?50t)?q3=&C zg=dryN!%Im{GrWQX-26sqrZ<;B|9X~E<**^B}^A*y6PG@$7{Od{D**+uagx#_1^5c zJ+UDlzV`0GYYrZmjUV!P-0$MjA%WU4KT?$~pWC%$X2c$_49VB1P0(FxvTc9yIm2~F zKF9l}XG@nTL_D_+)i#=M`JVk^-sinAxaEGfZT_ouml073w!Go^nsE|fy+72N+%Gxc zct6Mg_-Oevx$Li|JtHAvPGpYRfSg}c_8HEP4?HL98T@3QHzWqghImj8%mta7g8PIIaik8IU9<`ka#4Z@ zrn~b~G)BILsti#nJBE0omD?ZXxx1l5; zCdeH$* zW2-U_42;hf>_~L(YBi->xF1fkz?R<^KXvGFzd)21-C(Wmw@S<`H{L&CpRAek))*}x zjnKU5fHaxyJuDgC_lld8o${ag?{?D8xbr^$&=S@!bgui8oV@=WPlN3J@8eCLc8-$r z&2?Aj606dm4hY_Jy27U)_r*1QZo1Hyylqyn&g@9@$38BD;cJWc49c&0JveVHuhYS+ z&wiD_A=KQ@Iww2IsWazTrV+z_pm~1y4qJBQyIh|xvx4Dl%T9f$$GG!O!aMU+hgp?+ zFq@Ky1%E!})~SGQvU3?(!LQw)!R-0P$*We~QB1MI%%!~Zl28r48OZ!;=Y&n}+}V?A z(v~&pXWx$T8Br!F4+t_J)XGvP0W(2F!3n5BA z{GCN<=Oij{e{6#T^8=p8-?PTW>^wO*`ZQ|9+rC5Xm07<<$1O&&;W+y8*7V@+tCrX5 zT)lO)$0|+)q|DRzh-Vfi7TX#*&Bw&*_J5uCpgraMRKq)RazHlk^V0Qu9WjT`MV5)s z50uYV?nreWZzZQ(@Vj{$Fz8o@z+mfrj!b4wnh6_a1bdE_P+na8XwDO1YSPLVVQTtg^40}(@bw|` zDYM<~bWgo#-z^*;6s%ng>#l>l4?f@1&c_Ygd6lgxA*Hli{NWA~;O1u7%%Nv`Wr7drbNvp=3m!hT2a#_%YCEh`Mh`{x-!6V} zVvys)(vkOlopY7U!hdL{^YBDw;Q2Y0B_ElhzPYz41yff#tSt&$UoZNSU!J+x_}yGK z+~*MEzF~8wgi7Uk^)R>OhNlLLtZfqEox{c*I||8N&v$tj47;OT4tx?kz^_&kbWT&H zWY)rM>7;Xsfw@{oYPiR6k;S`Ol?kVj7aBD%6MuEyVGr7O&NBFWU*eZ%rdtBFV-@Q&RZJ?+hBN3aE$jY=fNFk7>jsbHXfzR?B&U#?aT6v1jsoGD>#NGnF)5 ziOKuz44f(%$9&$MecUI(7lKZ=RJTqt>&hRUk<)y!&{JN&7~RdK5M`Jcz#d}222leL$jghfYae`1e0Wm4;;l%f4#nu~Od%NV7KHE42eZ?1ggLzr2HlIH2|X(v8d z3<{L9D5IZVs4UBgY@BgFWi~ikkv!X4a5Qdrm(sz7`Jp#8UVfiaNT*Kjra~`pMi=0*K@Jk+%eK{wytHIotKPr-x~?9EWVr%NP8mGf5+P9iWc*hK)<=|OZviHa zm{7S~G4q^4Zw){8v-f;q#me#AxCrG;Itv7meN5&vi&VANu5D7)2G* z6#|OrP{CF~r$LD`4|zNPPQeH{^|xm`m8BHVepeD#>&d*y67}LLMaffUo&f#_xJZlQ zM%#U<{)DXWM|$a-N_dvohmWa10<{uOs3 ztb1>u$<-3cz*CzN$^x0vI#wI}`*czRgSb%ChsT+X6@dr7>WRfM+Z8uSKP?Flq~E`9 z>s3W=`i>Z-s}B^qYeaOeLX)9)oruOGlB?-|g@83!DTcSN-t$x+-M6Eg-Zm=TTDB(SmtHo$a$80V)>c?A_((k#z!&aJBasS)3(1VJPLUvqrf7KQR 
z&2^c5iFNQ5B^E6^WP2GOHKnf&9akvshjYGv|wZ@R9`O@Hix)~*i^2I97IEOLCN zKY19YK{JB*g1vvik~v$YPaqM7 z8rxV3_9jfrv#eVcnzD#ny!k=3xT{3F%gjXcE{PLwCrx&Vzl2D-4W8D8y(rdZ8QR5q z8fLYrKa*MG5bZpM`4b$pnoeqO$a2e}P|PuBR@-F-$Nl8E86M!nRk(GuklT1dfJ~|> zJvp{2liAqfKBh4zQisaGn@@yqx46C(w?C8F?tdyZ3~Dj`))f$2cJVuoUx|lUwXy5S zvOc8F%zb*>TW|?Xv!(Ehae7K%PmADt(1~x#VihF*&V8KBySP9SmoH+Bs^SW zGxe;fN%&2jgP%VO_4n7Z5*+*iyTv~QWHue_7U8b(y#4HElW=mKtqe2ED}TEiGLNIh zllw=n3^hFSQ6E8>*3NzosF*ZLxlUE4bE>t2-1JMwj`WhcGjq=hnuJkxSmgBqgUqY5 z+vX*WU!Q(vbvEn}*R)&S$%7u^Y6pMOf=Yawy7vaDq$s}H(apfPy@55pcK2_j7Hrfy zU3BT(2qePHc3lt!Hl5fu?o`;~gYJQLlP4!9h){7aW4Dsdr9K zXXy=C2`078=25pzG+qciXWaEM&8(HmquHp@^+VERlfEJPNkjbEtm75N%)GC?m?~$V zjD|ngJMuiNB=5zcYZ=i>b!mBKbyQut;j~#nh^PJ(-tRhYhBIoEAln zlU()x)WtuWydFLz8dxs->__M!q z3FeUShuF+!dRMhD%d%+GH%)e9$HDqfeNuZ-yQqQ!Kw zo3SA98rsGZMvZWwZiC-u@?l<1n%QOU>T2?G^EnVY^Kx;dRO^Y0BhMQ!X=mGzp?diN zU(5P1X|@%8GX};8Yw|WE`esUpM7k!w6>Cb>9!ukNwDILlx^Q?Z;Z&w)0 zH0OqtY?3aE5XkqlDswflk4}%e(*L~7AYvejsokl#!la*BAqnyL&A~yJM<_b}&~NlA zH;;u2OTF$0yi?ju6(o);x?iXyUu(wl^qDYfU%J^vCPj}yMCZFobZ+s%NatoBe0ZHH zZL0m{oS=gzvffkPg)6kt&5@O{End4|8BZTC#S+iTBWm=D*o!T{`a2I8g}k^$(X7GJ zn9rN@afBvnj{mfBA{{p(xAk2)r`>lNR{AH`m3io|j=jrxxj!yB%8^>IiTeS(*$h$H zt^IMIjivoZKLdJnA|Nr-xYqLidvJsSefM_2iE3^1Nq+>k5rSFr@3XG zszDGAnZ+7rbFs!On54~BoUY1J=21Kwq|Bot9Zwx|hEe_c-96Vsw!>xe-@Ef4i~bD& zUhZ-FMWqjXx@l9k*SrjzuMq3**|4DeSlVQd4?Ak?ZgCDq zU9inO2kVa&EJT?GwwPB%c2Pj6OCP)~LhkjGD1jL18EpcIK;)_8zW=evUjYy|zA`)1 zfzv0mZnMsaIkeK{%0%@dUOK8Lj^npe7dDh{KqYlCwoE83lwaj4m5NjaNP1$TOSu=u zY19f9SF4V^1Zf=1wGtpn?I^J&a+18u$q>whX{${s86OIRRFiXz2lX#$u4elA#Z3P$ z-|t=dsiH7sO$7v%GEjw!XR#P#9<9z`X)2Zk^e8G$_hy|?+sPA0`g6oFBak4a%_YSIA^&WJXo?RR*SmzBED74mpFw`36u2k>B&J z10Kf1=uPp=k0Uh8JP+~*<9#8S$IbzIxKc1FVNPl9O*bWFEej{ z$2^(t{<&C%=BnQ5Z9}<5cLn@rIclNg`VR+mlk4kt6`CKQx>=kZrjMsjS%y5z#!W}| z7m08A6u=_>;icHuB)#g>+F9n)RF0A}yt`$1&orhUeqt+PK2M1kG`(n=Fni|7O@7u* z6OGtpZ(ZKb_t{#P(lwvz5=^AUs)wT5`f|<>M3X*m!gW5bzq@5J=cTr11)X}Pi(0j1 z@*x8ihbS@IY^rzXV|z2Kl?|i^Z+_lUJP`HZY@xRY;=FfnmUeMDXLC1?c|;Ff^*=cF 
ziKd+$#yKcw$X&9DnQJYTq8prea>L)+X&=M=q87F_LlArY)FCrpublIyI^+9C^JeeD zk>)ZHF~(lQB4d`A%T2TFbrt*A-CWy`FGF(kFG)`qM|WQKR+foMExTwEG~1-^$KtSe zwmhiZ?a7WELs6IeGB+8c_Oy)JrnCr@T_2ojDsBU|+(PORLIcn1{v2kecTc-LKh)`_ zE#W>J3cs$ipTykAu=D-wG+y6xMY7@J4I=?TV~!yd^PT#Rgm0ATW?@u*;tfE9)D{)m zd3<(W!h(584xK|ZyQrxr>%)?73-!i>ZFRQyoY5<6(Z z_y=9h1$xsul;Z$JNGSt~&Ki~^<0Cw<%At0hGwT`e7h7=_^eOJ8j zNh{JJxrVWRMkk)Tdu1B6}s~1~~4I@1nsy;f)Bve;&e5*@yxRapOIIP9r z9y*khS>h1s{LanO-o&y;w|_LyyX+Q6vi-oV`ED?)w|^SOJ8173-4&m%w_x3s&=|>g zU+HNECi4HV_ugSqG~2!~nITCO7!(-7kaH3x=bRaWgt+uE8~x&l+D|jWt&a1 z4>F4!0oh3p?*o|QeA820Z+c6xoW?t>@B8e58RDDx!ivc`c-ei zia-0!BJ4$pO8?4?ZwtUEpr7(b(q9$&Rl`3#?3%v!i?nP9y})os-$(H8u%A#s@W#Bq z-XkVQ(bwcB5>qFXRhdq5=k?FhJ=1Gzd5c*?P$qeu0yiJ{&+0Ip+@iptIo&|9BX zh&~EclDjw4-mg>rf$SJYOhau3DOcQL40=E0TD{;+d5BvH+GpDcFQVxntJA{mkH2}( z#*@gKf6+2|i&>RBnLKw$8;YA2k5%IuPg}i8BL$--TZK{6pqehzJn{ZonW6)Wr-A!I z<;4o_oqrNMRPKoqDJyMKcmh+96aIxizbo_8zoGDv4Oi&R*}2{gPDxXBp-TXLes0&M zC^dvXL)J-^6dP8b%02?98Jbh&0!$?quz{%Ko00W7+>*ay|JlP-??;NFJ^Aykf1ac9 z&e8kFo9^AR74@$O6GZnF9)tdhg|+nqg@VJ%J;QoiH5tPrs-fLR$QlJjgw++FyF?O* zbX!Hz>B+Ci|Bi<`vaNMbmGmZ$-w9`ZfH;iD0sGK355V&44So0NnyBc@{oRaycqT(K z29ifRKs(D0MI7n2z+KRJ9mQ}4D8|3P5!ODY%w)oe%3z|;D|AwisqiDud&_*wZ|x6K z6vJJ+YzNhax%5VFN|X@h8BFRn9fvt)l6XNYWmQf;;h*FEFI)8Q<<6v0t13IlqpFMK zC1rkn%M@Vsd-D81=3B16Dl%7Gu*MoX<@s8k>1WPL!FhAe#<@>USeoU@&7-b!!^{$r zy!NZ(pSwCF0&lL>i_c$op@7958v}aPJR?d5mr_vBs#nssfcmg+a`lJyMPE}#7ksRb z8(ZH{5bY=k{DJWE+G`>x&shrQf1dDN&bS)SiDsbBXw=Y9NCNltQnViY4Ep5|mS~U? 
z^IbO=PzPSq^;-sQ{*^Dt^MT>{Tc&@MpQ|<6-8(uBU8IEhZV65jz`ARP^EBm@_qe{ z&Ihj!szr?`pKPaJbd8zhj5Jmuqe=!>1Pxqu_@-0*+ALna@|WHdE~|bW>8|=9)IAer z*$+9U7JT2iDfVoAv0~nxIpZCp9|SD8)hN*$-Z$s#TW~!M zuAYl0cjsyM%B&=5Po|II?B>;a{#d)o3V-xPA=;5aJ?)IM3f0=3k;vosNh{qyP@DjZ z(d6Oy`U9WTjDvBvh;_BWpbDOaadCD1&$1i!v(_uf-~WNqs4V&d%E1}xmL@;)iGobp{;wQYL>L_yUiZgTG%Udk%JvwHZ>91vQ>Pbb@{U?f+y zRlM~G2f=bpk=$M7Fa3ZA` zO?p|#LhW4(SjrHlxyP3emUtcGJ_reKw5&F&6;N@twNJ8*degS8=Ny`~;(06>6m6$E zf}d^y!KxjC??r9+woC`fj)4>->z)@FfS!_GJ2ZhdjrEU?jFa(rRR1i$*j`s= zhM!$dG;GiN6tWjLa>=+_TaV5Hm^V>`Y`?tX7)?@t)lBeJ;=>(E&iGRDs5i#Uwt*H+a2*@EPWRBgQ4mJOG0-?gsPu@Tmn)p$~5;FG zg9)1P3-l1?=j8WipmK8l7Vy6cINFhM;g()_fW{D&#Ca*bBsw>>H_=Y=q7?%^XV}-w zw9R(PE>e%LVWmuI2N5rV>9V;~ zpk;?}hU#VFS2pq0E3WnMnu_;UP?bLBThJ9=-*Tthi7{^l@At>Z2x<88MbG#@s#BQb z>zVe8j7XdFjCo+W(pPFdQV2U_>oRj1(lG&u4;13rI_0&g-=X$j8muvCd&e!t6ZU~0 zZQ6FoyUOB3b){heaHDu$8*8=u(YbQyQH(lnm)Nw_$n%7J;LDzQr1fDq(z5oo(_`QX zac5s6ypzDHva#`mZ9GMCJR_GMkKEeMvrPT zU=(`X0K6xZipi~Q=G!t&zNMpyZNzb&e9P$*rx6XF_0Qb@q*m0@!|=6JP*WPYc?SJ; zMN#3g3BRlRZ}_0_X{Kwf{%Oyx0rtRub{157dR3|MKj}8;?h0jAZ5#uJ>kkI}sR%UG zHrQ)BHq*y*h`-v_e$ivtAKn02%8ai&1!zqo2xHB=aFI> zu54d-dH!B`g=S^DJf`RZi^nAYC#{ddq>lWD5aMKP!dA#os0S*Xbb79NeGTKkRUk*n z26Jr%MNFQ{HuRhMb!80;fAzlpy_%y+Q2#V0f3EdRJZTvOg;0^ritOO8`TDkKhCKxO z>8CzTapeC02ic;C$9QSG{WfF~Ye*hXtAR}r@ZwXR&5U9W=pq1Z@duP1im)^?j{=&t z%j+V_4Xmkyf*r08e1{1_MMFWwMEQYolrlDw`lgnPvZ}u41q1)KvK`p&6WifO6w)`Q zydk*;1GfCd7gU?YBN){goWsw|^NUTpwrn`IDBsln-nE>1_ftzDT$D866>`W{FMgmH{y-TZU&nRxqT1)WdlUR~$4ev`X}W~k z5Aaviy*pnAN~+c6MrGQt0)!jP_iG2n103K<9+ zXMKr1z?Ye)g!SGljHi@Cn936@MU+=EyC2WcbWxSjGyg;U#KN+-n=YuCI!yA+dR##V zx1~iDh~kNR82)qde-{r-Jm+i9$g`g`S$6Pquzo`S{&DwNVt)vosQrrY_b*?b?bAl* z2dPVR9OqsJIcru*)1McK+Y@FAouZPtXb=`ZM9i0VLJ(^&PCvLnH6_ zwPbYf#m;JQipdY}FyJKK5{jqyx3G&g!j<@z8YFA;Jls_4c1bL?%Wd_KMdt7-)FhR4 z4sq@7Uk;h>G?r|ge<3Za#bMen$GoDbJn#y$l;#IYXz7Quo)eMpq`8FMrzcub0i?Ns ztEAxpAka4REF(hk@5gKWTXKrGU}2tNHesGYf(E48Jn{L(tS)o^a^{2Fp#BDFQZ}@K*i!1n*LdAgvwH( z++l)NmRQmzN%s?YlY)iVGG$SWcL;`g;vzzX1w`FLBE|Uk!wf^?3BMFL^QoZD4i#JW 
zO(S0BHiiz2CD|AfQUtauM%<-eD~K9N_oqz)gD{LhmCZ0F5_9fJzEE9*_GM`Xx8^5< zSe`BgVj3QtEC(DELT9k+V>DH!=QcW1hFmJA1iaypQj33rSp( z8E*kovjQ$e(W$Ysc}eaX`1$IvnNy~r9t!D2u5&GYxkq!tSKPB1X|=~-V+Y)fh*v5d zlNJ7QbiY5Ww3B!2kFlm;7Cc7b+DAz-Kzh z5SXEjUh-GpnES}>4Q~nRNoj*N=tU2Qk}khM*&n=yL}L_rhY(|gWG*2fi60-w?FTJL z{i)UH#3$4O!OL2j@u!M@U{=tX9!^(rpykSFvixu@!b(iJWk8SFn{|F%dR$~M=QTLu zgEG#o%lm9rCpjDfJI9=0Byk~3@QuP`Rb67okb&G{D{oqj33}4quW5$ycWp9$dEu+G z%52jXf~)%0w79FKlMbE_3_HhP*m#a@OK$1E%yWI66ViZlo29x&?)jL&>A^lSK(mG= zv09D4z*J2srq8rFCte|VV~Kj6eD6+hhb;1aURmklDdiHc(|gr9v5YO=kGSlpAW&pk1qUyj3mU{lk5u)tlWtS;pa-&oz+m%?g z3_;)LP7dp^x$uYF_4potyq}#Fvf(m>V2=b*9zm)wD!qG=gB(Zm;VK(r-zO{pj`oTUpGZK&X}S8*n^$$5!|s*cc+gF} zD#e%gOu!Rg0Nx5+dw7^e?7jQuU9S3~-bbxrCZbzKqBK;@dSSLBCQhA?m+6YPu<>Fz z>MPjxv$dz+m$~%v9I1DTQDzNi{uNjy-jmOF~V!9J!nN$^iFM&Kg`WN{ zA5~6kC1Ma0zn|n@e?W)o(HL&VRS7a$O~sb9RDH!AH#%C7>MTKo=6=6O@5Da*#1m)W zlhz=w^)IGR#ig3f8DOTtV7980_qk>)aGKj>NMC5lE8`+E?)U9UE(Nw*q*pN$+ZPPD z=yu1-S4Y8J%jQ;S-iooO&QQv|wVOU+)J47%roYXU9IN<9{B&aHPIhGIh~;>AKxmJ07D)Fuonw_|4vj5rYT8I=l2r`_vl> zK1#)?kR(t7iOe+$T!UZ{iQi)+6N$y&4(1)knS+dSvqjyn@C*!8M-}G?Hfu7Ni;;dY}a>?3{DA?yb~|onK}WB=F=L(Cyz5 zrcQdksw$Y^Pk%6VhJ{Ik8YF{-FHnW4R0qrUWC%>DjqdRBvUaNYzQ2gGC8#x=Xk05R zuXe}lVpYSXvl#;C7plACy{=d#Y!MrNxT#lB>oY%49R2z23iT`tEI#did5h~6{xh&Km0tV#+PLyLs;F;w@reF zCztJhp4l1q$S-7>VT$JDVM;uluA_hfzUcg|70G1I!f5K#k!;}2C z+V|noqY>t4fnaL2>(kV&Z!`{j2E`NE|0-qKw zdHkh;k(e|(-Zk+!)*1WqAu-3YbiUqEZUF>)qySHf`2LK8>7ySgfi!p(D#VG2Wv?~m z6K0}9QtKSd%5!;43S6{Qi)SS@9*H0sArZty#1KXjX;6E`UnT3R3b3jG_2TLcDgwBSrmSfewD*T-P=Egj&PjA)TiMjRN8ICb5})%S zmG|?2d1#apBr^V?JA1Z67H+54z`3{eI~dK;_;afj}9lf2!RUh2!|z z=jmn#dJ(U(p|V>^E#I;N1+|$o6RV7CEWz^z?96wOQTIW&JP@Wb0uCV?sdj8ktm#T9efM-**jzJM(?~1 z03yHme4@0c;7cV%f(rZq93oCo@-sd7D4}^Jb4)mO?X{5vfO1`3doBoo*u6F6{#OmB zh3#TrFdnZd0{v=92BkcNsy7jXgf;x1P`xjChTi!r7|g8Oe_ESMT|s$q`kA)t*b)5NOIfWKt>4 z=!TDIlJwXv;6+JM{H_lwm4XW+LUIW-JPS2XMd;xe{LOoa!0x=L6(uBZGyJ+T6G@^P 
ziGiOaugGUGP^aZV1>lduM+8J3E)qoXNSo+MN(h9w2$EouQsnXvNIrN=gU`4m4c#>{IdbgGHE;5nQcA;g9sz^S`FrQ7_av3&gJ`+A7<=-1YoS23u096<< zW^3jkH%R5qs6#j|_E21uA`=WPbzfWd;gtFq<`bPqC&1hc(gkBkMtQW{4LIu9%y_LZ zW;!Aa-J!27GML)WT(D+3K5+m8B$meScvQGz#4X9sqbXteltN)}HPsj(g}CJ>oEm`G zbpbphExGWxlw-C+^`XJ)^8V2RP>p-baxW-a-v;et5u#t~*zcxpTA9hr6=(m-w<2#Z zg{H`$xQhvxLSr0ZDg|GR#E^fq$XXXNnAR7Qn(O`z_^=YVH3dD-gBw=5!)BJX+deBJomLA!Ggz~@^e=t@^Xf$2$Kp7tcEUQO~Q3{ZsDdHr; zV8}RIl?n;cGa@pP4)z0Of#5Fq5UVIv*B~a=QkP$OSa4k@9S!&*U2uiUjekvIY%C^5 zz#yTZOiO0So`8JBPKhN{gRSrf%2$4%`iX2Af@Jtj7oBIcDCYk{+OvkwHvQynBj*QZ zndu~qU{_^nRBr$!F~G#&9PG|kgf`JXDvw1JFt6F~wu0t5QubMgrl&nMWyJiVtHMTb zdPG(cngR_U!mUsay}p1HWV#wk8~Eo{6alqku$IhZGqAq0kmC!c=M$2}NhHT(spwwE zgh3St(zsKI7YZ_^62Z6VF$T3er<9R4?j*%2wL;1nOtPSeNKbcS5RgSX2m82r|I{1OhpiDTk(xD5q%)Ib@tm0ibglv@cFcDf(02uR+QF&2Sv2 zZ{}BEXp#%s%SzoDF+d!*B#Xl+Yu_`{?;?dkDHZa!gBFRI#YRw`abARDqxuJ_$V4zn zQH}XDG>DkZpsJkXre^9kHy}S$8cmP^;tU9adW|Hh=YJsAc@kZ87N6E6s9MVuO$41b zx^93UC>TJA7(?=quF6svkmx3gK-(q=qznJ=$9HTFJ6(PlCSqior#_k;@f<&hs{#qE zToZ`AE&*{-Q!xaA?TfYpX8g1k z0y+o&9G~GJq`oqMY$O%Kp4(2P!uCDP&1eq58PLG+q9X|m|DAGsaSF`hFovz1%?xoZ zJ>rKT3>-R|o=4UU#2=NAKupIhwAbhYAUlS z%&!0}JORfG8)_R66H|0hu>~{m3F<+k4^#kXlKc2XGLmL&kby<=*EYOkFjO91X0^^Z za07(q*!?hlD_n^Oa{4uPnc%eumK~F9LgN=>u2Xga?{iiJ~;vRkkKv8|oB zR)Us0ph?v>(d%GL3Vxv|Ov6v~jHKe8=dclLr*{yQDx1TYu1cy(S}ZBJZ_HdGsi5kp z|G>Gt)iJ+dG+%Sz{i?&KrnrY$!(5F+JSrRo{YV)D|3X|2>&AAU(5WHNs1D51po8RE z3=~1!oe&$aybf5^2E^^WTdv?0x;V8}%dX?R{%)d8|NC41LcTc5H{Gxx0o_Q?X@ktF z)cfw{!%>@`V420HUSZa9&pZ)*WmU6`xXWbr86N>TZgoMC+D(23 zdGoA_(077}w#s7IU_r?PANHYOc;hG}*sgBh2Jr)B*Kc7UCS7w{P)|?|u&9^!E=QUY zv$RP}7kQLIa#(pY7&}nDWbwWa{ZKi*>G`d?pGa0NGLZ&L);uDDnAqG}Au^Ft3K9_k zCSH-1LxB46iR3{Mbwg!Rv{n&P$q0(xY?t=CRC9#xL{Ht9oTMl3L=o{o#KsR*{Q_}Uk8z-uN9Yn$BT z(04QomMqo?p;YM}clv zPM8H6w6tr2`2+a~6-j0#Ffc&VpbDe7)hg(G^g=~9?aD&|JgFy#`6F#2wZ2MMCVgD~ zqZP~#fcG|K4hVh*K;_%ZGl@CssvOuerdNzrBGEALNS10=pXpwqY*9bEH#|OB)qdl% z-yO#m=WebH?K|57pGV3Bry(OTqxr>_hSJbOFiSn`JPOSqjX=u9EH$gh2h5giD1+-Q zV3wNBZpv1noGhm}4Y$|jP-0S87(6QnY3(ROIerUSyFB 
zWzf@~;_L%FK?Y+-9vHQ8mFNJlPXTQ&7$zyI@2jgQuW}$kYg37Ig-8Zj8sF?Z3*gk> z+K~(F=#Zr@j%Hv9Pg$;8 zb|;M6)%jJm06^R{)Dcv&Ja;5Ihnu(70k-t@{iT6X*iAu2j#1`BcZ`ui&q&6Cl5(#6 z=ZKypEgjuj6XTwOX2n6qLVn$@A#gQ8W-&~U!B9r9Yt50Z+(sYWX7O-_2Y5@$QO;q; z=|;=m6p4FTNcYD*t`c))uGCa$^m>(KruRM`3%toh#~>m>UI}_rPz5tHse8SeAl*d> zo*hl8B?Gf!OdmEtRAf>N^NC*H(7i5`OF>B4e-#-qGT~5BM>R6xOM%Oxd=c^sje5>< zNmu;!;qv&gI?AX520?;!U&+AJ;AyOBZLBF|W(GY!?u$Kosq~vZl|8>Pq}C`Lxw_;} z7!-^}2&C4CJOFq*mzep&AZB2=UYP@~I>;z1Zw$a%Wv?mp7r0>t?VpJPY4&-{k-yUb z3tm)SnPq`&bRo%|KTxKOdq@733ip!kPwn@!uz}Oxt28psJN}#4nBC~B{{q@SssSQiA&!>T$+++AlT!p{~ z)a!qv3L^GHXS}5Tt)>7Q&vJqJK3eV+BrE@t$`ko`J6V~ZbD7Y8&t)=su?+)O)`PC6 zYr2nUS$~@zWs#uz7+ny-rQYg*<|E*9LFN^|% ze*YoG?-Y^72Qea{$iQG~vy+Tm>sDF9WnS|qPoBgOQtdo@O%%fVTC*{GQbD8CqS)eo zz3s4Jret#CccvG3fT72_*RjJZKIj!eTc0(V1WM+5-DW?m>8 zS2wgNnd(?7OH)v@by`bDTk1f@)m*4*)SDDwZ|B4*@E@e7?^7OrN2f9Cv>HCc_YFji z=U(Dnzr+&lc+&r$p#ST*Vc8q^EPhGlm5+YI&iBh7D23(xpBl#c+SlHO6|@LEd#$JH z>SDIkuYXm-0s?_LYk_A@dF=gXVkyDIcV!)A<}U&?Y)e{l)^^hrhyCHkXrD9pdEd8q zGHJCi%sw)kzMUIs>(n@I^v=WnWya5$EHr;i0{MY*DEuU@3r!rtKo$aQEAin)2F$a} z82%z0q}$0smm(5&W;dc|)z1q{lIth%YYaQ`qEk%tqP#Rda{Jb?)1AiW@afup=E(Ee zyuhn26cjy3DtrrOqnnH8w!C`SSU&$OhwsqmxKO(*x^!*sDevdH^4STd^dynzRB6<9 zQ?==PYVNz}z1ycAO(q|G2DLuNq|*GuD|trZZCLg_Nz~SQJ7g_{>8vvH+i~RDf@fK# zTbnVQ@BD_6AolG*LE5)^&J?#kRx@u+u$F8lx@hQQVeYXte~ZC45}bThdY&`nteovA z?%?anT-)gVRlo5$ZMlVW?mD5>eF-&T<(idChsKT1ja-2V9YygtbN7~&XmM0Go{_6H z)n}ELUx+SNGvBHJyG2dJw3$de-^2QW;+s`jRtRHzo$}JH(Gahf@Qf2!4&f!uO3v7K z<~>{>=dLH8YQhQ^97!C`p9)`Rs&_9|Jy)^cPxV}VWH*wt+s6G?ZUEG=%6m^%tQQV2@|o{3|!eH6b8 zuA-_{X?(otehdyyJUj?DiV!aDx*~fm_D(}>BmeJobju6 zUkyWAb0?kk@03QhhuX5MGOKQfF#2-}IUpXWwQDj6sw?W@52R*o=0|4}ukDj>eAlFE zH%*r+&Y9k*5i?e!>`lyR;)YFc%FEau zeW`u+^&57gvr?fxe-_+G_grS6Hk3|+t@yD z@^Eqq5)_p+7EPVst23F@?{k-wOIp>z8Rm7~`;>30^V#67eh5^R|I;1s{(5IS6?}oL z=X&8V<>Y4jY0l%UcK!|}aro*R_Yv3pv8|j(qXZ07M%^-G$OB!XqplAULul=`)m1MG zNJ9#QKS5lKlGvwPZ9O?ueHlv04un7Q-M8j(?$_>X2*`3tHS4{49?&YgH4c-L-?DX5 zzs<$mrhZ;>j`k%{)9YrdxMyM;9Q?v=t}rT@jfQ?q{_vex+P86no`+3xR}Ao12)AMT 
zWSYC{F@!u;vq^Bd0%8RH+3~0SBDPpBJHZxC$3{I?CjTuc#^WnAA34n`iCZ$qwCcnd zQ4vt5iD(;qBhJ-u|G^+{!S6+1+)oGgST{itQ1Rf&Gr#{#E z)oN7g^~H)M*NjRI5n?S;!u2kEGI5-^#o?Y-c6yd{2!s6C!eN#0^D1VICavsH50zOA z^}90o>UO-BYd+l?NB|EAoNLtg#{25zTZE!RQIxOn;!GprXgqr3K>2+iDI-G}QkF-jZM zzmt3;_LjY6Ym9tsLp3U%OuTRxW6EI#dGfx?b9>kx5QvqyVuI7V4}@KBidb-y_c`gpvx_Zw6lF(UqTD< zVX^Dw4tpCjKl(c-Iqt~&oa^jK-psm@u7JRmk9??3(^!_zq81O!c4;OLS0CMGt!p(A z{nliq7qb>I!K<%RIkAyp*Dwp)4OibS8wYrTGg@w6kbHKJX=KVpr0w9D%vNn6@;+LCOj{y`wTfz7?^ktj<^CV!4#+=uF%r!GlZ)FQ9cmtDke<);8PO6 zup8bg$jg&7B8Jz7ofOR1K%?cC+*AA1i=bh&7fqq%&SidA2g8Tk8bWe3-J_OlVb!I0 z`lAvSOshL&Z_=(osc7M%76MQi;>kpxERZ2A#Xtv>SE+dm<(s#X*2V{@La9dz z&9ezzrATf(#R2$lor$U}ySkkc=GpSWYS@4kH;YQm`@N9U?2Epl{7-eNkzI*aA8vcn zyHqxvD)2CuH7^!h&4Hh9U)*Wh62maM~BvXuH&oFhfciP@#x;Lv;2 zFGS@1y_YQ?jD`(3FIWY+gad>_1&F7p$z&ediPMu^XeT;_`h0Q5WH_dx3VW_FGUdJu zG$E=!Nudt=XO!gRIL;(jCkE5{yBGPN7s!#{&>C};eIt{B zyO5=iF22B`*i8Mc4&ef}4sNrUB0ce@6j8i3DCALsEM1|{gUl(6vUh`XIgBw!F^tB_ zt4$o4Ghdh=KiVt^7a~()735oiP3x9CykmnqR8`QGTj{yOXPUcs4}|MhLRKq&?^>{*Bp1Nv#I@s&KDcOY)$v4%_ArPm`jNlTR>P=Jz@^xBzG z>v@@4CJgV5u#lUgQV`=Jb<~5x*)*H21}bXO8fmugX~(2uX^nDKUYq;j`7>(<=ULhZ z>uiNV*US)MoZ=VB8+^`GI45+~6Y=hzW>M3OA#v3BH~TJFAjPUH?XsDb#G1fevE{qz ztgFq6SG`XK=WA3N;3vLqiV+d82h@(16k#QPfbYZy07pbBet18 zx|2s0TaU(yi4>J3^T8<+YdhfHIwKU~(?`2W1*#KO?TK~rgv^f32l=sfa5=fymuU-L zT;r3@l!noU-pX#$_ZzoWzXhg}dgSV@gd7trq%DUwe2cVTv?R^s*z;mkS8G2Nty_YMyl3k(a>7F^)ZikeEA1=)=(_>12sId*;r1p5&n=Y-83 zweQ2hqf8dQ>v;bq_^H=|e;xjZ9$a7QQzCAKuk;(6Ngk?09LrvP428$3xJglY)qi}( zcvN9On=!!$Q?oHU*+_4BRkic7h$*4CgK;af0gBPZbF1dtO<`K|T8$nOpfv-DB52Kn zE~O!l5!eTypc6a{^Z4*J!dipU`uK2b!df$;0U#W>y_Ag)fiNtWqi++5Bhj}hx|4v^ zK%!Z$@rx42cVu+TRxC99E+Z^Dz*e#M16PCivL87Rmm z`4;(kdAO{x-XMd6h)477q*L{2Eu?G{NlNk<&@d9B z_Z(c-@}8q(5Ql&ANn?`Wz^6cw+~)|S-wu$hg;*V8Nbq;o$z$LLDE@_X?iw%eAtlZ@ zzcHV)>7b!K$<)-cmx#v=TW@oOD+eJ-&w;24DQyCeOq=%-Ko$=;hxm{);nkg^bq2)` zkYfjVi6GIDkgG$<3k<^AYo|V7qhgR*)4hwo)R$v7iR5d2&ivyjS49nbI$=g;vJ(>0 zZ4%NQZPF-dR}Bj~@oztweC^ChgxY~XJ46y)v|R;A;lf(UA1I(e+_z*aF(BGy!eu3| 
z3DU^#MBi`bKA@8bmGF2s`Rd_t=`p=VuPqtDJiz)(RzK_S=!d800m4^rE6e9_@~1sD zG!S=lQaX5+S-9@_4wToR_c?<@aZXeN3=Xg(%@h04Ze!mDF?w5c*XF!MNieTf#4+u~v z($YLA4l~?^#~|afmTo~LD7do@;u9cCNH`F*1WZ)%M|Hst$M>td3?dxa=O82;{-%00 zY5es)GT}M)0C&#JtSdbTgdI-g1A$O-s1hkyP!_)E0ZxesG0gu+j)jMT2HJ*@-l5fKxMNHTfwY0tDF4Tq75o4ZIFch zhuO;x8ncP;-HM?^afeQiyAo};h`m%uk*L%u=74n0Nh-X&Ody_>RIo>-EX2UD1V)(R zhEM@{1u+0uF`C443df=B_+-%a0AfOe8DuGWa2ezk2m$l~L<#Z9!JOrhyugy+0RF>Lg6`a}o#xCQQ|h_? zz5of+@R0tHBChsuSyVAIdg9Pl*uoV8rw5q&w3LlU+8x9gAc?f}`EHhIb&lSBh(U;k z9lwT07EGIPE&phMz~bFNg&@&8_eDwYiIn0lB{)b3Ka?YB25I&Hj%ZC5aFzJd79}2@ z&M`9AXA@p4J~?xwWlq;)gtV)oIt~s}xQMydAcqw=qZ2O02noi);m{^UxF{<5wF~D# z07hU~=7taA?34oQmoNSt=|lZB(wF*5T}J7CQY&|%Sc&XM?=L3&wzrh#v*!8l&%W>a z@+rlho}n*naAdEKM^x-@$pn~BGqWKvwE zXt&6G*sHU?WzFh2w43@gfMbzmfd&Zr^N;vhWYsB+$Q>p_H{s#%UpG$9dr` zw#P+x1_L&_Rf?y!Uh3+oQ{8t;ayMO`7p8j2yzn%f`LZMqG7x4WPkCQqGZgp;Ql2yU zl<`*T>(lnw#BbCh%EQ&OubI;uB({oui2wX+g!`-C%V@wlrRK-{vbxMQ5!g=YztdG; zBngZWQ?=3&9#qy7{XD}FV=10Oq&{8i@K|kK*jt^+Rw77 zt(g3QF5b)bv=tP4SCp?b780EGEv{tQhi#2-T~0r+X5x@3h%UJybo3$L*+Dh`d4Kw|;;oL?=An7(^rx`-a+|*G%BZNB zsj2lo6*ZqQMK!e6E5U+1daw?KS{7BA4_9U>!_aQS`zmi-v8Q9x;?o`H74rjuSJWnG%$sViOlcSi27_tspE4MegvljZBv|WO z>p~ECwZXu2BLl%n|1Mw_$;JK0_eNv&d~yCZ_YsRI5IwqWP?q)q4n}zf6ode^5*U8s z8fJoz151I|0kh`@tkz?qVxmIGS$LE)W7SjUE9WL_g?7laU^o9Uw~kWk@&*(?zCkuY z*}H43NbQ!afJVc_=a&k_rDB;U>uK}$?v39@-5S;3Oq|qNcZt;r<}!)k2F-0b_?DJb zjXv{6OUyFthJKm~iaVg^7jhsKRMO!C_>191WJMPf#)n@#3svpzR&{y@Jame2WjAa$ z{e5~LFx!u|b7FN~H)14}p8U!CZoNj8X`_$hN9;}m$~-Yk=e`6m`As}Y9ieLby>QR^ zX~B;h`q<@Z?^kGJj&*xm!zPlq^jVo@^Kuhl)?R+g zFy4GS&dz?+dvsd&gDf}hOtYZVf)L;K#FZrsnDfc5w0oSc6KuNScQ5zN^X^;lY*LG< zx#$P{a5n!}vOsf*)Uy%C!D6g=6I#J~V0>$ca-z9x!l}jK`{lgJWNKD6Sx^!nxc~kB ze4nT31eA1Bcc5<`kD;!q=nH)G;}`GU+Y61vZz=lgYSUKi^B*)-qsJ``7GgTr)wQ!Ee`v%``NnJ5t0hh z+jRxvRorr-Hafg)HfgjYk=rMQrp4=!k}4bQuE`t3ZSBppuexf`w4ez=Bs@|GO=}KK zE69KpW*96?QkfWyp6UetI^r%kSs{o;7Dp?k4+aGMxBF`>ircUiM7K>Z)`p6lE7d4s z=UARJ%2^eBF4!v2&+sz!<~_Jr`NqZ;cNV`%vZ}|UQKJrh^NV9p9!OS#}lhiHsBw|mQ(<J 
z$%3Pe#Nxw`S_;O`DM?SPG0^V|7UF4dbCUJ!eR;=7 zK9??TrtP^{4|3gs-4sSxGNzt-yaRrn6)`H^?% zY>`4NR9$F0kw$5{zT|0ls4GuFk!aTDbpGLb{4B7CFIXTzZ@@)L}+t+qs2J#$)tRB`L^ zWJu}+ujR1%fGkhX9SRXPp?7>0;^V4!J*kuoUgOIth;+$qpv#a)U!A;F4-;1pWi-QB$u*WS?QeO~!wt-J18 z_rGr?D-)Q>%$dFS+55N8?DIPYT}OZcQ6yKk!W2I~hzY6o{J{(e) z4_RfVlKd=`wuU)1WL!;FwIy0#b}RpK5Lt-oq;1-CU=eWYUieGW2f~Xyy+#^#h4A;~(BVOiI2+BvF)KLV%++X{_iuoJTZSDX{%BLe z#c99Lib`5tN%XN~Vxl%DmWGlcd!j4pP!gLipySg6cP@(s-8IQcT)|)W= z4fqjcV`u7VqWGy}bMl8OOsl|yBAH4k32OD^%M$UtaNf?x=%6_0xsGP?TcYU<63=UQ z$AnfEIb!Ws3WP`yfca3Xl}Y{$y%KJq_+v_vQ==I?ZtgZ>PrRlTse3f=Z+`c04;9Qs zS&i5no~kt)^Xg_f{JdCJRPdd5y*J9P*Hc}&!pvEd17gtBv6~SuJxX67os)~&sC$0z z!dx3%qm$F?{EeJu9uO(RKHs#sg@EWos3uRzv~aW%!iiLpuB-;Wu+KlRUfro_TO9{J zSMDxj9?4$yvQK$lqOcXQ{K><9>D;D8`^=+PmwcLS{bvK)PtPC6F5(r=cMrmrJo1!D zTP2pbZUgI!S;0Qm%c4k{c46&>ToaD@PkA!7dUJemeO29PbW#BRSh^ROFH5@cz9rW^ z+_=qFG9ylWu~kOHBJ@m+Jcv=7Y+>PEAHHT*oWA$#$uVX(nMdQ2ed=H#h+i*yagcSd z4-XW{58%UO-ERC2ky~;y{>fT-<3zS34gYf>I(d8L0Q+Zq0yOBOe&X=w%5|SXGo(f6 z0Sdg=h=FZhT2@ffcOA>TPFDD8nX^dF?vFOMCB47&^>tcD%B%BcxKuy!Gt)+#YlZ0w z|A)9hGpUcrVEK^lY4M9LU2|PCs*egt1Wv^Q&Hn{ce>8cWSyh1{hQ69luhbzPV@CYs zUmvR0_Jes9?uSqV%|S{^`8UfkF>J!3C<~Th1XZ{jw(ZC+2JKY%ZlpG4=(t_tIUz%6 zydIK-!1Na7jrJ0}?^F;hJfcL&chBVn2?)i6 z-WBGIV)3GyR?Musg*P@#Z-X@c5rnb(YqpX*%gH$O3{2Q65Hg9uWmOJP_ri)}bpd_# z?Gg`-t$K#U;M|y!+k&?RBH?jOyixW1Z4_QMWcD{4Ed}o&uofQ{Ph`cu6ODXapbVd@ zA(1>iOQBb{p11n1_sjV5Jm5tg4jn^5nSK zk~xEMg%tWyqtf|Sas_bbtKT)QEDOnTDOZFE zhFgd(p74pq!cp=R*r3mDm$L|S7IhiSVEC7O|9tzWko9RZ8L3&QA|e8%kn^+JqR^`0y%DCAT`BC2K+^wu7eVH9PQ%l-NaCGG9sg6XArP{0ySepJ9H7JzliJ z!lfmWpRh+FN&#IihM>%1V*f<@ktqJoHh^;GYBYRDRSGkj^mrjUfkkSR99p> zK|hG@{EAjy`4u~{lfc8RF{9eTo*7JtX1*epl$x0o-^6TbyF+n;4R0!=UD(-dE{H4N z!uyUPlm;4uRGqcCM^- zGJseV6*WOMpG_c>RrI)MG0)#7(mf64iV^qMYz%Kq;7ruOd8{V4U%!xwkb!nx55Io# zA3Z=ke#YQzr5}Y9J0Nmw>Q}m z)3lDb&LkAh4eB%bDYc$Tt7zMFTDTnhDH)jCZ6`%t?DVTuv*LAf(tmVo<$`RM$m2>5 z*^1V7T-amgsO;(1*g&ifLZ;yK=)iFrajFiWL-!{f2s3KHq{N>35$?`{Z?7_R%xYF( zwMi^HC5^A|*V6#1F0T6z3VTcEjQBgwiKg)B*?;Izqrk8eKlkc1BGo@1M#v5gO=A0g 
z4-^9ZvvB%_`an*wX+_QFhwrQGs8PBt4aT>A`HHkR?xxAokv15sYz_pVHNOg4dSNHs zzsYlvM1xEhR#I7ncnkGnsbNAuEi?A8UY1<=zDeBpA}Z$JafGw_*;si+{tPU_+CnUD z^gw&;F?+an$rr9N;b;NFvH)&wd{ReQTbeERMxJM2Y;GZ&-fn(O?E;G*ys8A=w`tlD z{mPC{^n+Q+6YXO~O4+%YF)Fv)8t6*f!MswF5+C={tPzRX`2+iYl^ep;cS#(J66#nz zMG}(}tU1Cu^_y(l2`5Kvhnuwv!gUM?c%G+?3uAsb@t4sSa`x`jzL^;D;F(RgFQD9W z$p>W-6hd)Ex%)b2QIoU%Lk;z2ERl3N*f!|lMts?o$Z-g@U#{DjMii1W8lHWqWhH5R z;3DgNy3?M`{J5lr@^KaxvAor=>?mQLUY&TBgvop7qJFx_5AsKC?YsIQSuStwB9#BJ zdSJItY?{+=K|yHjYuf3$rgNV1R2;9h81(0IahL3jGMlpJi(Q@kjB#Ee5b-jd-D?p# zwy*am(*Snzfuac7UWBc9FrRXiRcO9)%aZkk#6`SU>{N<0Anw*DAQd^ekSA`%?b6Fa zI<~y=s{$zlT$?Yf#HbZ zIQ{N~yuoCzE~?9~Aac0nee~Ns$KA}}L*&w@&lTKps4CyZKhzp6ao@^(y72Sintr6P z539Y|QsBZ7#(>_36;~WCphlM@_0z!np)6`FoANim%GDEA)}z0b1uXMO|B``jdN+3X^bRsSIo{k@pD1VA7GJz7^NAc2-T=4;oR zM4q_7IIWR$2jN9qldorW-;4HRiPLF0uwW%qT<-_}zUn_N{xp!%VYL*wu;~R^d(@lq zS2|yqe=z^B{d$G{w|?3;wmK~57-p+YtNEt2?pE|WCxZ0`^=2jZQ(n&{v%r-NXX@?z z=|bEnbrfI^aHyZ4g(8Xjh@YBM$Fvf8GFj6c(hHxK-LeiU<9<*5;%zYV@h#8=v`(E6 ztU)Yi{-2wdpZf3*F^*9@k#%sOIAx$3$UU={OB(fH@|wf6l5+d4QP{1EiFJ*>@)4du zZm;$tNtj*7X4w^GtoySQfo%?DsvOO1VSC%A-JQ1;kC3}>;{V>h36>m<6{&c7v`FK(<(xQQfSR;_Mq^3Y#RAT4Zj(8u zpPf|HKR7RQ)SV;u{plU)WOBbj50iK>$v5c714yuxy~X2YE?WaX#^?Zf57Xt=>~YHKH zu+2bmgoqrQzb2^AnCAgiPPGpIW$pj8JEZNKJVJ{zlcQO=H@i@3pi-m?wF;fQXi|RI zv%jscV>`McN?5zOQ@Dji^qOv8JZe-w!gp|1%x2lceCsCF*-J}qihb6%SgU4OjXizb{%%f8ZdVl z#8-kC&5SxG_`hr}UJRxqlYjBzK7tKo$7o0X8iu7J%zhWh|4)IM?wLl7msbp&^G>pe zcbem56^{J|Wc<2IrSlmm3)MJRED|?M>sOi8(y~eUwG)zloz9Fts5mM5SPI!mky^$D zYK_41EflCWWJ2IEA)l)0j>3U-smNAAA^+VB?~T&i8q@AFNS$lLQqcpZF58sRJnK^w zw^J>@MFjsM<`!cBE6h<0Fvu+0PZm8J)=7kKDSVhGbkzh(PaX)1F{h^OfC>+6(A&Ne zbls+3<-w$#_W_0HnibdTq&>_+N!ET-24Ta%WLIjEHdT)&T#(vA{WqiMPP~2lq=uD5 zGt(Ngdk4uTKug^QQ?QOO2RVgCU|-3iO_gyx)S?JQPADUaC8X0PAugWak(+3zQ>M1u z?tphwGN&WPF%Dg~1nu9tW|C8jh&FE-3jRA%^DFs8IL^S&1Q#|>K1cPutXbIUnm$6E z7AB>aeHI#OjkWo6BQ*RcZpIokKanmDxhVh&k85?U@S#9I|Hyu;oVyOB<<&X_YO4Hq z*K+S@&L%El($Ezxo_1JGTiIXA>MBVc2;ZTsv^ 
zk&WU&1Ntzyi1{(lHP5Y|rSVIEQ9Pl;fp}#p+w_T=bPJ2!TGi1!9Al!o6iX{=h}fCS z{ApJ*&RSOVemNpn-tggo#wbV>W7LIM`&x`j&R#m2ojW6i zw_2$=@KyVV*&PUb`D?c&wcA`Xx^K0jFur9K{jpSRIb)KP_d4K@A6fi|UKLI#N_Q2M zrBtu@jHENuyM56V2}vAXwcHm{-lnGb78_U=1$MTYsc0aZ9tYc3x^j^_?W0C53%)KH z-kHB@5C*41F;iNF_Nxh>giw&W?Om!(_-D8(p7L86!N*@ZO;O4t0_>{gE*hmjfpSMk znsQq_!NW+ZsyF+vv}B@qA@=45CsQo*J$9_TYW)3kQ{?PF);^?vNilv4ArOd6LK2ve z{u3y0o^f`9CZ{&NvqmU{nVM(+K!_>Ez4<28y9So*kP$<94SAgWPzG4W4$RsUR({q> zj~2xU)Zhk6D!@{m{xd}UwC=JOou-Cer&V_$3Yf*G`nX3y}d!M zk3qpYjd=YI_h)Hmm$eipJGz1NuJ6Yz@93a5JN6%@X2n%BNhGIyE%zf^s&~F<6IK_T zlsX&eJcpm;MZ23FQrCW1n9$-#(<~a^VX0g@?KFr9?hw~%(R6{I@;BS+lB!}D@Z04d zWjZ?*Fu)@YDEaq~9YnXsD$o>m+GibwCJIaI;upO8ds5TVcwu3d-`vsX?AfTNrM;Fb zJCdl;^K=mdwFyhL(%F>3^a|GPVKARAPj+!2JKy9d#v8xkvH+EK(h?!u#_fVCl157@aO(3t6Jn@ zpWtnj2L9)Ed!E1YH|&@!8Y>W!idiwZYqz0{37nwnlY z^ANCIzEej`5E%Na-YB|SMveO$l<2e*GZ5UyTx|14e zFz4I&Yt6G6b-IEf7W;ZF>uXWz$9C`LKwYP;Hr=Kbm9$RV?-gcaG~-Nk+Fx;kAu#e| zQ?Tt&{5eULsQqk5>Ha34+x2utRQlFs``;#v zpd@4^B-9AF{v4O>fmVt})SR~IH1J%x#eRVw zWi{Y38gv~qofa%H5v&-O?xLR|q9+KlA=0y72^t3p@p?y;U>CUIaLp<8k5R`{)pNWi zLq-E4(j2+eRiZfm-0iUKIRsPDrd9cZfs6g3F;SjkmB!QhJoXO$@!$=MTWR+7(=rt-@{goSO!>pOGh+fSdm8vc)^I zTkw!~_dn#)Y(zRLpynw{PAp7bFsD>U93|AVoYB?_N;j_lB!`%CrAVk?o92soyNOoX z3WVX@Vkj-7iFxa*LU#^jH*>68!)@CeYez{@U#rCIV z%xvRe_(3b~H102o$WeS$cpj+qlw~4)@Gr%9uiwbIi=oT zrK*qAj|bul#{w4SH61>18j`jaZxp6!&$=i<128BT`60B%%=(<1VRC&jM1rZ_oi;yJ@mnjNkO>JgEcN z%Sec#8rbQ>YQwQLKDeW9-(EpX1G8IzTQ@NW+tDT6NHM$kNCVyr_bLiG(m$W_K zQaWO7`#sX%w|{n^Wf%DXgI<~bMAozz&Mq*`0&aZCmYexC!u`BRV|tE61TuP0AO9db1H|9fa;f*c)PtX6f-m(K2n(05g~DFU}Mu3QR1_ zbC+gl3XBEeNkhE0<+$Hp%~>qf@{Q_a!*Coh84S5*>)`^>PV@7jBqRjgTq?QmQ&v#Y z7MgFh$)3n88ky*E<;SNTDU`YMy-JrZ3Nks^H%{5iQ%A~y*Jj3aR6FR~2?-Wvv1C2= z&M|g%afDE2xh&qm{B>y=yUfbG_b2u`$6w>yJoUHiKhEk5epMyaDu*8sgKuSe5C7`p zgRQzqywUh0VtxskY21TanMBX5!OjnSyME256?n51ba|)bGeHD;f=cF`2_5%jM*qH1 zU2+?*QJFxr<8^BM227|lBH7Cc5gZ$7v1(is%X8*-m|^j5PkEK-TwiLAb7vk2R$zZS zOS>Vs*Ey1RKNZ!DP!go465?u?lEPKC`1-l{O86m 
zqV!sRj$%%&bD0U|4{KZDR`KcbELGx;o&lByamY)Z#Ls-9@~E`vI47($|DeBta>lm^ zSAJ`xAPTJqr6+ic`J7wpXSt~t^bl8DOlPjoT6N;nI^2!do-@paE$q~v*Nu=`(d?hQ z6dBYHX8k3iuOTPI50JwNJjpMjn4H?`1qKAYI)s{M!pr(SEk#nwig^=3h5uDNO42x{ zVb9#J4J^g1cN`=TPYMX26IO0Dee%%PHfaX-_}QJd57aUu4^5P^0{@N+Q`1jk%+@{U zF_25iuv4>-JF4ql2OjVmYkitYs>0(PpKK@B^B8 z4C3yA2*97}8xYRvW$s!NZv%hqlmx|8~fvd0^H zyOk)a@)FURiZ1ft$RE+mjH}G2O${{_?!#Z%W$3#KI$`qccldT;{cx*q=(F}K7T(#X z1`@ujJjEO#K$W^4fLc&^P1loDHyNoIoZYTUla1sLmF{Gu8Gkk~EN~%c*zjk8BiUo( z+VOW)aru$aR)5{zTVxuRw?1%$L0%LR;Aj*Etx=G}sv=qmUlHgynoZA})z@x!0rJ(? z?CiR9PCaQe@tEyFRD%J7J@)f?Z(L1ZK_=$H1xKDo zD{PF0@g7c%*~ofV71PwixZQyL>-Yh=!ZgxJU;Sa_I*tA@+ehmFp;FoUwwV2W-J|`C zvS;isDe*cd*cz}migtE*!YdhfI(B*k%k;-Aatj41?-A$)x8av@t z(A^Pv17nHz&lWUgzs}Cw5*BA))z>k4I>_9v3sG4e?kJkwUgR(%z4pta*_f;8l2j7!rT640 z!@}-Ic6-INgNy8>u~i3l%C6b-8kQJl1qU4z`Mw4$q^qxq%AEPd4{BsJY8V?+H-Oj% zU73Z`B%e4E=%Q@>bTQWPa*LKPtdDGW9b|fDX-7&ti?rSH7Bjn#Ak~jO|(O1 zNt@ZCt1pRu(Ay4IT3?b{AE5XyHd;-W?F20F+s=98DltVNXCUiuCU{mM?%|p1?495T z<0*rTz0-&@cd*m-?!nG=FV-mKTT*nNRrj(VgH6Eh;BC2I9OiF1;fFd!zTl&Ra%j`> zd$P|oXl>L5b%NAxg)?{gAQrpv{9jd~>5S~ljQQSRp91_GJn*k1ZRX_BxZNJ6fo-i# zBr>MH*S=+a_W9Y`?N4>Owqqq*pHK#(RA=}&YOTF+vjVf9HMP3st)$MuaF(~c=P=Lq z3`jTk(y448*R6SrB*RWpg`l+p0#bvwbLQ8MeDQoW?#s( zuy$(E%<BxC(Uo~}Tlv2D^qAk#e3ThC!^*!KXoM_c9D6y9 zaPyhhhj8TBde(thV&^%}9(+w+;P)z@=dt^84TvoH4R}lPSH2QH-SbQztwA~XFvB&q z7OGr4$zP!Ns7j`pjx08M=cOfkPtN5yVrE>!Vt9Cc%6GfLH#ddZ+2+w;Mu9tOVZGNh z-l?g}WL|ft8@ixK5v!|mQ_?iINv23zRlPqSbhamLA)0Yr=P<3U|MeyPd82<;yl*lD zE9%ghscez%fYM&br7n_xu4jgTz(zmrM)W>PcYoosJD6zwTKp}uQifUN{%?cy?^C^^3|SCw z7Zqk_-z4T}a`oW*won{#?^dvH*mlLoR__*heT3>}q0{QQ`Y8>7EvmGCj(4XbU_DNz z{EU6SicoLXic`yrAb{9$Pi>y|{16({8~xt^T&JpHj0_|DuoNg-nAJdiU>2O2KpvyL z7-sH<_5`_nHH<6675z0xg9W#;i8WQGmUcS&)keM)ec)Q!hzwO4@2G+Ee%p&0Fa(@a zJKvV!5CKCrB$ji{=auPnG4ZYv&ZPMON0khU;h!n0(`YBATe4uv;XLoK>v$kAH^h>B3!U>93(X$BrVK z8Ax_NHFN`q)T`Jb#$v2dDX>#ogi@`PB!ZxpjS@qlrx->|L?Rt)2Bc_LbFm>-?1}y#;_B>v>Vl1*|{oh|N6hpKHNBcY( zao=QK@GY}@1p>5IlBe8)lzK$Ki 
zJg}v;=y>g*<@yyQ{IkX=ko4PG@&YYx)wt5za@-XtuC`e>tV%-JuT2wy-lkeRe#~KR z5Nx#dr#An-KL0;ngytD0pGu}n#(jk8U&J-`93w|dc)7<)cJl?NEz-K=y(_wQ<7PqW zc0nrqln4}AgC}_{sF>eXiv}3toz`a=&9V2;Qwk&OMEvg`)c{$FkZio z$fKZbJTdyldRN6x3Q1(~HD^r6f7x7+7nuG^M2`3&bE6HN6cwI!P0gBUOV@OJM%sh# z^arALB#w3a0&}n#S6Z8}@EgK8kfObgjt7xVRtd;!&4i6*uu0ozr|J-x<%M(&8dnxx zK@5!#8_{vy=pI)wssBLsvj3>0{kx*;^G|+-L1BrNa<0E_nk!vvRhfWM2ukQWoq5b< zyvJp+lC)j8n}*c;zMqD6X_4Qzy=_EundAuh1ZY)R%H?TgFUMCTxfl>h2_~4Zk^207 zY&9LytJmO4JkjQh)H~9QUdbPCwlur=NIONskQAG2(bHhx^cHG)L;U!9np0cNJGhY9 z%TIl5d0zwmSXT5~K(c|fvhd8tTzp9yF!mir4_yQ;kUNVF%z($iR8;45$h8gNsm$X1V1j**GZb1EeDOa6~h3jX_SoIN%$oS zoJBjBnNFMbo6&o5Y{eiGU6#UfFYPV=iv92!?ZyLHw#76?hDWWvFZW0VjgqyJl466Y zya|my+wox9_DDnY?J|MZ{ALv0Bxw=UVe`}bCJFOT8Z*j~V;>P`ms0Q)kiN>ay)HKh zrEXwCe+E$vMfi>h-IR7)tCKQ-lAZ)w*j9ewdjEFW#sEi$cOvpPpkl;)JBPgFY#Fv| zu2Nrcm6m;5K=i!0=!wYGCKvh7ekU~_C4;gL`(G;AsGrI3N6iL2Fl?@2<%m5^sM8-y ztWs#Ulwucj6IT^*!!N3L&d^E`Ed6Y$Fxc?aGJl$rC@FX^iL97aTIhdPaE%q*jNeUg z-GDGQm?d>kC#%AR+S=OMR#-^eXUbF*xqNM7HNV-{bv=FN;S1#R+xkW%%ikY=gzTY? zf2@@Tk}y+PS7Fw$++$Frs6U#U>Q?*sVty2HgsUpp`(?_>9&F>0ts!9#i29`i; z>7V$L8#UrQY+A?p$Nh5g2pTT%H-0O~miH3HG;i>S|t(~`xbmr#s zPS&F(U*SPVNX{uA4>yakgXw<)G$dFvYCcHhbtd^Tv+X1l8pBUEKTn+_&+Z;;r%>lJ z6y$iwoIgoh<86CVXl0B#sdX|xdcZOpZ9T&^sk8r`|FgD5l!H!Cs^2dK{w5zTtcv)^ z?-Gvlqka%AJq;I8}bw?LGQ)kZ%{%(OW zS8Hx)$YKk=ZoE}cJAH4DlPkcH!8&N0u~} zc22IP7Qh~~loAbKt>bP_QqCf5k{3XTdf9vo@XCiNtx--7tIo))I6{ku{E)vo;{1VRw7T z6wG~`U478X=BNan9TUkm`Oi?s^2ca#lWh7@h&lJbzSGvuXbOeQNiq}oiA`<`urPRn z`rdx8O=?3u*icI@z!g*tczy%c670>~ZA`|T*hYT? 
z;?(caV?F<28DG)37O5h_WnRx7y+xUbI3Rnf=3P55%YLT?@x#yBig?v1O^H%Pr;q1y z+ElX2vzM!U_B^t%IM%3ege2VU;G&E08o(|u;(Mp+MrG;cS|HI5k28=_j}VmTHxz79LXIeCZDzJR@?1v9b7wzWe|nb zD2skOHFEIjL$!xNAZerBPUWgWt+tA$ zk?+**QN6Fz%74cB6$rsY|ECc&bv^`ocuwgpuqMp?u8mN`Nxc~qCQFsdk}bDVCS$Sv zZb3|87KPU}6a#2yjN@`Fo^kk9tah4iLIyQjjpe<;TIQXMChz_&{9EOOg5Jd{*`k-U z{_)Tk#UBRh1q!GA^x?wg(C&um3idmzohiNxe1s;RR}|>U6(Xp{Xps(xp#xW(P4X2u zx0uy{QWNJjxWzKh7%HbgDF&Idg7Ob?1oj!VI0rTwUUUv8l1-F+fn1jz5Y#{RZ(z5= zG(K>Cr(9jQa69V*>4oTRla1nEH63iFI@R(%3VVS3*+^+*JGxy?>=lV% zW5!r1T%U{YqkWxRb+M`A``Li*aMQ`t4OVS!Lz%Vj_a&UC_>2u5q5*Vw$u2$#*2${Q ztXW#)W1FX0_Np3$eksGgTeIW?NtIb1ckw3r{P@LkgvE9*S^0f6m3}57ZU53t$jZiP zHfanz3gYYV4yEZ;Z%u@LxTciFZxV-JY7#DFmF-W~z+qoN`R*Tj^}vh_Y(Jtb)_mUZ z&*kd_D{5y;-C69b%??YaCo#qftkm&$gRIM{`SIs_(hfF-e^y0pD8$w9ke1Fhy_NGb zk-hGymethETmVjWxHXR^TLw6uC%n|-8EPF7n)93J5YsyI`~f7ukQ(;Ic3_Lf8i) zPGq+pNU(;*QiXPl(Cv$SOJ{u8owO!OuFUq-xM<+0BY%+k7?%+`a(tJ(u|N`+o$ z?NuNtyd@qwjukG=YHCW7G|evx=P?lA0pZlzN4bpzOXb*Ye-a80Xdlw)k5U>I*FV5N zEMq4d`0K*&Vsrkt%i=FVPz02G-o0{qb1JSe(w;!=s+b~ZTwk0t=eX7(kTS|()i}0P zj^e@EYK-x7kM*8!Stkq`SeX0O4*lJ~Jw*ACAWF=AFXbCzvl`d`3cps6w}y{_BEa0L zf`@)zS@^tnC<~k3gZch+I{u9W%Txc#hBQ<2a;1$OEpSsAwAnu4uRtgRgru)~{|ln~ z*C{uS%(mSSc9R;5=-M|7LP7uYSwl?f1Yb3WB!2)sY>9|-bC%Fht1!?B>(Yovbf z9yhN{GP#NXy{wTZMDcJVxaYL)JX%vb03K9LyJvk(P&XV1oRP%Ajpas6D^14})Zzaj zzj=tH@B#b=BqXXw-k}4QCcj0^EX!BI^AU_O*&LJWnmH;RekCXKKtKHSY^>pcq${Ho)-hf`wWaB>gl`hwt zH|(|fhhO;DRo@3*i|o{Qv-4s7I8&HNX2Ig?VoIy$X z&{?45^mH}Gj~=Qf<}~ik`Qu6^H+}BxV7UGwMVU(^`%tsiAFXRY`9!cKJpj3W0Ch{d zFm~%w#8bPDJ>>iOE8q3M+D0HCY|u0lrajXNMWbb`EfD_|B93)a6A_&%!#Fk+T_AtS z>MBxM%L8&Mc}q+h^D}7SI~H4s-j_IeUb=Vt@MEas<~8UW6U zzHv#;;s{3H>apXz<*=;>O=7m1lxwLno!o7vbNCsRcc1x>W8Ryr@n1mRO^M)qT1uz% zPwIUmS^^M- zx|;L)HB|(8(iKH&qo!f4^by8}UZX-7Cg+-(3VSt{fCj4zroLJO+5GIqz>kU}Jn@(p z-roExm0wqaNho<|S`rgHE~Zd4xR^pbReF8EEXd_9grdI(b*sNgM>+s4gSHSvrYS_JACsmnb3?n};})ewk8a??0n=kEt8KsTFkrar1AY7WfJ@=t~7Y(xEyjHlYI$Xz%eUA5NNleVu@CXt#N@<>WCKJCrEKGG_6F zySily2|)DQ2lD1U)`LgfHXl2|Xl44-U?B_AODj;NcKn~C{ZJnpohUsT8}|@2An`@Y 
z>RYSB7t6UMm*b^eNRm!#A8ifPwbD!J*LM?bB5X&J5z$CLr($wf$Tdi8>dT*QKb@)D z+g3Pp+YjxHINNY+Atv)U%H(RtHb(h?@qxp{DGV!7(DWt7X9A-HaP_PS>g&3oC%1|A z{zZ#bffqc}CLenbfQDMDAvcWAr;XqjN{vq&pWdyB2oy3e8@T(Bb;KoElVK!8;#bA+ zJ3bfCu5)}r#m$=$L0Rfi~Y zvGB(EyNd8@Qt72I($H0DD34TPqO?5S5P(IVF(H15J~2>LWQ2sxu~@jGzzUXK!Si_N zQIcq{-_(I<=KuQ*iFLL2niNV(>WBmW0}Xl3Apkes8tPBEbi8WodaI>SCW|lr8u*N! z^FQ__a2^y>;s%?OXl{4&r^zJlSePo!ra$J2bIx@rS_8ble)Fj>c2)uS;*jZ$*-D|I zxbBneMDwu`NUG8cAtBxH|Zb=kE7RYK|(_!?2ob4knsOqE74+X!h1}Vw}qa# zuR0w#T0tND;Q}ODsV>f*)z-#^7Cilz-z1qCg{Xm~A0M$fS2(IdR60{qQ=p6eib{bW z?F8Av1d;-|3VSXpj1n)D&deh!vH~)>t{LTT=CTHRzjjhREkUCS^T(FL=|NlIbkF@L zMP1j0z8*ZtJN>`Yr5W-J{fHN0nYiy7eZEGP08#TiWzpZ_Vh*MRU39*^}hE zs?&zZB+7!{fRyB=>Xy?im?3!lwPM|!=M5I)apxuC0!#7)Ioy}(w-hS;q_xmG&z~nx znUg?HEo0Lij~7kqNr`wLlZEby5uq5`D4(gsFF^{II6gzAv5SwnMHm1|S;TUdMusXc zfs=ScMp8;3>PDH21QXLEu6`&8m)J66dVb){f|rpk*pi9~&;{Vr7Y>v67<5`(&U`W0 zi!0-6Xkb;5DtfXMyBhVNWAR!hWQi*k=L@Ze7-!(Yxd#~ogk+d(4Z)J@Gzu`y}OjiOP%3pl!Ta8+jYl>B6nHi8>L{}o_ z#p2sRA?-{0m3_Sz@Ev!1#&x_cqeIx!kTOv%g4k75Y-3vHm<_i>?7E1Q2dRwGIoq$d zFK_yq0m0KGvciet?|rErFp{n5OYgpn&_|vVXQe(P{R}xhD-6l)*r~o`?YH|{t9BOV zo9ujZd_m$?43ncq1WOF8;o>{ID#gcZjQqBWyc!QYlDb8QAPJtoJTV6dw}T|aj(WrSC`e( zo&h9nNEJ4na+6Gt0O*}$`l83rUR0+?H`BKIsC2B%VQuJ|MfjYnFHb5}Wo{m;%QIet zId)v}{-ofx7JZ%8$?b#L_8SmR!$sNnjdBTRicfD&DlemHHB)B&y)F#nB8gY+$<7gl zW?&N2Ld5$lo!Yj8g%Yb6c{9)iarnQco=S2`InLSudv7;7rfI-I(EKKyq#y0xPNkYn3!$9PK&?1bnAy0my>bl z$?SNlj}<>lzzc6aoZpy0RtCD0Pzv#1WMClTs;6xr3XR&s6kdKryu^>8H>(8EH$uPO z2mHgl-nZi>xlyrvl>bL+53o?79rr%#Yt%BYmRUwVCU{BJhu2GHhLdRa1SS@pGu;lV z&e<5&%36O#rt+agUQD!vH~d8(GQxfcNc%!bDUe`Mc=a0hVr&@>=8fD)GhE0@3~s1f zCni|KEhTIbR7*s)Jh4l7D3f4a_|}64xr*>{N|6Lre~54bHw$lR<=i}m_R3odm6s$B z1>RFoeIzmK>l958@Xp}l89m%m=q3q@oNec^k*3Nx$d==e`{t0wPNW^t?r1m3vg4a>_u!C&ap5@3jE z%0WVqdSpRmtpTBwMcNRQnEre({pJHC5SwIpzcP>usN=^mVKLS4;aiwylhs>$m_qcz$zq^OzL~;?mBKvK5 zF-}RQ7fiV&eNF(pz>$)oV^I=Ms7#WA#fOlFJd;+#e%&fE<+7c_?_?5Td7E;Z8yx|O 
zUv54=?3dP%s^5SuIIo4EFNeKSeQLc%02Q7NHBAww9X;7fwcdDJ6voom8T0Hg{go7RfJaK`8X?}#}20wLUTHQ!}IHlZS~B4X+KN?ux;92WZ^RUIvSwVMeFSg;%m7CZzRv8t z>J(9KqLX%gP4boMc29KEl^HT`A?3<2KNwO5Ch)`nqTUfNhPp&r5c;ps%4pxMG81!s z?bnmnCJwj(2tR2*_)&(kkkKy9$p2}svi_ot`^8KaUx@19i+Gs{(-oP1GR-K)LJb+u z7d{&V+9>ibz%R9o=|EpB06&!yFME*?(}khv0|=N#SSe8Arf|-F2Uw1{iL0lKN0}RX zprNw#sQX~iygH_c=$CI!lG0E&Rl+B9K!?ho@+%_A13%Ypq1rbfsLfRGBu(&90K z>!W3Q7*B*C>Pko5PkN^X(2sZ*qt20XS`R9P%k~Tpy>B{a1W2J(qBLB$}Y3?)Tz&w3F*n4ik^*2vsDF-Fw@ru7Lf3`653a zGb=P)Mt-sV#oE-&?a0WgCUv`e3A+08UygAj=EsR~4K<({@+J)Ha8QXX+bH^sxCt(- zqge)4$?ilQf%^O!7zmIhnu zZM(&@A-mS3#pFPp!7zwtWJuxhw=Q|QboxdNqqVoNJWmTAfTujqivZccN1P81$n4`R z9z{$h>TEEE;ycBukZf#E2n^x&t+5o5_WnQYy>(C=Z`U?B1Shx!*TE&Ygb5l32A9DJ z!8LfWK+xbmI6;GZa1T1zAi>=&0t86{1j*NVe$Tu0KHu(E?f$o0^>%g5J=4?PeV_ZB z>zs2PK_-wSOk{Yvc)p02fIE5o%M4g8_54it!Hr^j!dJ2kau+J}9T90KLOl`IROf#U zYMn%ZKMk)k(^X}mDwI{=fidIy!dr$%kV+`qd~x~+FP-;jdAo&&4v*c zN6nX?W`oXF#{#=xM7$N{u7bwM>ZK;hjf4s@CCC%r{^y|S;N%zq1Dlsu;@aAZ=xGUq zQ)IC&5=%DA-)MYMda}&m;1flB6;hZSp|R_y_Z;!nI4+cmN$2Fqo&sw92lNROCITXr z?aOpmvO?o=B>^(-B-STkvKpuFqi_ptmvX<&Sz-OU_AM%(MA~=g7e{C~VqgE07{K3G2X`Asm zOH5Y$l&8t6P79Tncz;)M_gRdL~019F@j2lAHYrT?nL@Q-*zMtJZ= zvD~}f0E=)nD};(x@YIXeAEzwyOp((tRZicPMfF2Lh|Hu+38kvsD7J%P8PoeK|m zvb<<$T)zAoMn&lz4D;f|@*BA*wiuDr7Zu&oOr4Jge>G0tP5a9HmYBf~-EFf!RtqR) zkw{CSpJKdFyqkQkM}bR&elSdP*kAP`F1wL*FeTKRAocWOIb9f-PcLqG{!!E$mA_*) ztC|?g4-)k&xW8)PxN=W_!R8cIcT5G*Wf_SR7m~m)4loakt--qrRQW0|*N!Sgruklpq$5m#12fbUyNF~y=$zAlc9+aPhKvZ3eC*}Gp zLcMft`3FP!+wFSS?i=*-XR#BI^cgAoqZ5~`#4(r@pNwYg*GofHdqp!S9&r7#=-+dU zKQ9!Wd)I?i_i+jx18P)Kfc02T0^^xuOo|Wr>D8&^VR~^SnTI%*KMJ=%0gF*sf`m6%R>VEh|q7_GZVrWb+v@)CvO>H-{St z3^YCIew{S15<+#g&%)Z`2jO%He9g+?ln0#q=!0O{-J3|;B7FJe$!|~ofWD$@I+s* zelZ{|k@UO1J#n8PV3IzahWWZxc$ik#SAHPwHhM5njaNIkvH*l7i6|~uUXRAd>(-XDhZOEo`{rk?TBKySd1lz3ki&-?kS~5Qs!L*2}J* z2OmXtT+UE_=yAc+7X@ezS=oGWX`;5YNXJ$epJu1$;++mpUX){PrEsQloUIb#g0hDi{BTTmFL9`6+Xp*O=lF!%FdmcSGDb#rV73z)dR=fZFFkC;T z8T<(k;a<{LyE)(r{uGNSX)AFp);ISnCSZQwQ6oP)flk@M;a%dVZ7Vc3K?@~%KoLhM 
zKo$BtdPJ-I+=}&Wdq|s^7=E!UerZ8sRLmB=f<{_hn!UUb4t?KN-6%u;^X^QgD;OlxoiKtxMWv?Er2On)C#TK2t?OGuQ)K*WktYnCI|M7zT$LEzg z$Aw3b3_nWq6U%Sp0%K<@pV5zn_ltg^@b~)q@dB(Sel*t$Uh)t&~(E;wtYMUm$Z}FO4u3o zF1Aa_q>BO`=NV^ftj`e4HST!98FoQ(oD;;gV7|E)@9l(l6J8y;uRSQ!fyMvO)r4C4 z>iWjwZpRxE!>tD^O9*Wm(B#^&V~ajRIBJf0f6-CR!sQMMqdR~92eho+Fz}=2LTFbh z20vu7EuxiJINU#NvLh67JHjQ_xN781(ojng%=|r;PZ>`g28ELbqDOCAu;|g@{1>Lm3`YKm*CG(XQw!; zsKEfGmmS?>Y0gkH5p2H*!QRN>L&>B>n$zUu zh23q^kn;;TgRyh>rvsh$)F$ulMbv!XTbvad&$^dy5F#Tecu!=tF#qCvSk@^?bZxUg zB;VL{aWcGXAG+8pOUbbs#lNH++1B$C;2;G5%APFlP5>wb%buqg3@xaDY7d8S?`QFEa`sH)TICL@z1S zH@JXWzxd04U?egcbL6#Ijsv+bho9^U;xTdpnw?`r8vdAD`oainf+${P0j8d|Y$cW~ z2*9TZB4ww+j59tw?}+|@5@`bgVgSy8`S{%hy3h#zJ%l<;5}|Rfnb-p;nL&b!l?#-NbV_p%HPm#DJ|?!@=Uu|yw$tV%NivyT!0HS6lo&Nko;5M=H%qTsH*q7o{g%b64ekHU#m2 z_8TtVawxvYz@#IVE5v5O*}yqF`Y^+yGd5E-;zvm};)lD(V9sRVPeD7f`kGbL#4Cn4 zpn@iYk+qR1=IRyFJzeC$@qf6k}bng@ILfzQ+P z3lx9G)})_ntQm*}eZDa86AbA+Og$Tdr;ObO@+Dt-ELVsXZ@dwYVr9b zI$D&O>$&Pq&o4v%^sh-a9RJxlE8oYoFBqb1eI6m#uW|l)@H;JdTC{j4zER&aPn=>K zAy!y~m%9~;3s{Dj5%Hd`UIYO%N7FFPT`qmiG;7f_e73`Y6D#|quyI+#jIyn+>WcoJ zIATFo&x$Er=a-=pyIPf%AU!>DJOEz=f$)QuUDv*G?5GM)PN}b5@R3MM?Amgm*u*QS znih`V*i=_=GDu@#RGbS`<}*u#>Cdhv(54yz^?NA7Wu;lf1vYS)4#Zf;H=*5cNyxu; znYrA%D3S)s(m9Jd8Ovph<0s0;XZtb@VzWpP=p>muE%3JqA zbmiSsqqGNf{polEHj!U~4s4cP8lJjT>p36LeKaq2@l!?!;7Y|tq8}VR(Wu7Z_I;n` zFibX+qiPz(%C4J2M1t3sqH0*TQy zbHnS01?M}?f?JR3UgeOX1Tc$r75WQoNFShsBBKQK`Nu1jW_hnlXW3mxN4thJ0_K`- z9}fnEg^cuu#6P~X`LI2Fjy`J?x>~ex9Wk~LH%~`VAluweJ{O0w+}O@0 zTz#va`wQlge<5`seE$uzkf)_jzdx4En`OAnDA06B^hbT-XzG0>!Zb(HMbZ{$;)K^b z2};;j+L{c;MkPt>JU%n@STz4I&qG%a9}q$tCZoSOw{|2H5h0M3)5q&AiCb7CF<{^I zosfx(i-;Vl-bF?Ibh(Ree|9z#r1`@B3fF!4+%e+YgXAi)o2$Vov#AU)-9ZG-r2y zcQ1_m>}KoQzj#jI%NCc6qzxId$B~*3w#e+wS4rjliiTJpAszDbxD~1WLRsli_fStM zggokj*H)j5(gu%>De$0Ya`zX6iWA#=k5)LQ@Dj^Q z}@p$F{LYl7vX_V`(Mwx=SxL`7;*L^A-Mi>mi} zBQ*uRu#L**tzRh7_${!JD6iL!m#+V4g|06XZSvV1>7XWSi6f>5K6m^TZvbA7>7}OZ z!S};lHIETl1Ww4N%Di=IAOUxMGcH?3r0subDIq?;F?tM}uO64K5_+#V@}8#1vBZy; 
zom#6P;`g~dtU}5^i$Bmq#S`j4HMd6I&0TxyZ&VM+fI73$vH%QVUm~OP4JuPrDipEK ze8Gav^5Mhi9xK0lz#OAM#)O#Sy35t*0KPf+5qmMqiDBvf7A-=vm)zie0-MqCy0y~3 z?QxuPrgM^v;FY{ixAfCG9=fG_Zq6zx21PARKCbWx&?kq5y-Z7vN!#^Flv>-!D4dj4 zM;4rJdynSlt`JxM0>iRxikzxq^HlUk&vpGydSha{1yip$!E#SnIFF?jr%X&L)kZUs zUT5Ha+np21H=02=B(GLAlYDJee;sS6!YK1jb-eB%CTOggqp#x$ua@eMkFJUNMl?B# zk^=)1D0-erQ72@ZPS#?j;VT2i8q24QhM_B*@3U;N$kR-sb)JDTvgHO~G(~S|LUOqY z2Rj>*3)6-5URhR`4OJHt2NY~ubRc+snD+4Jy^)cmj*q?eo$roK7>>zXikH z7lGEHL29U>kTQ_tYq+pDSB$~iHwQ*NJ)t5K+*B`;P{QNbSKLX9_bnLNceBbdj>&wa z;CnSEIb)l_HBGfxI+IHuwbmwIzN;%^!vj0WULw~H*AuQH$psL%(MBhs@E3IDjrc~|M1;>Q%a&qM0nA}vV$N82*x#y#= zk~Z=P-D`L97ir1S`=UtpX}xzttu{&wgnydo zpFF`B8KjVE%0dNY47>otn}RHiqwe);)de=&;(YiMfFe7A|Ak~cdDHf1{HC~ znpppNmRA378)uU{Za=W@`^k7P&HZQEfjV)TT>rji z#eZG%ElKH@B>4aj7`W;BQP%xE4T0s)_J}{A_Jm|bO)_rXI6@7#b_N;jNV#s`q8c@5 zB1tWfxX@;eao!NTVTw5~a^oK=Y30Dov0IU9umkcFd^SzaNZkp3^Sy}Z`xdfP!PT1Z z04_8l&B`ozclE)wxigEuL?)b-vfzwUk{i$nWVJ_Rf}12zGB_`%cx^6a8E|@U3jda< zq!)aFm2Yi#NewKy!n15E<%#i?w1U{g-9LQX&U6apc276ljJp^zafrE++~sRV3YzLt zT;YG*E9#^5*@eI@i^yxo6ILug68fU3cX{pNx*&i5qblVT)7r`0X29|-<=L`n$gJgx z*@f6_>r%fNRSFe=pvPD}y%E}MlveXS>!iM-296lBlUY6#2<+xWsm1(tY_+wLS)`S80KMs!}Hx1|Fw zf~RZ^r?#sSxOag(8+@*<{g4-}VSQ8#Xnj{?VGZy8qGdxS9z(uU)Nj)HTsN}coWZj@ zi=3w?_O>~~fd_{0C(2){Ilp~89QV; zM&OjQh`|jB<%AlJI2W!1n?>?!LpcX*eMcxJQ%n~bV;M%#yR6AD zM(4J>-@B@wh7^23IYys$W(llDpGMbK%e^X*VUE-g-+kQ5`tRq^$ZJ5o%a?V8qCTFo zePR7wCWuku?pChQm{QLZ$rE?o22T)&q!n&!dGL>QwZUiS^%dnXb4x>cm%D*8p^(JX)3l*mEz3ySGxgMeFSZd>Z?}n?Wfy-{ zu$(7;R0?#_Ku+nkL=1CZZ0s!!!oR(%i<{0a?&EF~^{%%t6{?ScfKcdE7>ON{Cv-}< zR{y-+@*^eCV8zE;IONZFh0c?U9YcF%t_N6>fE`Ga@>othhimw9s+V7$Q z86P)GzKPY1NenLl#K=Pk{7o0D4dew{4>_Yn{s*BOuAAms2XCFtmC!HX^bf)lxRPhW z%)DoUMWl{eh2s$tzg==tXX{i&;Ok`HeFK66KHu8gb89C?_6rW|oN6xh@MHJEEz4|_ zBGupl*o5O}a=(5Rd2>sTOUT&RomN1ez{eBNx#-B5V!|%k3D49%I zm9V)^g~Zhy9@G|B5$b`>zhsXSyj;|-a%;m<^$V`dY2c%U)6hJb1EQh$MV@L)aI|_< zOA>K6w|8VDj)o*X4d8bP8-bzR;x_cQraXw_0vO3Fz(LRR&@fDL5$3 ztR{x+7}Rv>Bx>9Q1pC`I6DXJa+iKkVH=S=Y%aKaUO4!E`O7#-uA*yMfsq(r9;Wd&U 
zjSUz+XOwB$1xFW+!A8)YAYf~tgVXHV8mb1!q7=7O*mwd1JK+y)I3r{xI80u);+;YG z1KD5&LRrD+hi;AM=^o>AG9twJVY*vxJ6SuKWw8%BHO74F_oym{vjsDOSN+4$l>Lwb z?FSf@C{Ro4CCYp`op3g+>4=taj9{r?SxfNH-0E*{U}PukOH9Ewzm{J8v^yFT2Y-Wb z3!KZlbaXH=p)6*KD{doay4ELw-fpWHgh$AnvnO-ptnqn|`QT?U)n#y`jP^DcNt`;m z8SVn0)M#8ibQo{zNMs)w;9D?^i+se$k2R2(LFW`V1XQf6PL6!PT1UsqY;J$vh*zbB6eEMw~`GSc)aQy<6g#o?uZQ05SeHvBy&4W&!Q;uVz z9~$p}%>KmE^U%;%?~n|TWz1qmO)y8_W>NuFxoFh9Ivb5ODuvV0@*kybk6(=k$d`p+ zzZJ6!ZL1hkAYb3YuZYp5KxzQdnnFl-Q?wZTvpf^0n4L~vsDwlO{Lt53F9B&an>C_yPzKRhu(CV zr#o^44N{a2Hrwwrz-I#FLMrnWpu@p2{EqnB2%|D@AJ6Wrc$f^=nhrP!k%ju_deTk`nQsA|W|tpIH? z(v^~1>|ghSib_I?D}k&bEX9H^eo3TnNrQW-M&PeKHxQG%1RQ}=AnXYjPED^R>6$sN{{w$zb1stCQ9stISZ=n$5=+%81QN*aEqJAY3=g8 zwTyHD#?zSGkQP04L4DC)=Txe~RXr4Sj@HWp2~;3gqR6gr3_jcy3H!U3nEEAJ8!S*C zu#TxtTfwrP$Z(Fksn+g+5UeWkWPX3?|2}-i~3b=Zf%_p$%->7{-~a1J072m4-wlpkeE*f~h5qkkuPW(63nrSOBBWDUt)7ul z5SP0qvPEGM9$Ko4@0^->zv#H$vAbgF^4X{1OReZSOu0-(2!x_{}hB^GbENfUE5d1{q z@SZAuGAO8A?hH2jZ%SQyNEU>1K1)(K92qV~d!@y6#+QoLl zy3O!X+&s(+%ZSs~0%FBHz{-O6%w7D%po=U}!;@_&bKJwo-t1t`03k~-sG3CB_}zir zx#h0L>8Jm=NuamP+3>P{$8excI_G0Hfg#K2Qnvm_OyT2%#Ga6Lt6DQ}F89U9Qsiyl zGIC=~y-<@#{wMo~mKpRO7xk(M?S4xo!=_zFI=la~;{V*9|IfV?4z*tfo4bz$HAyV` z)jU~7-d2p0R&x{XW-R;MU3@3cSS}l*OmB132&W_O=7QA zPeWCZRPT{^2B*w%yiQ~CbMw*cJjcfT1XiY79UVL5zD3-D@h|<9L}?#+u~dmPRFvy2 z2d#aPuUVJ8;iA3&dd{Q-=I}Cx`DPZn(@BYM8s8 zK~x%%bd@zKa9oODk+GsDHi;gF-cmt!|I_@>E%=|e;QxOe;caWHY;pnMZRf5mFVK4s z40vkMWQ7=QkT-cJxART2)V4h~k}z+Bb2m~UfSG4-r`Yq{?E?{L@O56zBZkC6@{aZ$ z3Yg6f`@+&W2{^_&S(&*o;+Y8lM+}+nTtaRmCtj8=Jjl}bG{*)SYs@(n7)tVeTIQLG zK=b5q>`P*(a;j zNagqg!C{_LRtAmwh_S1akZY?--xDY0f> z+?7OcQl9=eo2zcs0nr9G6lK0wRDRD}uEIgB=wq?v_(?#SC{W9%o^G20*&cK@SC~pPC{rbbmv50^S#0sxr-cYzA zC8k!-;v~q6qq(&`;WsT}(UsTT(;ev+hpvGCeMqfJt``$sFQh8EYYSP17cixai3ojSQKxdc(>)aIYYUyfaN>;xy8*5^&^UPXQ!n#d@A!u!YJm@ z82&&S+vIzs=Sg5>`SKl6*1LBxz)rQALp4Vy5zp4NmA@Ub>c&Rf_mbQJLHb{Q8+?_x z2ON<IuH@SU`{&y~lbxMiyZD41L&?w9QIznc)bCLTvb-~asZ>+1jcf~cdTo8=-^-sn7}duMi8wRdzHBRgBZiMqn+-$~f4Yx-lhB#O!wC8B@Mii$NM?;j_vK@)9&~ 
z)XFp4I8tT#0)(f;$)y`pzzKWyy$!a&2M$5Z0E-oe332@=-ydXSzoPct?d}e@rTqh% zo^D@FIhRE+ra;WTc`zrxEXekVy~tFcRn`lrrVJZ%TR#kYkv)4JUep{JY&4WsiTvTM zpP;CHke&%+b+zNmlaBZ7v)gzmf_3tV2Kzzmk9RICMg^8iq;p+_@{-(E2+fwcVnG;jguadf4_CQ4)I5|MXPoY= z2=_UKreQfm@z5lvC{GVvT=sNdTr{G9800wxR~&FLF*8S~NX0(ykui+A*w*UDH=sJ! zccZpP2C~{E779O1j6-Q`G2FZHWZz0f*d|Ew`F9t}wk&vSg9Qeg-z(9C>tP_&K@*wj zR{EAO4%-TR&0(x0fcyKft-s`uLFKvXliPfDQNk;=v{=Ok?#MHj$5ew69}P)A`(9j~ zS|wrWV-PFK0ij2d#L#DS>YE!o&t`o*e5&j58ox{)vN`Y9P{*^R9#XS#sr$7e7Q612 zI{c-+i4&2R+r#6cnC658*q&37zv4|OfnoHykxsp6kagulyLj&LqIeaK_#{P44SS6K zA%2vA4sqA`UFLA>cW%pS-Fj&m2SI5^*M7HVcE%?zCcHiAWRMq!KSYa(LK<_u`)$r^ zmq-&@N17^;mw_n)dyu_bm0BU+_fg05GM1)yLF%V82PcX#A{4gXc$E;FXb=&Xguv!y znR3X`&o?R)R6(J$=y&5dBK)Tgt+hiT<{z80qR-Iis$r)SEj=3qVnaZ2DCEy-UYtE1 zaHY~lZ_CL>XW3U$dX%%jvlL^rW!mV?sub#e>JKQ~5D0#<$*)=UCGaU9PkPoPx##ta zkya2CP)3KGdfnu^M&s5}W?l1w0)K0f?&#$B?cPDcE&q5J}#FByztT#o+jFSFo&mziJIcxU&& z{UmX`yO}XjZ_U$i6hrvi^8euLkEWYq)t2LxPil83(SDw;;}ASxQLKr%zCWs?BN8LW z@~ksVU5Z(Gp&fKitS@#-a8L&EKCQh>TM%KSmBf?Q4I^0e7qMTXp>Wn1RhHJWE5CMQ zJ~?T&2t4#yH8Yvi9Art3KK-)s)Obo$r0%!+9I?&rZB?zAsi&Y%tD@byT%}>k3#P-1 zYr!AgG+$}kdKjp6;D12XaRWC!z0UeW`=c$>gZkc|HIyr8U|i@v zKH!}?8SHGg^R~6rc@&_4(!%xI#TquH*13o~(ZD(Y}x)=4j zSyEn_OBpgRX1$-z&sMubOV-!I;d&U2!lXd{J`5ZT0%u#g9Nq6tGrnVEglC~wSR{+N z$z+caeks#enu>VPB}5jq$*2EK2T`05XqT+hl{};=P70)K>d^%|$6aa~=Q_{ie{K|P zYcdzFo8IS*)=su>Z|7?71MNtEyNnQ-k?L1^M?~R#@|k;Re(4OC^C7Jtt{V^+is{iw zn4v*$n zmv+y}0_q2{V!s8VM@gbyot&Dg*4lgF{kkMH^VMZCp7CLuD*&{l33L#g*QOo0Aho&f zhnxt%ef=s~8IxP){ZU$4>OmcCl@bfxjG~QXSflA(dSI(_gX*_0lvEb_eqe8$sHlg+ zoq~9kKz0xqga)1^yR^GxucA{{5vvffSdkZEpTR{_)Ff{#Y7F(YP~oYcyIHL;{i=)5 zq#vJAQS^HhAEkK44Jxn((NY>37FGOpBtUm?#z{t?EUlg0Z#-3X&{xbxW&||JAMqhY z!1zge+jJjV3^jiEKrLmN`T6z=@)nHY!$P4@|LM8ZWKSLYKKK9ANV4@kEWQm2}ywm*rjg2iZU&Qg!(}OaC z3th>T)~^4#nTLeQIguW=?stts&+;{zdz}LZ_JC`2<#E|FG+A*p^)zads72U(ad(3} zk6plorwcp~&%IR&bjP6FxT&qBo?CvOu&|}Ub!6=}SYBe&f{2~!$1U=!0^5c@!_(fh z-1Lc`K>qA?eZMrQ1Oy7XlX^s8Nf355|84{y@1XxCAFmEe2_`Uv}C>LgRrrhqAx@04zy5U9ApHiOPik-tmq><)vOvN 
z%xCZ)9JkK#Q!r5w%HPMkiW$I5_hE(u=;!(sGC?I}EfPA9Z855j55J zQ_49x@aIvVb*y3gEI;%#_JnV+wyb3Psy}`7+8Z$q9)=U1XLHg{8>s$84!+y5sN>H-`fYYZoVU-5j~QFV>$@qs z$>jU0CJHv>1PX1V;0!{-gQ&p)*2aIUXy$jLmu8+c7%(pb1^NA6-+jAK z2l>F5U-EHMVzOr5v3>+X*Rm-M5}sMsCcG8u5K7WZ3+C<(#lfK2=5MycHl7(#!BFB+r?kM16<`ow+ zV^qDJo@dD3mS7$fV^zn-6EW?-S+?fTs=_m|_H4TTd0$Et=(8XFL#{mzm-ZXpJC{|< z`@Ne+N#$uBXYHgg z>h3l)ZXfGT7JQl^$>VoM$&AtU@{vuMZ}QTYMf?wr#4%}z`6aAC1|xHhyr@0j)Dn5+ zt&)jO>;y&p>S78T3q5n_Gh#UmYv?~ZSIpA!$>veF%-5tVPeL z@86V7Jvm;$l1xebTtl$If8tQvzaXq9E`(QQrdApm9m4ZKpjw*@F$yl&S?yfG>|8h7 zP_9=X#cS-$21$bUZNc2-yn~$Jhhkf83w5F0cO$fGcr{(%V8t#`W+w&K^t8}3ZuZe2 zZ~D!g+W7Y)I=jL9UIA~TAI=^sInGCjN_xIGf_xn^c!zmVZ27OOisFk=f|^Hgl78jOWKEW12}war?;KBd+@_s|Uz6*^Polk{tK^q94& z^XqJP`<1bv!~0<@BCfh;4Y(Vx%=JDs2yhS565beNCitPnoKUP2tjXdQQGicrs_Wqh z3xv>yJ1b2_VDqfW8)=qX&~!;sQc{2Mi9?BXJZyxrVua4jXR2G5y7oI=v+!0lc2xu$ z_*9W}T020#+9+0V_=mWs$ZEXGK^hmv%iD*YF;Mgfky=v^p&E}`!Xo$eEMrj$9tvdY z`RpkEBO$fntID_MI_;LK1kgvVd6ZmW4_0v0n7!U~u#LF~b1>6)T9)ab4pa9p7Qo82 z(}`IaHP2$ssI3?iG5H^m*$E zaW9?cr?U zVIuJ=GZIOi*&fsIY>Q@Rt#@z4mKuQCTo+7n`8ji=Ctwh@jno8*gq5CF?)u4BPyI8u z?)B9Q0U)(#(s|9J8z+KTLAm#!z8V3h|8(+}ZjQunMtM%yR85u$ast{-{w(?6eRzVX z;XM4!e;G)VKQ13>7pN=1JD+**OpNo242L=fA+oFxIbA@M$7FQnkVQvhx-xrwYjM3K zjQCnFHSEkv?R#yUgL~;@3TF*^Wj2hABp+isBl^n~;ZpwPK$&vGD$0iLmt4_xe?9G zh1np_(Id*jh>bOsPjKz&Ls*=GlMPl82B$b!n?H-rQ3g?oHKF^9ZEEqyntJ($IKKYp zbICInE?XHUj9s1YE~QS=C7z(k!7(cy+2H*TH@f^kB^L&nQWq|mu&P2(_?p1`OMVi7ei`eOy$9Y&6F%DC(yWhH$ z&V&Vb&3vq26|GS$kOA=a(+EO#qI*2x6&;)^w)5ym>mpEG!AnLwH=JPZ@onLFm+JKa zT{F#IOHH5XZ=_}95Pu0gC!l5Rbk$+=&T{>Q*+QYw^{|%l!i*LT6-%wXbzF605=BY|5Dw`u+(rJeRA*J0xR(W_<$_}e^{~l> z2L;tZ+u!`RS{m(ovKq=sQtT}g8dU=DO_0TFcK=Rhf1`5X6oIiAnVX!$bkEI&Q{<)x zvKG_4nd*uX{nZRkmfy3ZPiu-0s?>0rTq}-HvF_&#dSr9x?X~pwm(S@a#z+t@Sf@0P zoucx?gdr9=6P@GJm3D}Z!=eG@q>o-f+K!bbmqMd#S4W`a(T+d0r1EdNcziub@oF{T zA__G1$+*i0B4g`U*L#*<2hB5|9^B3P;O%s%SIyk-h>e}Ua@D3K%5{Di)98|Cj&qyi zIAg9v#XBww!UJ5^083Nuz{Aar>t2})irYYI7w%U#J`@MRQnfbZzZF~+Z$oDgSz=DP 
zXU3k{UTK5VVVT0b&f^v5B3%#9w@+NCx)6f4w%|r#nnqGjKx^X=@_zh1zbpNl=+E7e z4!2}E%2Vi&c$=(`x=75|I+bEh{FfM0rlyxl-3ur$yH~};vViKeoz7k;h@ysQkF-~u zh04wQM^?1bWxqmFe9`cxe4xx$#v%1^n2p<4)opIpwsQ?P|6DwM^bKh)imxdaKSyQ= zA^E8_P99=(8Pp~=rP!72wA?#Z_Rnb*qIR z%5gJKI0?LLsH&$?nmM1K_|uXVdP-5&yODN2{(auJJ`Ywu&k9atn~3}Cd*6%Xd`i8# z`j+N!i_vk?`r29NeCP~Ml`Jl51BWFpAr76jf-nKS5Fc%TKWyWn&GhrHG`{=d$wTVC zYRd1abFoYVelU;hY^j{aOSQiyc76Zl1DMk=^m{+|j3kIqlCCLH7MqL)#EI?a>Ywrc z`DUM;_sVN!ve0GeV9YA6_RQf2=?E_Hwh+1jd+Jch=Hm_eF0l)kJztF#tSC29ihEQ_ zFsgw|8{ZMfQrQQc`{()?(J%}BctXi@&E(XiK#0)i=UXl66nkvO_Q07z)qN$K z%xvxC4$dwcFb=% z2`nt#VfX`bm_A*;41WldJE%EA)3Hcb$csaA*1B>?FMrC-esPk1qVlCfZyh=3mtCJp z=pshjX71De>g-)Q1}GL#98VnGPin%GS+9kjj{#Nt=H(M)P$fQbK7zMa`i%a+h3|8- z%Z^m@>%O#oe?CqPc`aRvQomKTktDx{Zk%L-s{79kWa5tBcyI?4%9jlm0l>8=dTuto zJUgU|xcgnfRY;ynfSui|y-2^gn#IRR?=9CGUZ*`|DEk-DcQ&rEpI?XB-ILa#)RRrx zw@IH=NHYyS2vk`lSzhH--qCxe-L5CAhdu&AclA3ymc^Sj8FRS0FfvhRz8XgeN&NG5dlB+53W}xg>V>zTWeEx2xv}DA!q&uyK5kn!T?e3Ku^~Y%?IA=huzYYL9Ph z3LiFzkNB3AbbTZo!G00M#bn=N68Kx{56CsCI9snNwkOSXPj^Io<$>@w(CY{({|T@p zX}W60&c`C#cmPw44|wNaW#b5XHuLIkt)#}%)-^seU0TO`bxj`RRKvYs1gYidd`q@} zuq}6n)~J#2bY^9fCkSlPd3aCy7^UVwIefsL_>kxk*~TW;{SIHCdN!yX6s34nNyJkU zKdNw6r9GSmU9Ay1LZ=uMdbV}K#UdmitwPrNF6EVR+DhHIO7C^h+vD@aD`d#=MvCR+S^9zR6pr%X-}IlGmB<6|0<<*Q5Il8ne-g|TXeZ38G)FOU*b4i_+kdStvange2f=mZ*x6Mf2r5A&^FC@wYb)q&vSp@Byk$&VlXuvyQY-iNR&IBJgYI|iuWN)mL) z1RR42FZIu&dqugts18o7AO+Q9TlzVw{ae}*qx>hiRxxbAYXc;Nla7zuKPbmQc!1!$ zX5VwW)gCQ+G>MMf(G!WfYWXnOu$J$M(*6UcFD>}DQdK!pihB>FNFMJ+*^R#2DhkCB zVcL_U?W2495+LjlrPm=a9^2mTjM&(jX4Tqw(-nnSnqVnTW}s<`NAl{-1iK}^c6w7f z@Vk}a$HI+zBc2f7MtfQ^9y~Dr7sWk?Mkw`9(ri)Mc zm6#V#meVAvo>vad|6fS&f3rGdKCL{g&fS|!x}z4da3ZOEHwvHgE2apc(lL?)i@1*n z@)wq?TuezdA(W4d_L4{KeEyDKq(;%e@U_%^^*s*kwke0>a!CATs>l+-*cgx zej(w{bxWI|nSSQBn4ai7K0^4lo_6kdzOJbqqb1unq5lz9VfY6YeH|fg7vn%I3j7>i zLVy0BGd+k|kuRC8OzD0s;g5Fh#|Qn0cRQaS=~eqswo=Azu(zh!Y6Y0e?~h$wnIIcw zzsFy=nHaBy=k}0pY%{F*ls(4Km72|ne8{UtZTJ9(DHO;{i;H3vfIfSPM z@VIHXdOgXknTB3m>+3ths~x|$!&ZyB`%gy$FU75cD2>)FWwsUQUP9uex1Rpvs9CNI 
zTy6gy@vh4U2|yDXENmfR*ytU-aK#=yNCiWRjB0km9aZbI5$wlIR)(la8$r zcidZDD5kwbl>hG9^;;j3Py}?VNu0PIb1N5(PP-XxBtl;gz+?1r{MXA~IAoX;ibk_` z1m~H#$bmh+@A>Ly5wZgeS^4XP`A?p!lmzC!t}lpqpX0X5ikg(bDxVeN{!pdBvS%|V zifim}=#7BU$3hTNls?{~yyJuaf}o@x_xL{#j7zv|8-|?X~zrAY-Khx*|T~b`2JwL2G}Bn z8DEIi`ojuTlz`zl%+yFKmChXV@OZDtQ`jxI+LxM5mQs<;$`?7YN0Zw%H%aD9*6b(W zVM@-@87l;Leb`Cn!_&6V;PB*W?{*tPU)RTr@t*8|vG!XuVnAJ z?3vG8>zNp%@fb#)Nrhvx!F{t)%RA2hoMW8R&roDNC#^SrPX=AubGR8xgA;kwCh|En zo=FeoSX%g$woTC(d-w><>SeY?H1dw1IgGvkUi3ypU_!@WX4dqgb0OM8YM8p|Vr_^j zJv(iN#r6%6mVRkFSq{L1l2Cdf#b5D;G)}_n3pn0N@>I9H&aMyJaL zCsjzr>p4YHE*gp)nlIw@4~M`o!te|d-{qM!XNNhBnik}#m`teW_8!4l3erv@Sv+GP zu|>eq(+{VEIm=cp>gF33(xYs1IV0Z!KT=Zj#N1*(q?Dc{1(HScGqiew7>zR`mbnV7 zm)>PX21ETx(2kU(4v#HzG)-RrB73*XtlZFNt1MI-b~>SbVm1hkhiPqG&mkk0UU?LJ zJA^owV2CirGP?U8fUiLE;fe`73_%#YDKQ%0)AzOFpczo$i+Y(P=D6?Hh5`{J)%~42 z@%7T2bcUgt-qt?|%F$8oj<)B#6y-n=gTp!#W28T{H?I$+C`}AMC}mfQ*o4Gk>dz=E z%v!3Q3PhCVcELD0*=zc`Jma7i*bSdeZ)>%)b5Qy}0+j5%|{+lkJbFasimyoVECy@#t33M;+vYd~9?p98b`1 zPqAzsqTNhu;4Fi^kLm@A#fiZKTL6t-%3;FbTiy^4He1BAt{}vesuAdb@ZjjT%jkXX z222d^pr0>zYE>;S>?30!rh^Rl(n`+CDPya${fm=ztvBPw6APEK$c5jx8_l@`DHdvc zrSnqkxJ{#*`kP>sthYYumBG~Hr_X!%F$uH8bdO(s=yy%iQVw7oQrMdFTuOW9@2S5X zN6aoy3DSljgf8GnWrzs{4H2+p@|=5}ot%E;XFI^i^1y%Jaw~MJ7ZYarugHtq;SnO$RVHTg z>{gYXYl1YLjUh;A?{s%TQg+vp^d0dX}g+KGtf{0XB1h!Jyg=QQ$ z8rY#JaS}KA-x7-K(sVVMuWQhJpSxswX!dBFoMd~PYbx%rkmw2?c>lkA)|cI@-rv-} zc0K#eq`|nT(YWnT)zfHaWhRs~B)gdgWl;)EZkw5LT|xhFY`T2tS1xO~u_bktsDh3S zmBpk`NPWi0q$4*&ELqPBK6a50cst!?=5$4c6?Cdm`7kLJT*=!10%FQ;T%L;C~hHoSVOz}YDHe!TU;I-zZ&bw6wM~`pX%$ zelCiYM?_Q&AOdN})ckD#<?^1qI1VteNFAT0JLlqASeyQhLC_f%g)0lS&?MD$B>2J-^pUQef&iqbKHCv;& z7@91^;+UHy>7HZW#xr%#dItOZ(_pQA6Pzv9rHFvIa&5bM;1Tn-JUDM)W_KBCiZ4-f z_+j#xlp%%G%d2^U?#hMI_;)2;joLf++Z$qY(zX|?Ko((?&T3RHCzl{P-$M}9u3AV1y9MmM?8f4NLCyH&i7v**Z<#&vz5w>z>Jd%hHsDE5hRQDx=E&p}0F%Z@mA)OwiYV)Za62pX+3^kr~kx zP3F$1S#G>NeK)Sf2G?f<1%Z)UOyfG?9W~54dmG;0e2_adzu@E|BweS(>*Z2#Ew(ve zoAwr&sHP-1gH$@tfYLH}{|n~kZ11#~`l)Z!?@Xm2<(?>4%is=&8^c2eb)@(Ug42W1 
zJ=<5s=Q|raXH6;$#dt@o;wiSUhU~bB^Nk{zQE&!`BLMgt{{?=(DQRADlBjU|SBu-( z$oghq^YnSRlfP@u{L$#&Hg(Ek^E=M_%EfXwy#`4hQdsjf#wXXCG?&xLN_4$3d}jJR zS-_lYpGZop}{DAI%8Y4T0-uVWopq-KVp1DlFHS(oz zWE(ZWSr=>78&qLA@ciXFc<9pJ!n(gpv!=*g+?p&0k*o#p(c;m4_~q+T1(tHpQC|lk zL0=_dDjMh|(R`Gkr2aYU^VYPx7Tx^|HJL+~R3DdNXyOar^V&h{vzR*Hh&1B4J9%~U zKLFd^zO|d=!#@Ch>u*$JY14v3P2cMukCK85ZUohu=bXba6_~WQ>9r6{JygErCMoj^ z2i=`p{S8OwJK7RkgzQ2ZOC!%0?2rmh&KuAmI|Yt5NfHePe0#XdWI6p~U3Zj1BsYub zM_~Y4gnrITcdpmZDzLKjPUBb7qVY)E3o0fZ`^4^}$_-A$N@Uf4#w*TflNc~FKb_pq z38KYSj-jaR$JoFB3bA~XO)^Ldc1#{&Oia}POL(fvtJW`A=sTZQF-rZSVuHl$>5%Xc z+T~#-D{iQXUy+&BKE9#-9aE$KR2@_G^Du`_u`_iaHcSbLP{g#gj_21_V`YhwMJk%c zwr}hHknhzJ)!Ctk>#*4LbB3wa?EU6IMEG)jYW@rD=vUuULEXNV`_DJXJI|WnQD=mg z-ZfdsZeH|a6HIAgAr}OiiAC|it^8!?ck;=YEP`!~)8i4W?=q$zz9xG=#A5Kp`Q~aD zaiUPlrDCnFtPCZ|65o|jF;5`0JrHp?NX!~n^n}bBmR=^=CO0>W|Eyd1C9c25;W_-6AVLYQBq{4$Y3gJURODywy^CbITqC;)(CPlw# zhM$iy3xl8IYsAzsw-f>EGipIl@r<@>Q91iv{DI#A>vad(JmEFFJDf>eIfc zM;Ex=nX-t^LRcA5l7tMH9B9dOUcM|L>oA$=Oi))9@rfNJ;t6@0ZlqC_m3~cwfN|5K zeP0ilh-=T%y^9YjtG5XeTM5(-;(FL36#l1%5slD!@jKZe^O)q(vSnarNy-6>-Ih~p ze?~>zMDwxcQ*A_=>D4NG8FPv)Jwoo8c}*Qb1D_6bE}u3W=%F2aUzi;vVraK`SaT*e zR{NS+K8d8vV_k;wGe+&uWX!rNGwbl@d0L!{jW!-NV==^yo!QF}lfqCF_ztfP^f-Ux z;zIMp4i{GlG>Lu6=xTuc!5>4*dP{tAOm~| z@K!=f5?rU<>7+TDUbucbj!3g@?Zh6`W`uH$!Qln)zpo$wz~YkWi>5I5)B zZ37FpvCp0QZ}sF*nPm=`DwqILbe@3^GwZj$SL-;m3Ly!v_7@LcJEu7v$?&&KTn6t* zV(;$!0Sr9;_!Ox;U-UV<_0qZ8G)P|S(xQFTzSRW@A6=B=!;BXW`@H9=!^*yI3Uq$l zb(n+k>m$APG*%YYN0s;MFX|M355029TxNc>Bs1AbCjV^Ro_S!^q(u#8YNql~Fo<;= zP)5a({Y;Dsc!m?$S)-8dTEFth@I!eoG+cJ&z_7MbGZ|pNotg%fr1KO=GhrM>BrnO< z$7J4DWEDXqFv-2W5l^rRUEW5RLu5b6V$=Z!eO_o3(bDohKUjiQ)#{xaJ|2qsE>k!z z<;o^TL2K$6KSjP2wfDlBmsd1kQ+giXNDhdl^p$*Oq;dMCg6_)H6mtZ5qK2l>SN5{K z5$w_Ozp_D(Gx@Xr<$3itK1y`ocfZ-sJhE_FhO37Z9EYz7+tvY|Hj;On0wm3CH6|wO+54;O&?uNg`T2=xy&>PT zL}U_@9jcZ;0FNWHGN+}=g1tilV9;i;F}s>}a$F;yqLlb} zYmElAPqB@h1F@D-LfwdI8P(RNgKGqX$E?6wko;qoiaVrY)7IYNSk5|oh=E1sgnTLe 
ziZ=_|nZnFDuB3LDjx_9EAc(w&dZ%0`xko>VUo-A~fdgT*DR@pjPQ`*c0!1JwC^U}Wn`{Yx=Uu?Lhj%f3uDF4~s{x>M@f1+qlKQD*oA zIDwPBy@3xOdDG9mTc!q1|Ya#g48sMV!R$bq{ z{#2}FGc+BUoB-ai4{>^tqBu~S{Al@`K6 z7Rt~14P(#EGY6YjC?~Y*HTy=sqxLT~`_H zjG^A&OOPyc4)DQP>I4M%0Gzh)&*CnTpS;!JUlKP;Tk4fp+_T7B48<4-P--a@ws?i8%^5)5ao&;GxQHxXpq5mR6MG z2f0zgmZsEdgCl*Yg#(g1@2H=a>&yx!cXaM@=vftNv83VK1lJ)$ik|ZMO;YYc8uC7N2OU z52o04cwYfTq5Au(6on11*cI_RDK{s#Qb4UT(b*hUsrWb-^k<~6bo#96?1p3+=uTv= zOZpL6T^{}buH6q>IV5;{8fA|=c2j=%=|ztDJu-0sPCQF1P(@j51{6r`H)F*V%b|A8{^Lr(N z@4eg=#JIQ-GbaR;+Q%J*21F*LAxfIQ#1Ph~g)+_OwAC4BR|OeckMYL_jH>o$`l}S6 zqtIm{pL~-OXN2FG@wjBe*`?{Nl^e$osz|-BcEWSpz4x>#x8_Q*u%@Vm45Wyt4}_xQ zEfFg=qY}82i}XKT$2>79hQE^>ujn|mDYBYK)pdm+c%FULm$AL0T|8$6I@&aQ<4qFL zhOmIeB*mI4t6x+s4O5I0?)9Gy=aJCC+0+B^?rtlLnJX7S0z^{g?w7jVV;~ciKY);; zz;l@!@i%nU=$MH36c5dmSog1PM6JqaMb_?UUAdNiD9Nu9`vcIz1$Ffwh$kRY6r4P< z1QvhTF{Tb^AnLZ}3v|z}+Ck%eU37E~o>U|>G-(ilSXA`YGRzu$VVGQ?&Tpe`7xub* zm;Iwdt?RAU2f^sY$-}oL=OTBVs=qea-Uya=We*(fUkT3n=Dy!3`TdrtI{WK4)$RWj z`H#i^(_sFO`%CHJ9EzgwyRTm>Q3~wXb!OD$C@zw|L_v3Vg{8D7P=(wv+#`=UCGI{U zcv4$#|4gJ>64Cy0i=%W$V{Ch4b7oRwa+#@W+6gkFG4rl~&wOW8X3jK))eoKhffv+~ z=O4Q-ZgBnpM%?L!-u(fLuL&X;tC2v8qtGYDy5w38tM~K$r+lMi)@GK{v?R)0@Vc9h~{%1u9Qrf%s0tz)f5(i7>Y>?&Sz=BvI>#b)FTLU=*CM&8QGU>)~8)zG$NTY)K6ZMQg8u90^|nkwyzDeNvRR>VB5Y1!{kR%rZ(*>uL*(ap&3 zisuD&*93j;jG&GQ-EG39V2QNIAh}qT(iO#0(#8-&+Ywjud+te=do|PK@O} z6_nqmhsmya0szve2H1TLgsHb>BKD{ul3#5{p1$jnFvTT$+AtH%@eBMB!1k2Dk7qel zdbp)_uPS5g_f$fy@gZ``pZ3`w!0Ln2AAr*RP>ax`9b3Sd)zfZ$)_RCb$KIG^Xe7RS z^|qb~KnR3US%FH#8-)fC_z@=amb+xGWmXl`6Auji+Pf`ruM?yXjaj8fYL;FHdyrKTvPf$z%BcXZJ3lC8}xo^!2*Y zbG(E^&$K~H``tI*O1@$00!fPeb0{+cJ@`&(=h)yj{J9Z*^v7(5J;pBh0xGHxtts~4a#MPihhO3J$D_;ZcM#;<3=cbEY(k@32mr9-)R7j%W?BRk-@F)@iLnO(E zDF8Bb+TF!{=;EF1v+_?rwG*B|0S6U3SDRZBC;P~)1CeA)t5E?|U|7xBjk4$aU8;ccOxsLE`2i!Rhz z*VD{@_j^^%fD_FCpIekI;%<~RD@e1Yamx(0u+lP+d=W|ui2w^kW8`;ncjet8(oiT1 zA=1dQw{@;EX?livecZFHsgXS=R6OJ?DOpT*NEw(GDNofbDVLH?l7Zng0ETciPJe#t z=a%bRbDVs`qlP|A-nGu9G%Um|SI8uxUd2Sfe9`jc!!*=kkU6h1BqgsxRc3KaR3vqq 
z-^ZMQqb+tLF9VdbCDkK5VDA$9%)wlx?4@OjF<&t4EMTTzjzZMq%^ka@#3o;X>XR_P z@O4B=%t1s8Tep!#&t-Hze1ef9qkQdzHI=g00nL4eY}3^$MnIWc}5U_sqLg9 zgO=pxcEaBBNE*wOA;=-8vQnTd(8ro=1;$X{MWmlp{niV{_+7895mX>G0%h?c&@;27 z>2{WdLyP3q(nPA(ksQi2%koNeaI>uF%+6go2K7@<`h7gYXmpaWH`g%4Wl#13GC)(bfSkoH?#DmZF-c z-8Cf=GU9DE2rhq#;tLU#A+qE}6A3Z305ZrgC~)t{?9#%0WV0(-7YfE$9`fIeDh+o@ zS?2Ryg-Avb6a;x7ZJQBr_hDc}DZEfh$-$~9QOjnv=J4cUP-{a|2yx?rK>#Rz-hlh{ zDbmaN$=Kao35;`!F!!q}ftuyeRnBln%H5IioFmFhVLi?)_?>QLs%n9nDSc8ZA&G1< zef0Ph`Y?hNG3c+p(^>pVhme~XwZ-k;(|e=-KJX8KB?pmPDb?hI)cGI4Lz?T+laAN_ zd_AUzF~7!(Nro!)kcnFFUze$GXcux6T1JvdN?l7or-jFsoV%tk)p%+Qvl1xSI56fb!hJ$0T2T}+uubBd!I`4`%q47Mwk z=MfdNRR=pDbo`;qgAG_aa-k0Ug7u$JWYC_wW4n=K416q6MMJ6dKTrF z-AB3qGotl@^5CcAr`{@!)s4kLnvZ_~+^sSxs|R>(7Z2(`A1D!*Ufc?Irh@?YTh&lT zTKq+jyTAG5y`s~iM5vKpz|QE;z`6T$HhRwlxmIEMRoX!72??~;$W0m#-^R)9MbIsO zL(U(-!hOJX6N1R}2)e*q^ClKX&RIn_b4)B?dUTC^?r+@x6ZaoQ@Onh{>4Ll_BmhSl z0YF5!3%Sbt7X9zXO_&gwjcKSEy~M&Vyw2wA>x3oBJzTZx*3ixS1bWAe_om|R7rvgN z=sUjXYYV-*_}Bj4xZm;mPtk$^0_gsK4I>e1ydm!4D{tKU(=p=};F`K0@jn2^!&k;g z5>Se3zPopWF0F;uiz@}Z_V<$v5-XtIK@>0M?!h$!(0KgU_x8WJ^~b^>@!Ey;F1!|Z z$867>kvEiruiDz21j^s(89^Lb)SP^Hc`v+ePVWEa)86%kre zFn>;~iPSD5sK_$IFUX6?aUuy38sJ@vHflQ&uI+CXt2`8T#ibC zcK$KcT4AjStUWS$BJ9_foh?A&BW$W>cZhL(+vkD1@ln;=;&+n}7V!5oosyDQbNmL27v86Z=+#c7zy=CGMs zp!ORio+JYeWLQ*NIz3R1D(EqQlq)KGmS~5nC_`X=-q=f*7oFQGkx11#{E8#<25_&B zLPNaJtSW}mBq>)#lNmk^AS(Z}^Auhv{i_}TvR%|MV47HT=OjVl=q)Rv=lub!UyC8C zxlQcfl}}f*f9w676!cyzzTfG)#H$Sin~Ve#hzxGa^~tkYn)^`qcM6;Ih8!E_T)L>oB)b;u zI|p}L*wB|0QqLRO_X3)E!Xvjt$Q7_BmqiUt+4JzYNgjURqK5FaI5E}ry?nfrZ7 z)?;t#(g7;pQhDz6@ROH^sRlo=#QH>2O0HY|ezD1fR8gncHr(Uu9;A@EqY|NQKC9!* z6bNz#(w4&fywN5@)|=j3`O1n~<&WX+yEPMV@8t5ozBzbnSpPMbptrLi%K-mr#ES+! 
znKRQTT$yTze(~|%?R?jMOo5Z$w7l^fiEvF`j4Q#OA5YjjYXE_ks{qE(ZY&L~_SAg% zV)Kfb;GyZ7`m%rvxRlbr`zYjjRXypmJi@ zVF;b?s~eA$e@!csTYm@3|MujuP}18ovhmm(EeIlJGWM4GAz=Ou@7BC!ea+@EkTbh% zne#N+7MervvJx0rYK)QP+n5=oCj!q7V9O^no#|h8>g&fJlKaf1)1L^Eym2_(Ny`g z+;RYd5oYd1^wH}j(JqB2COA>+PN2R$w7s zF-&Y+t90e;x83zi=+7lutK1O%`5$1W_ot6vham#Jt=?QC01fUwg7RboH~j%vn6Kgg zjC3Pz!_5Z-Hm){o;Iv&oK4$IX{GZm$&Naigb03~ktc>t-y-u6#0cX|F`9y<(4=&6~ zejFyV_5yzZu@~>DHSw&!T?NwBH*2|~+~*0!gda0u>l?d8!m%CkH{F>DU%ZC0`dc)= zu8bnCP@EX4Jk(S)0|D2n%AuMtCG`)7U0ca3Mcy*qdXy5|3nxgwG?Gd!+nM+}Li;qn z7-kSlJ;r5)eK5l(#AGCKSv)U-xoW>U03Ob5ORp5P(7n)yz3sQo^822Pt(aMfJ1A|L z=l=a$bVzfXP*MRi&Cr|!s|Bm(XorEQD%vHG%%Rxis9uM|%fh+f^;V@$HbUBjsK$LF z4ahi|+i0LorqEk6z!P+N1a>{u-Ziyo)nZE#3aINQ(besZa6ZeyaYKKt4Iud@BP6wp zl3aj#WWuZAo>HypEx;}ELd-A=G69-7?H#vNxUYq_x&cz5$v_Ep!cElbA%+cPWAB)sTfJ_RhE)Dg`y`u z5x?{(bLSy7MOGBZ|h`P zg=@3d(;)5b&#a@|XoT{kk_b4OaS>vNRJcRbn)CV41F)-!wtgG1{d1&&7FV82A2l{iqipngd;bnopDt?{`Sm+ z#-ENb?<1Elv}t(GEXZ5*Hzcdmh~%9ZaLGl1R^&|tgh#k;T>tc#{(5o$`@`pLytO*6 zO$POPyK@@)%PUX9Eh_yZ%*jJe@K~!IgR+of_An6};wgGG5X?_f=FQ}Ra3FK@+4keB zw6vbPEN*tL>`V%W%5J><)AnNF@ix@{?qs8z^MJ&xZi|it`AE+@ zP}A%pmiWhd@@-X{!*)4g^h;HBg`-?%QtdL>wi}}@4_aS_%<1hoqm_#F0nSkW!KXA< zvC9z+@%m9=EJIw0$1kC2PV=umS6C4~XrgYcZwSmq$4Y3?Jz4Ap?yPG#Wu%4g=X{-# zWM)qJhLE1=Ea7xa_!MLM#X8A1NrDn;`%RVaQ;N-XFFsw%4yR8N!~3mL{r8s_0}?Rn z0)N|Nx8sK0!25i(c3Z_Omu&j#?R4(z(;rQVDjL5t{s1n?IQ{@=j}9#n`QwbMAN(sb zjlvw4@*VGx4lwb@t6UH2WCxbkY7!h*XmVw>4`)@vzUjzNuL@-JF*1rT@E>+CcP0mD zXh`$nqVXRO3_$l(#wkd7Yj-uQ<59q|EGi+Q0Qn5`|4}W0f;oE+$=|wTt{yf#(l3+) zo}MpW`c1tV<+{J^{%8H1mP*BVT=21l z&&IPzxritwhH0({l_7_9-E-E}0+z>4+=mK!ufAYBj+=O!gg^ZYWIbWBojb78{rq)^ zv{;a5id7GaOuCmjt~pJs5d(><%SE$NmMM*fz<0mCtD3=uIU>)u-6pld!hU`N@fE2s zR)1nYKTRtloh=62$4a(J-jF>b!->)UEtmnw=Qk@h*e6D@p|#3h z1D|q0AbFfr;p;NYdJ#qZ05B0)#-`o!)vCq70vSB5(1lOZI+N3kuUEewii*Vi$OsJQ z2NbK$T9arZWmGUGxIgxJel4FZcJq(1`hregu!{d1q3gZAQ4v3jD({)=?6lB8f&`$@ z5d~pnSAZPZ%wu-wlZ&i7KDZC1V_XtPoTSD~dJS55)c|{B>{r^f^BAc^YrIl?ZxR6}{tj1vc^AYi*70Eq z!7I#cEea<5hG^A-R3QFPvbS6O>Y|(Z=x2`U>77-Qx5&n; 
z;*XxbUWjt?ktVb>|J7l@k1&ZIkM1;0lfe}cDK;(>#tI*^pWU{1 zfXo(E@3q7R2h#9{@TLiYBfdu)DzTLcUd6PkJ+fmF>Z&8CK!RQ1^xt)$h?f%p@RvDu za}uwdGOOi?^$K$YWvo8Ec9a~}D5`%6x^$wV%4|ds1-1q?-ccCwiKW^E$Gn{WqT_L$ zE9jc6<=F5Lwb@N9mZO}{ZSH(FTEPoKcwjbvl#17ujGV@8&-OW44tcf-GDU3NNMoU2-xRfW6-0q z$HPD+8L)5zt5WCdXf`5JQO=6x@^X36T#sci8JhbK4-y_Q7Lq0I4-9p%jJc9J2!K{L zMJ7|0GJ?m-N1cf2jX5f?N#4uRAy$AMW7jMBwB+q?sWlvzez>ul4OecL_qVmHnq3X6 z*p8O@@VuO{oityU%ci$p5y|E~`5)_EdtaeXevl-n`0%qgx?Zfi$)YBGth zD^KHIeUHl25%kV@dL3Y;Ll1CbRBl!81H3VR<`KgG*f^4mHHFjCH zITRKdcoY<_$(9v3rjz5uJ$y)ASCq7K2f#A{I}m@$TIb__DF zZ8FMZ^XAU1dH~SYi%rnvKo26K*=j7KjU}VRYK76iY2Y*{Bc?Lcha(LtdB9o5SCfK` zV#+C;$Maydl(s9}xtGh*R6kX+&OeiYf_`D(#$W8k)Nv^mm^aGnI;f_h)|ofk%vH0` zGe$mV1KscpKQKO4Ox^G%92iK9Qam?;j}aqdVg+?>#A6&+;1tZTAw=dnP0Mk7|(jS`_ zf#JHXi8|ND?-oV^Q@@Buu(qwU%!E`d9&^*GjJL?nh3Pkmy(k&fff%sDOMoI_+Lc$v)+|t1;sqe>CqmsmK>&O&l4?KUaMyja33+O!zO-|%hZ>3FS$@$ zD>C2N7=J!uu9||x{Y*&D$1Fn&Qh=^qK=XAc-(uH>nRIoYH^ypTYP((dche-6M|Ot^ za)c1kOEDLVvn21Owx4Zmpi4|*)hO8NI|1DI?X9@CR}dlq30d7-rMf~1@)(jA3l+2a zSwcOsR+*2eJ!CtBvR*IeBTyaPED?#qreb2K>+%`<$ zQKvQYFy)&&o}3WNtGB@L&tJQWoovyoA~SY7*NpKv;R%u--oI=4-VpS<#+#rHj4u%6 zj?UwOR+b!_&1jGl=BHd7eWzgBx)bTHR1*yu3n1O`#t={`TCKrbAP1oAuk>CDec{@r zY8jVT-=Su%PMYx`PDV?~$#Km{zh6&xaG_s{L?sg*%Y=j-=_tAU@XYo%uU|4PXL$ac zzEpo}E?j^(4;2I?Qse__$r6ug&#Dxw^1;HvGCJ^BVUcA=?FVspF7XhTIyrY)p&`in zbI^%~W+ZDxs*vaJjI0h~S#K_`AR28?&UbE=DNCkmkYtd%eJZ%_{+KtMj}`1GxjLX0 zFy1uDTniBfsLc58P5>vs7Z(?3nM#A=K2&U!C+)X)Xt*SHS1}9FkQ37vo-sIMgeDr|o!dvNTMZlD#WgL5%Kq2|?0v^oJCFNV@O5O6$tu6lp zh)A{iVYl_U?ID)kLOSvo9&2NM`rsRPe1~V{(Jqy^3VcC>y2qcZsq@O`DVePxQSRcA zA7$HiH~%-7 znO=$2kC(V67lc1Vrr&*yH=~Ok6PE^kV!coP^noyqgL;Pj>yTLzp*lT7CXeTF(Ju1H z2(#Ms4>ie^if}czA3QfEq>pi%dg<<3r>hKxrEi+)Yh!9+?s|zaE4wxyEQ*eGA@V;{ zcrb$6JG2p70LT+7(BJUjK$U4E;W=2*T(O3cx>hIoqPp7}Tkd zX)JWyxl4*g^_fnLffNIo*ZeU_P9=-5LA^?kSF9H9EVHbRDs;M$Rd+lKMd7r%nKjPF zHvVTf`S<|iX31M}Nt!(AI+e)vB`kP(keUduiaM_fk`{3f;!nm5)+4ql&QMMPg+X+m zkFFa|kuM!h5Lu$~-FT|$E-az3u(Ky=;qA~it5iW5(RtIr?^1R;{C#6IDd%((N6u#_ zh<$w}TT*h8=Xwkp 
zwh{c~=%8vIg|fkJ9>DumM<$~P;fJXp9BM$+A>YwJU%W}TXVz=V?Xt+j7Y@BIf@ofz z63rZ-rW%<&c*l~clt%$*`r1`86`x6#EZEmtfw4G`Tx-uUE17YED)Ox|vFk#otaXT} z`x{iu2?$O>>wSvcAp?BMjCIyJELuV?x%@;z7~}=iAxKo{uXBeNGIyT!6=}$&9lk)D zDWh2qQbEtH6gs^!0z%MVrIN^0H2j^2}Fa zY7@n3!y<+k1VY_|Fjbt*6K-YiLAj^E}L9;*Cqu&%`mWI1?k>bjU%aTFWM3QR%@ z%h68-Fc|0SB)!%+U8GsH$o-n^<5)Yii`Y!B_yipd5>Y-bAJga_#>B6es1~{sF%LG) zprouYb--glC8h4FCpR|HQOXrIgG8)+T9+J&8>&H(lG`+v>1d$fz<1KXd3B{^TVb0z z+4V0sb35J&c2*?n=c!ruUf6a^7zK1T1IftbV{~Q|Iw}Rjw}(`%1Uhh;?oYK}Udf5j z((<;hbp@QeEoFh2)$U@=mo#B0L--(!un&b%!^p06aL8JOL}a4tY$=u{03>(GSBagS zNro9ed65TK4f2Vuo%P3M4lmTM@VpJ{$s?H0(El!HLx|QR5+%=?sTSyT0OX0EAwbj& zrV4-sAMsj2LLQ-~7xDG`jqU6(wh6`36M5m?7=xwudVJ;MGva%WOHVu))$`F2lbI!Y zMY5r10WVX0x$@3x3^}Xr&vfhI*U85` z`eEF9?@_V!G-c|PC|FTF*e)`lfi|Qb;V5O z@2rXnk~QkFN6dlhpLN{}(E*aQWms#&_($VS%5+xb)1JN4&oat&doo!awU|`~?1j;e z_XKs@{QJ;AK}njJ$=anV`=Hk1b*g7&2h#0rsf`6#gp|wJvVh`uBSS=9j-?%%Vm2O_ z(?UTQxi5;~Sw(FoxLq%3YqCZ(=~-4Z%W>D3%wQX|l7xyK7G<9C-;b|*^>9g4iSZR> z(+#wMgo?ybY#KVaE|PyK9Ht4%QDG7->VcNEW1`rxlJ)&_mtF0bq7rfAwP<-z7U15J z`s!NbaK;6VqCZMoHbmYJber)jR^6ZfBA}H9)9Ojcok6YmdO7==_%DjDdY~f$ve23XeyhB0c*-_Xi zt3-vYVwoK=bWG7?738nb2q|NxZV!B?{$qZHg$2tCOiqQSvrgPDP}JUnP9|OQA(s)> zX0!Jle_=xvy>ku0g~PiyQUY$%@%d4TasKF8L@F5F!8wR3Jv~uH ziakBAME*$Bk4nU(e3Gu&2W#W{oP~&k{^KO! 
z|DNjIo=}rSKtlcz9DX8I=Ie|Jde30%-U}mYZ~KG^-cr_q)9p@Edz+j<3GUeg%v9p= zVXT*;TS>2B`Yqb0a4l{@L`oz6*exjhlU)@2Yw(ErsnF;p=kIJe3-{PU4FmD{<(UGN z!gnhy8!Ig6P@0LLjKW$-suLrX0?ouuT!9+&e92vqoag-y@j0xtb_XqYkI&FrKWw>^ z)nP`K1I9t)!Z9jQSm2jb#|yN{H3=fJjDhu5jn$Z)l89DaU7`*HFPUpuq`H+dKgrKhJy z9xaZJaaVO;I&YKS8jyY_6jwvPgaVjKt5RiMzS9Ka1GOiEf|%-EHq>@TMg^R$n5+z2 zEa`g>F-2_dQW|92OX63#(HNsanX!mnn~)Mv6b1%vx+P#;bH{&-+X?m}XN;ZTJJM#( zr14RC!|+S@VR}|p-dL_w7t4$>^52#ul=HTAd~d1bhwELWG!SwKoVt^Sd6ExIW%*F) zNQEeeMk%vU%OzqL*x;cxAxt~Fxl6JHrmqJ8^5F~s3wiK&rTCn2&V}PjlP~!=85s++ z%TA@$R9C$Tjtz@U_KZ8;9S^87k5QKhPXK^ZB4VPI-;gTmIOeft64NnTY6Y%OXVqln zg{&<80BrP-Omf=2?L}E%HS3+KD}91rq`uRr%)+QbMb50~nlR_%AVv|K(JvMbe6N1G zLpN0yn9OG~*pl8h#QI|=^7U4&mYzNFk2$yuleB(L;-Uw><a}0~pd7eZb-@EuO)wFBr)s zDk(&1BZt||DBxyUPev+U*piHUMo?i0xs1)Zf~X{(QP|5=QF_K@Jxitf7|qsXm$h8W zX8u-zNWSM$(E)tX$Exly?7aMWm>T7$(z5wl7XsC_FP5X>Skhcn_I7J3>v^q$V8d>} zvCEjKa*L~;r?xCFM}*6?PJL4he0k1#j!(JSH6$c?gfrOVtW|c}~<&eSyqWDFkf|h-BFDN<@9+M^^OFsfj>IMBAva zMZ<_rJZpX)3-56H(Ua*9#CMEJ-oOQKWh%-%yE9lKZ7pL+?ycv*a2a_6_@gF{vMkD< z0YwmD|2-$mP_j5&^&ll7S%=z#4gtFyg4A~lAeIVBC^dT%+oQm95`<1?*oAc^=UGWB zE#$af*4%<+L$FOMIizQ$P-QD$UAhiG7hNkCu7gM2>suuUF^1Y4;+s3?$oH}Q=V{b%p7nNgK&cC-W7AKqb zZf=bz%Lu}xUf-roN)E!2UKSHc99o&cna%OS1)K{dR9Jf7(F*5GM&WoA5S-Vd<17Fs zcMUHnzw8I%&i@kSWr=zwBkF9@|V z3x&6wQ5&>e{&Iir*s?G5;)?!oPPF6s#h^rh#-i=3OhJ1)ER_2s`iPQrm&UqCb%~p^ zgzttNYrhjzt-1}bsW$eSXMxt@Wf`Un4OjXCq;|wdu$~iis|==#49)@sJEz@0lOlN5+i(Gg-AOA z<6vz5|35QX7xl+yJ<>F|U8I_?)IpWOpaUrPy9vvOF;d#>HpRc1Pl(of+X;oT1!SLg zmEDtHT~(vG>6hcb;|X{-k@-e56`tetkKpD^*-?AO53$w{)W63m2^67Y4uc&g<(1u*hIqr#u`_!g^ypkm z@A&4b)g(^M8v1@Qb;HvPAsO6yI&GLIMX8K(x z&G#6QRMhy0@U;hqLGeMabd{jIbUU(@T6}zB%*t6TO{uDSsi}&Fx}04!`LS{`aG0}b zXZGk<&}n>;O2Tu)5yMZ2*S*j9)l#!~2I94yG+QKP?E()ze8Ya4%z;gh&XD6m>K?YB z)(b?pIdw7n`uiZlVi*7Mm>`X^BJ@rOg524EJv{n+Na)SqGJ0CFkSkkuQU+ zM(PT#P-Q3-%G076tg0G3sL`p6gHW#>7gwcz%<8|Np!al>BT@lCkbo}2{w-Pgo4Fk4 zs{AIG`{~HH+Lx!!RVRl@5vsZ$IKy*%OP9(0s3_~VVy-Fxi08AEUXjON1rO-8y4+6C 
z>^HTuujJmq%ArD81m36S%4up`;CM@DIRDh=ei&9{P0S+xY&5?=j!QDJQn>tLyd%}i zl-D63oQ5BzgrPXpe7=gz2Qo~0z57@{0U@JHh^|GTEDZB+umiBx+Lxu)h$7Gg3d5xjo(x+#n*EMvxjv6F)bBA;)6LIi~YNQR(^M`9g zi7k8H2>N?^nD`lYzjo8pX)>o)u7I;D_c9hlbi-uCdF9)gK_CxxCJ_W$Uj~9U{UgoV`s1 zoyJ(!L?`uV!whSmQYkY_<(bCRONxR>j(`lk$xM%n$z>A@E6faN`BUs!1LITCf*_4I zMgl#ONhvu6cKNXa506_lPseySXDH<=$Qiwkd0Vq#L?K=W?mh|@a{=LLs`0VL;BU{^Y zt5pD52PAgLHO+fw?eyRW6LncuNP09QQju!8BbvI=F-6rMI{{bC(2PYH8H#G=6GRjv zD3t;mpdx?(c(EbN9x`1=ZpI5_@ZcS8!R<1+P4oD>cGajJ4J5hw(4{{7W+b(!zD}=5 ze{9e0?_J+xlp6l@Mkd9&g#`_p0nT^FLL89`WspihP%XBiAy>9=D`lFe<%~m z#`0}^c2_NT_vrC8M#saA1$_nwrwa&25UJRkSnl)<;Q}{GiKTxO9 z5_qEB*_RO~KmAthEDOoG8SX#k<4LP}x}imQ?N z)wyF;26X*I2}h>cldYOc@vvr7e%7pNG+L{&Q#cX!6BRPWq|3IIYKL|R4^mPK^rn8x zyMhd#UaeTyxu;W2rS2)G)Mla41dX&OqD~j^oFUE~UgZu-viGDPpucpcv#1|^Xq?tD zW@b9VAY1RsHeS&JTRZD1Nz*XwG!`zmi>+>Lsc~y*XmX-F zd@v*7V4nUhi5`__C2jKf9XqZ#Fq~VBcf=)SD#Lhkpf6i1SUq0NIm4t@?~R|-0uK*c zg@83ZO7KL4F*e77e0B3$J$ z23$d1GJQ*Np-{)!H^XA&IUinGbCBmjR;JdS0eK2(%#8(?TAvLtFf&4v+#hV<^ljWh zC@fga3_78ewSW0j!~dGg-|C6lF2-aSSdMQY`lKb zExpB3|9(Sles4vn9-(b5u?JYS7T|z8aoDOVjvLc^aX~-s4+t3oU4o?$&-kJvCUMkX9$|%*xIU38uIy zO$MN@v~naUY=D=_@^}@?E|m^~<}m7I2Vv;uVN?e}tL=X&S4dEbKJ?2%$gMEa^hYSF zBIO7&1gZG=&moPR;~T+Jo0H_bmj@nyCY*1#183Wo&K;9qA1FsWZ2Ok2cu&Bcyp?;@ z;Hrxx`0NA65kc2R=u8uaX#Pk#vmT-;;X%^QqHZ3V&|!!qN%jRtdXN*1FX^ z^r?}xN-j1m>Dlm3{R)Wey$-~~T4yT`eeyC_Vy$C+-{@Ca%3LOiMFxlcji1b)XsloV zf)8QaL12Q!}Zh8XE;q741E-obS_R(}v4*i)!hK(5inc$^iJ3J;C(T7%qe2mViE;&nu6VOFIV0sUlm?w+<0>2JBV zgsmpu;E$GjRcl;(C&8nYLK4)HwHw7gTj9jZq{GmrWAXtlG-qYNBt{qRNCPE>1JD4n zw*}MGf3#fg5B>V%EhGJtulOGT%l^Gvw-l0)st=gc(^05oI$O4zxB7@)+jDUaRcmIz zq3M(aF}5B*B|twM00-?RhFA~`!iWBdh5gosL9fSB z*c#>EZc|Z;Oc+0&CqS&QY&GwBta#ioG;dWK ziHLKd&wnrST|>b3Jx9sb-ttlW#hrZ|DHH1oyuN@U$|dqsW7%l`Lzj@(S+t>EO*cM7 zgg!`mCo*r~aoHcGEl*k3tFb~XT>tJ#Xr7H(?w1!z+DP;9v_RIR^$i~F!=BTGh;)%q z4bDUMx!@FyZ{O)Nl5}$q5u6%tcv^=cKS1m;bevz!)4<@T$*#wR2`V;uXvC9IY?wz# zivew@^O*;UtYnMa=8?No>*&audv_Ia!8|96N~*woYlZ-8lyXq?8ij&E~UPajDg-n^2$=Xk#>5hZ~R%w2R@Tm 
zPM>2ol+dsyJ92JS@!J#~wxq^+!`W&Eb<5*3>Wv-boD<|(Rl#InDj2-&c?ybYPBTuX z)pxPQa4j@I+xm!w;MQSgB^kVgx7Wl6(rn5G@GTE^)^%xSF#V1y%Fh}!8+(*C=*MMn zZwtDrS=JzY;{}4%txlWl)+>{Gb+MRN#s`fawh&0kcER)7CrAl-TL!QBCJJyhHj*MM z8bZi#0D##*T?f{-$6uWtUAkHr{6D!I$2lvh0N+iFohXLp$e;C>>sLm}tfjbe+d$t_ z-!%4Jr2CqVJYJ}GsLBq{l=4e`*iv`L6{ekk?~RbI@7wGp%Uyfz1qB}K5~zWrC$d$^{70_O*3QH)oaOu^Jk{Bk;I;Q zw~Ro65gO&p9=_m2P@U>!A1I2LGsQEsXLdZ%pRhENZ?du4YYIT{fIWQ{CJ3ykQ?h{{g&F`0sn=Jm(g`_q*)&flI8(6FWE?Fb|JHWcBMrccLv){j#}Z=+ zuk~!i%Dwol*GV{wTg@}UH5tCcB4}Vl25KU9qyK=q*=w__IjAERCm!^?j?*wvN1xJ7 z(6C;LH8;+g`Aq(v8Jcmw0u85?%#69-d|{Vb6^R7PdD$l=3)%dVUdv z_*ccXD9wi;0+b+~2FFGYllDMO$1zR|eSX$~v6JDu{v{u5;pFf*Kbr%necX|uM>JFV z&tgLaN!e55G*bsbF;J>VWNntJPA8P~-Y+}3LC$SsDmLMh@1H>8PE$AytBFcoBNB^N zuSgphV%iU=sT>|39}e(b5E}jph{>p#-NNfIiG#l|_kN4ww-va3mph&|$A?5(E0Fe~ zAM2lh%-`u53$h(TwgQrZYwFY0;a&ZPA3dmdb#3nI_G$)85c&j$I?7pJs#JiZXm;s% zG<$a>5qmNByH#iN`q>kYBC?4`j?()1jf=-LJ2_?mGAa1TtJ2#y+hnKKSnAev8m0BD zdd^Rxb-i;hrDtj`E~j4o=S5ZG+8^O+`a9IduKI2ZFNUq}Nt{gXzLm6{jhbd!$AT0LJ4+b6HhY8(nWQF;BJld-#DF+{U8FG@o~=d*Gp@P=c&p zTXj?sz_0kXy~;Gqy00a^C+OjWd1nDPc*(i=g3jP(d30BhgJq732CsZ|?4rh!KGx{D zRe+&Yz?|%aP~#-FMxZ2mUjP?J&&b*jg%LW>oG3!r^dIRdRXyT?tk&BSRYZllxfw26 z8L*{+l;1G2YvdO5tc^jtj2L4_8ESSg-}@kucMB{`#D9isuHyAFS?VUOx)ebM9sopW z3w*2ZlEPDhM1BtUeIi+9N+);Lj;MX7ELLoSlER$5A)$g0)r3-Mr%gZC?B1ftzeA@N zB0Y9=AvP?Vq|t($nx6|GT9N{EHJDiQI7K)R-uIi(R+rLlc|2YtQ@glR8`4S1!IW+q zt7B+{>{nwj5+~>qP(Wg^iVH2|Li9O(q7i#buuF1QBV3sx)6p;rZRXDQI`xw8&zY9$ zhYdPn0smRrBp=|hgW-z6jTW4Ets$cxmdJXBg(xadHOg6}>}((_TA?ZR%+Dog08b5w zdm^inS=psEF0?{0-fNj(n~8!uXCUUUw$OjMQC4XqT7KD5LdQLixiZ5;Dw z{*X8Ib>g?Jnigv5r?+mqfB*K1mgDXXNrvAE^~mtb}&0t(_6YO-Vy zzvPPjtR@pNA<96lG$>i8z1ron8kD*-UPYMMnc)BpfqK?9FnQ2 zscu~(udIbLjgfhhn#Slav(KZ;ba+P%*gqk{%i*B-aI#nO_kQ)DjS`8zH5+E7*Yf9l zzH4QI{u62zx##rjegDp4Ox?&_wX>jl`0 zm#BgKK`aF&rbUqH0cR6l&QBWHFgVn$Vp~8NSZI}7trs+II72a4+bw?}*1=;&7y}bG znF2Gmq&XFzB^Q?AC;{-iaFGp8nNw;|J15*!%l`LhQmB7H6U=1Hg#+7epjgF65UH>KHT> 
zYQ3~e+)zDd*0~Yt@9A_XlvpJqx!8ysxJO*!r4N5}faK)PHX`+x!b65S7!nN@%pZ5ZD4JdQQdVi5CArQ8!O~OJXW6?rQ{oX0Q!JJOx@P^od`WxY(1dR@sgp&0DW1;rngINLs9#(an-AxbqRQn$$ z=`J0bOA4=rV-ov(?%zkqNJ9*>}gW2l?Km(MaNImUTgz?6IW{IwL?Q%7qL*Y3#d~gQ7Gl9DJJo z)9A$Tk%eD;3q6hmMUD9) zD|stvPi4pSMM zskM3n#2X-ovKoN09aFva%<|>UTF-RG1kfRLgMR#wStP8w9a*XL$Rmz!-s#NvvB}wi zgV2*7D%j(Qlxej_OIZF6JY}o;E2TRxPYVV-C^Z09X&)Ml8WKP-0CCCgIk7^sAwCWP zRPFtyJcC~XhplXru#TjfSOL}%&5s7P`IXLG{E54zLnsm_#^woem1_D+48rMHrX~%4 zpDG#}PW&%MP|BE@av-7YmbOJ>!nE|cwWjhRP7~lERAaB1mBPz;dwRKl6r!pcG#+1T zIDh-U6OLn}O#dH=R-yw^5)U#Bs=GfwWSC=H+?yk&Il)|GT&og%oaQ=~>HltTJqf z#r#?6(=JAdB%=>WYHzb~%laLvYn3TZR!T*>6&-X5a(Hf=JLX zB0rR>tv2ltDRDF6mbhW3pNSF zV+N6126+&}f{#R2r$;M{L2NP^g*UYg~@N=-2wu}BP(t%Xja^W1R&O zs`QH_K{f~)mDm$q6_zwbl`ImujJ(_Q@6_IVGr9|$kC2KikIs6a6KQ^ZC$*Nq$fWo; zM(chaYqOF}y_~L-L}D@Yg@yGZF)5^q%sIvr0zdDSk#AScXTrCI{5*aW!3}#gY$!17 z#g}$gg6?mZ*vi(Yf}hPux!2G0L=MJArSi`oB9#u&Fm?~0P94jb+VSeED@;pSe zc1WO0UAY+6GWX#h!20_%PpMN({c4_sqM{TX$vB%W$o2+xXYh!7VT-Qzk`spq-H}-4 z!>sgY?O@eF;B&oxU|VLvudWs>=trmYzC+^>_hNv(kmcbmI&v4jAEfXJFJ7J!>YO*8BwZt)j@A<)nk{=7K4YH6>k( zFrV2_3rU`urBxbGWfakH2W>RJ=)Jj~iXJ^AAFi`sUGT$dRe zwE8UxM$k<(i(gebI-1dQ6*et)WJEzuA6PJDTpf*l@!IgSa3a;rlcbS0C9VE24)NsR zAMG$Sh?TahL0i6LZ7etT>G!y5pPz&C!!(6zhKQNO(jDy6eT!(P<# z>k$!}87bx0@=AOhwn<+i#pJaskrk+}Wp}d^q7l{Yag?~b{ti8j4?T^il7Xwy82)L` zjKDDEYonUM`!5@poOV{3$~@%RdG*~o2*UIV0hcx<9`-#ter+h^R_V#B1Gc&$P=FCd#mQMH$_pksNQ;Zj=^|Xp~F$p#k=@~xvFl}jzO?=UUHyLX*`<=dcSr;fk=QUq$C*a=uVDml&;{Z*NAC`2 zm`lLa_QKWcaRZPil<)GO`WkCHvB42>l#+z8FmL84=BL|8GQEHFiN@#8AVT3y_dfu% z&VW%$8aFtXnO@FT91coHHXH7UyzoJLlI26NJ3?__a$*3cmg=)cgdJUUWQHDc2I zi|PO)6?YICxkI9=iYn{>p$>2UbB2)Nm8ss7i+MUY16;Ycux|5(ZU!$&EN0_b%*+3_ zhn=P1WS=FM{8#^}Kd9jUXbDRH*t(gwC5)uKPW|&vS4{170lYktQ$yjaTFzf#+5U z&Jk~K@@cDSoy6bL)=n486cl%=uCXh5AdF#-Cp`xUK^I*DTMGsnrKiHjSV;ia=3Qfq zOGrImi89ZTyX{(mlyB_(9o%!TRniPlhc#Gl=)Mv+er|*gNg@O4S|y4U)SN>01n}P# z{yA4zmbp8JK37&c-9w)ov}I(lMvMmBvk+nAJkEFl9W}b z+REtycqSOnjSG7 zPstnsfVWg$!a5_k3$I;}MVMr12q`CwR0SdJSt<3ybB9I3!R3Gzm`JJ!x#s?IRAacw 
zsbk6n=P}W&433&$UhPy9XS|cFTg*XmQ({m$ZNDwuql!tTaJ=llCi1d$FZ}L)vf%z> zB7e@3I_a5n`m6bZyJ4Urxbw+ZC&%E5s&?$UmjWq?UdWgM;5h|Z!TwF$7K%2>f0`&i z_JdTnqqD2n5-8_6lu7i~QcZo9@Qm$!N%1b^s-a4g@5Ji9 zUe}=b4-%9YY9(UM_oRT}H1PL}pz`PRr9W2Lv?Ks8OCPh&y-&jK1X?FIj3QhNfF{pc7atO2b-T9o?D01TMqspkUd!SHt>fVukj`URvTJ+~Uejf! zUGCT)eUx#R$^hc7lU2n!mbaAvN(XD(r{kQ&TyytOjz)i8g=eisyq7P(lx$Q6g_iHq zhKJ28UMR=i{u&qb58!F@0~Mj93e!z4h>mn{c7y8-n2sCN zNiKRz8=J=_51!L0*ppnn8X!YPkC~o7j!_#=Vme%)%=)4S{kkuWn@vuI^5sWNEH*vP zL-L9;YZ`gs1mQ&n7Sg`yu67||ye7o3k&-8|Q`*WsdoDP?i7^L zf$n=FBDgg~1inXxz*VNp#HLTQuxT3SRGXR^^ZM?P>d3G?f!7~3-1q3##!ugrm3>fQ zCqU6AniLtF`2B78y%~Hcjm&*hs?c-)r;c9K?q+0c%$knwoDlThl^o@-;psQjDwA}< zWWnQfRLlf(^pxp4{&}t9tzU}l>A+SD-f-jQ_LS^a?JeM7NK&o5)~+E>MU(R2{hCov~2rTUdkoP5X*SimIk)?uvGsO-Bn(k&n+{dgb~4hs-fOzWm(aLuN|?>RebL_30oc&VW(VWd9CFb8@gvWV>9h)$I)Tah&rsk8?i6o+yPz zALBX7Rt$jfv}o+zqyFJ`{YTtQ;QAQ`r;1cHVx7fHRW|0NCdoTI>H1P;Vs*3f10`mw zHgZT5)2PYI4*(I7iM~mmM+Q@9sz;B3t^DZ^{dQyNgWIOX?S5+A_Us`2DR1TVL@4UOsMfZyd%wq0lr^KzOpJqDD zkK!NPg+%lMhNOM7)}EPbQ#whaJ5qAup6rY1L}ifmw>w;?kK#0_OB)7dXuB#-zN6=B zi`ZAoG$Vpz5apw*5zYS1uXl6naZ&#O1{99SIE%XT`5iBxVV-;>-mc-uuo{BeE>9b> z_sffrw+qmun77hb4sWE_njQ*EYe6pQ!kg_P&IMbU0) zv`7OYU5~(JcYFWI`;qn%$*xrP9_O7OKj@yuOdU3px#vCZoM5=kXcVufs7WarX-BGQ zM3k$wVsgO3@Y_JZX5OB0cE7zsyUXx&G4sW;&Cxqp_UTc~+Vc%hKNguOVz2rW{oCzJ z1&l-yZAnD-Pe%HBk{`Vt_Ea(yBMs#k7+$OeIyk(Mg-vk&G#N^<7GdzOo$HQtdQjuz zy9QO0`UVba^#mS^HyIp?8@ZDN4<1(3oO(lfR}!LZ`&epq)c{oNal+>V^^S! 
z+S;0kp7Qf5J~60l-zfkSCA49P4F2a6`JaVZ-KndDvi7`9UXv!IH<5RUWi~@Vt4k|G z7i^?lMsRh68Q}bYfxsl&6`r-{Z}seO8Q&SsH0Cbu)KfX`@KD_379MtWe6P;*Bc!-p zkTXHsUdRqe<--6Igjc1yI-ps{ zUfK{PhjE{Kq z?X+(qc`eqcLeFQ2{=QZOX)!x1@#+Ej#)s(2&3$>IJ>kK{hUw{*@!WV1#sJaX_!R~2 z7Q8|OhfGS(j+VFyJ1Amba*M~JmV;aBr(Q4{14ZJ8tYFQ%bC!MYy}a!xwuKTTDZm1G zjy1YN6rH$FdiaS#_hw!zoE!gSz0mwZW!u%p15;HM$Jnp61w}++>A7l zao0qxlfU7P)sudXGLr7jQ7a5{@^NJIz7|v6aSy_}hmCT-a@8h_Mk}- zzn{$vAL}*S|4x5S`D4dDeBGOyR+0fbRaoq^3$4Q06%=cj{o7$L6LQv1eX~W$M0c&z zNa`rR)hm~vdboju5PbN6qSLm7;VgoM+1&MAhLu|<1%r$X9ZoAzF_la0!)HGo!Zr1G zsFpBZTNde(l>UqEWLg?yRo2ykQ@v}p2FU3?3k~O$K+V1qfi?A^@a*n!U?Tm z<;XP_pdss~gOu^V971FWd}&)%e2HEmf^biAkIF9Hs0AJi_?*!6$j042|0LQS)+iMo z5Au(L9N1bR;Gt?(dbHu>7GN_Y;uXRx{#=H+hOa_W)?e6MYGgiQ-?`gOWJ}AEYC(mK z8oOHiaM>F$UG)#3g0}nCj%>-;wQtyAzz`dsHNt1UlEqss|AtV>jEf0tNCR%iBR$SP z-0ui+K~Q@gaauXuo&E#3((pW#`FL$3`gM8cY(zFW$E-6}BX(~OMXH-Hx%P?0pDg9Q z=qw#Qw3_`3+}fUQ7#S*I)SJjvWH(kt92(ttq4r@;-A*x==z%f zR4Us+3!2Dg*4>I(@%JywrI9?x0Gb%d3>4FiOD&uV41KjD1BhEAbg`!`=`@@zN%rwa z=P~{L`ax|iWFcn-5KeRjrJP~UCWbBgl17$h7IuyL0tT(M({@ubJ>qPo%uv5j8 zPidEApm5+TI@InKf67aDOT@e-UYw*~V7Fn!kfktDyj3<{b3-Q=j0`_aa#h<+ldjlp% zlV8rjUV0pU_2i?10+%1LghsU!*&6D5a+#M^)Cnf{dzH<3JaI$tAmWD_`|Vu~8vwMq zDNB#PQw>&F4mfYSh?XT+)Q|n54IqUR5JrF{0N~b_Z-jZw`j=##PyY9^MCnS^s(hlM z24;{z5==6dtKv#{U*VsRAuJ39S?do2pO$kST6A*iT>y1@z6@wfhO8M+Gjhaf*(FeM z&O~|3&?-fx1^0%z0+c;;8*asy4 ze+2a1pUq)N`KO1y01{PIz+c*ODg*#NUH5>N|- zFE+v}mEKLQ(meC}M37C+7VmIHDZTs4)1H@ma8eGBq%l~!LBN#TwxZ)&Q7gn>4> zlsLA*qgWhg5M}=)GXVCxH+M)t0$DhI6ArVgiCKUup_~lzHUuM)O=9&zLf->R{*nKv}#0m zF-UdP7nXdGw3=Vw&y=0sY3l4-f5%-wCo(`#W`n)Q?NjsjEL2fp7XNudbomayI@MDv zOMYT*E!B|IEzD1tG#Ee(GSyT}+qf+0taM$!3(rz=N ziLM~yG`*Utbt`cteshOT>AUBTIa%nM(?MNb#5m6=fJ2sNv5Z(&1emVbH@Z|h38Ot2 z>HB&?oQ8-hB>>W0q{96|&3AzR(mQqj6+LyzMNGY0{q)UM-CJK^REUkrfE*4&XYGif zBccjP5U=N6y45i=augI50F?o!6&2Hph+yfcb-Eocr=+=Yws*SvxFKx$juUDZdH@ca z*C;IK>izWlZr|R?LBYts$nGKkY58ZS*Y|uNX!FxCdDzhE-|F@*aPRzev&CN&Z_dgh 
zlg0h(dgd#(+5^#NLEp$%yK=f<})hI0afR>8&6U*dZvjK?diT0+U<1b6dO}b=Vyr;HJ-CQ2}ERjPX}v0sDBth&jrgMSWI}B^+m77z;glQ z=5l;Lb3@w$^Bp3(qwFbzaO1@O}&vr_@56AI#YBAmun|eU;Mn)z0SjD z@A5|fReXa8wQz;4ST zM%LelZm#~_4lE?wyrcdf$I_;EU-8w=sRJu@{-(c&0AVLk6ahU#D~d zfd}8|T{VV0*(2i8z@}ZQ#5rct@l-R;p}ym0=uOd}&L!)@E$58y!fN)?4iVAXYng_> zTGJ0RmNK2jplb}zdE@mKgtz3#&DQ3`UO?N_Ed~7Uq`cC1MuOLZX@!@?={VF!Tg7Hh zlMFfQX4SI2swmx9$pO9!NR7rtzaGBQodsVfOZHcQy?LUftk;fPy8z<$#K%GO)2b!S zb*fhg$(sk{Q%kYp0!}hQu6iTio`>cOIf?2s%#8(;&zMQsrR@UM&3g+plCk(Z@BqA& zQl=xln~4_eV*(cAK;SE=W@tf~BxZHQj?*PZt-|#QU2_1j4OwiHg^|yKa(wt=KU&18 zSFvl07)*I~6^JGnDevumm@33(#!fS!$f*~RiIC14bOuID8G0lY+!!cahJmM9)nC5hyM5SPzvZ)Qk$4nmYwN92R-}*p849_GZ;o%o2H#VeyQe3}j zJgslkmA(I0v{UAvqvW5&OZdG8wit9SNyT4qlN7BFLRR6gR`ahn#szV~OC^GkB5H#g zO@22_Y4kpMM{%BqPW`sFLaF~fv09SALJd4PZaeNA>g~5Ap6J~|wM*zhF1OGtM)Y(J z(;jLQK{$9CWc}ANRr{olP*4Id`!@rf6d?Xb{P*}&cq;t#Ad9?UjevXqk0JNpG5HU` zW$ifgag-7PGD?WM93=uVn)t%soq%VUsej~d8Bs6r)*_~7L{N6}QPFSUc?y2Xx9?Z4 z{YG=?LGsylwpJzJPmX1(m+le(y8RXE!W{-RYJ$@D zr6DDNVUhw#HKP<#{v?PMVOvln+f?nlNpv<~LN>|!Xkhkap22rl`D?~UJ=0M>=P6^{ zud>HSW()u!$vKW-(GpS#WWfli=-0>J^@U6)mc&in0^`I@U8J%f{b>L}eTw%-Ht=oU z!fTxs_SP`5JD=i@srYu5l+6!JI)ZP!P#JiCoGoa-D#}q|bD&9yDyK6upNOLr^+^D0 z;XDd)G>OJNADrzy!4t313~Q*S0R+!zBDLKZYt|&yj=|s`fjR<~`Zfq|twOgef~LXU zybZMfnHTboQ%ig0BfJIkJ9-lHTS;ZCEpQtUnHDJT6q-)5FP9#uno05j z?k2XJ10MQ)dtoPC=>y74AFVu)nKCt|_(kSBS$x!7KKqUFQmq6h;Y^MgSyDn#!`S(N zy5^M(g{P3;i<^%5gBU!yoxQ~KFOGz&;ge^5ZC9MSW)Ggj>a@3-GQpU4)-t^eA2Li9 z=szF*cCWQ^b>CA;iZ=$K6GyFOweIi#tKti|2{M>(Qw#0r&3y z(m{LfT}X~|aBln}_}--aHCgc{cE`!TFzjckzTj`K zO{doKqeySw&K4@yaTuhcs6QrPre*+v@ThWzdvnuvsX-izLK!#4$9-nAPrYeIdjhMU z2ZsC*wQurN%7~Lm-74LxLvF2v1ArE>agNwRhtT%XL3B5qAiRU%HK0yzS_AZy8YKR#GUvUV0Q6 ziU?{&@83F5e7*2W0?)>~Q?s!0B|Tnh`uGgO^10d!=fTA=Q2t_&^DE*NE9k%lIx2@3L%a zdW!#MQgrAJ^S^j^i9yJ9NlQxq=UU3S7#KPNL+~2~{`IR+`g%~Q^ffjrvtJfJR+qnn zZ+lZ!9m(QsYQ$5>mg)4t(`T`uOws5oAPAI436qT_SZ*w-H9zKE2k@yH>U;&)O^w(ap6(KR0^0T#nwvv2)du zDO}0kYkyBDD1V#37X18YAo|fa0*~PS2e*y-8wSLzATQp!N>RW-yL~1rf}hInJ1CsB 
zmw2Y-Y2!J&HrdP)yN|^najDBkkFb_N_dCAi&mE@dQ+#?9S{-UKceL~CCy4c8&0P;% z7RFh_yGBSKG&gyW5es$9YQAONtRZupHcn8_8qR2P*_?#+o2}$t(HIz8KH$0mWVvi% zPDc@yi8H!EXdczFjW9sekKWJg9jw$E>(Y6*#xEU1e467@c8W72Gkppu*o#zS4TpII zg_JGoXHms4F@ByU4pswMZEi-Jh;-Mx z5DOt}5(sh=3#)sgtT+wuJm?vBuDDaGabNO>5Y&-7ToS2Bd@^0JZTY__d&{6W!f;J= za0%`MqeS@aFcZI~j|8tLmQu|?`fJrzg zX#(-z|9c`0kepx!|C4_I-!3C}uUUYJ6l(adyZ!cg>WtEE-xU43s zuqN46N70ZJWtL#p+?ET;ObwpICv{Twgr2Q;d2DF`2!@lKy6@9;oX+9skDI!S`?9B; zgJ~MX67&yTqkHN{`ovl+cj#^mzQA)KT;k}QD=vWjQZkcpva^*bwXW1)_5>Rxkl3Z3 z?O;(#0MTf&c-n6tp;hwPZcY7*>e8?AnxK{J)0Y-wj~+W7{ygH5WsQ$Z!Nko44SV4% z7Y(XI!IYRW^Ph*JT-pqj8@_T9;(4w7NR8zdJ!U>P@oQNTp28PjvavTho(kR3Dug6fwpRb#E61GlMStmh=^~HU+oMJsyvj54BKEP> zs9w4vRu21$ggRDFC8q=`X&a_UjR%$W{O)F=BIDLL9H) z89l~4KBO2jVS|Z3>gVTJP%YOYiG=p)KnL;E)<>&Q!HC1D6S2b7 z@n#{;PC?9V_i?n;6%^q*PFp075oy9{X!$AHRL@I_62Wn+yW5nB7F_yHnwu1OuM!XaJ{Y`Mwxg%7MAo{YQ(33MW) zP~NX6thJl3WS5<7cOhKXpo+v0q_vlN|5^A#J?2cRA;OV@L9|qaWqi(%z)X>$scOu7 zAeYvJex7%cno{FWJ;d`@=6)r>9s5M$Ec8Twv4C{k&=u+|D%6)Idic}$>)ARc!;|#n zm58s^#q-kF*ZBm+|1nKKb}gF&rZ-$Ws;=eqhyk0 zsqw<7B+H=<$phXe&w@sB&e^s59l}}ex-);DP@PHMac+0rk=58_cl}P@k0Y0Dg3+50 zEBN!=v=F#DZg&^Fu`Fn%amd_%_VZ*#c(i5XyF`ipr}m**l2M)`=6Ka+g@^n)sm=De zZV?afIs#qG8^+=piKW-RGe7x%RZhphl+@p;*ljYV>C2?K7p-8`9j<!d2jPy-9mY+j5cY7w{JV zLNn3ISAH|EE{{=fF+_`S&;buW{T#OeE3Ry+WKZX!DxTlMk@XD*KNWRD zFM}ns7pSQ*`q*`F6RE`V2fmg8#$BS&tz1}Z?Lz1ttW*V7kkkBJ@0eHympheEtOw6dQ5T zY~-SNhdx!6XsslvX-fMRrFTCtQ9UAIV9-!UH<}55E^)U{dSDr5V_kiklVyx47*g z;44jf=d_#07cTF_Kctwt?~k2AY-2BM5w=8+#rXwGyAKUQl;;iCE6-L{PQ|P$H=TtD zUY>eC_8BTH#;!fyO8rSK@~GOY#gJ|%CF`&8TnXYxVdGRmem^ZXNhWqHm8dAL&9>as z*gY__MwHN-bG>*eay>Is$DXDg&h<1}euGPCz z1qBx=St}k-by_!^n^CpW<(L*&((ToqoPBZWY(I>Is>Qyn)|=vlUw-ZT39N8>DL;+D zRsvT6uNlKm3ZAv^WS9_OE)h#-f|2z~eWo&Ag`=*-dbI!zbz~Bl;FkRbOb2`9UB5-D zqWaJ7tNX#!r*{ot!?{KQ(hN=*yF=Be{WNp)1RcF*;|ZtbWO+$^FEwh{98upA z;k*%fSa-nl!@+pEEOaXl+kOe;ex~IFvkq?G7}$J-eCeZ0Esa3x^9e~k<`6(mZ;Dt~ z0uCiUo}S0PG(KYd7P|+n!|odtL2NYXzPvI_lv=hq4&|P9=+!bmS!3v8)pmTXUfZ5( 
zl{p80F@!`3sAUJHrQ99Yx`AXLMMptT_Av@|3oy2OevWn}^VpIMpjm? zWqi&^dj?9DkQ> zrvk4QIq@;xBUAeM`&G)d1zC5yQJN~5MMZc*pJQ!&MCUwqk1|fqzGewy`>IZm9#v_j zu0w+IfFLq%;18I%wf>-hn&ZN@(%X$#ShVy{v8t;O@JpHw43UKg20@;*Me> z3fV|0rnqwJg8c%sv4Q-S52dtbGdSJ*4O+M;b4ct0^)~BgGm*nSdM%t0rMCdBg0G8JNz>E^6dz|2?}0D7und-+iH`I6Vd8;UadbIjBF#r#U3VNd2S#S?+t z0!RN)UC2E!DDUeZU&m{aSL48%>TT`XC}^~7XgaM_qQiQuT4s9KF7NC=tV@=e} zxaa)%tr{J*oGScp)N9i*HX52PBAT>y&i5*})8+?8e4SP%`i$AIqw&t}0x4|{`jhWh zIwD4yTZ37Pk|rxs)c1{Bbf)RO3mO&R$2vhh#oBo#Rwe;fnN_CMZF0XMl{K3?o#E)WHg~4N~8t@ge>>( zmYwSD#Z;7zIM(8F1#789Y+b4|ix!d(;Jc;i z!U-P2jt`M&j8Yl3%N}Vtld(I4#Xyql)kKONr$2sj-t5T!E5Y3<81Na$_hu=^6&ifB zm_yE(rOTR_lPamE=r{qvksi4*x1ZHKekXh2r%)FwR;;Ym(1}?$eHEkik`Xngl~nQ8 zGkl-aGeGJaN2!604B5Z@aG29S%o{LbnXj%{)tq%ujVp~5H}mps1@ae=p=X4)5Kg6* zbG}3d15Zke zm@h?k(OSvphE%{Udx6bWNV)7$pq-FjOs|xF3U%we&Y}cGp6OVusFR-i; zy@M!-jrCxEQ8cVMrAQt&cnw`1V-uIKpi0Uq_qMG%L1#n~+ZJA!DCLp3J z`H040xh<4|cBVS1I=9;I)f+A#?XU&F4fIInbVO%|{plQ5Wq|RNmVL^6tNop5K!rq! zk2hD4kPTPX-Ly+aRrUG+uN(`oine9y)@V8RP{;omW*uwlpXuIjsqh`h;#5n_>F7Hx zGc1=5tIit}5vDkkJY%@ZE|9vdNEnk(A0q{N6Z`BuzdT1vf0(o`4Yo-j_WQ(TOJ0&P z(GpT_W5uey5Wje-q{Y&)EKkRU51;FL|1ost1;q@KXLnHj2f?VPPvSlJ3(se7$C)WC zT_(FBU9=YK3_4%-Toh)D#tE}3$Yt-%*hbM}ar%TdOw@E&G9`9MH$|F{ZxS}F0 zO6ulE>65qv>nz1dEbu)$tT>vnfc}Sy?xPuPv>nTPo!b+qz2jU(20mo8NSje&xp3%H zOF}{dkK2m!=9zd|9;S^jvv6STcYv*?_GYm)=CnEvRU%DOtIXZ+OFpK1gJ&t(tS^1K zdP*8HkeT=oXga#(?U+N#H=w?6SJ)V)ir)kS!s9|q7iLM?jAmCYMQsz@bBRH9WtP5| zT&nsU#}xQY`#_vQ8c0==j*f9Fb)A-l{9*gbA&u&^njx3Yd|h)TN*>F*)(oed3qgzb zGpf#7pqQd!JV_D%Fl10v)}I0D(TP^F;X0!`!0#mU0X7}pEcyo(i>8y%ph*zomUt6KzP5u59#lGu#8Tvh~O2YQdJ zNUAABreH})cs~3QGyNWhp&L}31Yo$sK7)zgXo^R-!7^IxI+?eIPdEL<2<&9ln(a}3 zI+aERL9IDHpMY{CBYB4}$}Q;zEbVgpmX@2TM-uO;7A5O+1eGNXgOrrUSsuyL2c7K= z(|z2uqx*zINVkCbM8_@@=RUbDicL<9^#O<*&02F#)7dT|86u%;%Y}=t@ z2Y=JE-R~$8moVz<*FMufl1Yo~v>u-ic*51NbPX>Wv+3v2%9c;*#)g4fs#BH@p6ToS ztjgY4noSQVw%42Zs{3|vks5o^4B=q2&Q*-ZuuF{LW+Yq_@|pJvD)%8-GNW2K@7vtE z7h?z0(P>!mfXd8*|Eb_^Vz70#;+$+J_s4EfL?C)Fk5_fh7&MX;K*O|DNO8w+8-U>(tib63~@XLMN^#f 
zxvnAY)?U;l%rk!@mHRTgSJ?_eHL_=yFXu>gC5!rl@{MwO89nkg~@*fRSIk-b(uR#*a@QSSikX?YTNCt z3qE>iot&P$fp*7G4)ysN*zZjO&WY?XR zysmdw%aqQ*MU|yXFd*vU*lf&0l_2_4Dhpb#e+o$NS1#Ea@F?9Bimz%^DPro8<$ZvK zchma=(0mXzks4{g_DD6cvF*rO>2+ZA$wnb3XpH4|e*R5QZMCa$%>Eq7B<-?@j5xpg z6KY$&VjKGPE)-mvz?)R=RufJF1xw3avg7|ud0B6-jOHzz!?9%BOi^1-;Nm)?&>YNsgV!M;2>>shJw|U_2{o= zP}RHf^iWy?{y(h?>i@mr5*VH*tD1Q_T)EHwF$pXIGqNtNOUF$zOPQ2)2Xk9-Aw1lw zc|=P}+XY8#yQN=z-Pb%yART*R0Z!pO8yT-QxM&SLR+qU*jIVict2W&4<5RmVAD2aS zVl<&?I5a@f%w=gSJbUA`?i{JCTYLu*r}lMw?Q6Q=w_U^<7fJ?mwRB#c!r8Cvoj!t!=mu5?ADAV)`hfL=3iT) z1ZmUW5of9y%hAR#HOO|*lN>m%IO6MCh@{^+$$3pH2>MyF93SixpF1&xpDk24-hPXf zo0?Od`UB8mvLdT#8NG-xZgZP!|3j}fm5t-R9kT7ws!&VAMEf-VkPP2GUgI>|6cC@q z=^(So>msw+rbvsU%DtzIFTDBz+5zPi%8Y)Jw6vlvh&Q{D>{`>T5z78-MwyLpT(kFh z_buULgF!5<&9P4Wb{BOX&#kgSj599FD0H8^atD)>7fkT35>u_H1iQJ%^-xS?soe0d&Jxh`}yAVFQDM-i{>SVJIhd?c>R{(#KGu} zcQfezg!a-MKBHjtZlOa2Z54t`z*9CU=%=_gncdvx=eB#D2!Q|iYsxsnVA`OrJDFQg zvf#6AmuYRD%KmyRdV-IlS?g9tgt5Z~+ zj+~0710(`9nXbBy1XGeXDO^CqhA z8_oSi_g{cm;bZA4u357IxCUjbJ{81MVI(y?Ol;0Ui2POALX<-;oyR;O%!psl!$p4$ zwpf|+*tCK}M{Bz|!n=Q++ec?2HQ|cjG94^yPprr=m)H_T*C%FdxyiEi|G0&Wr=e;4CjmcK z(fxq(Hc(b`>rzkk~7R|$c&ND*;&VcO+5l<&gQB?)$~YM=YWz`8cq8>16P zO6-cd1dv5gl$y;aHp3ac)KITD&<(kk_x_T$9cl!&hGdC5qhoXelhdtgdAXnd=+e}V z1W#$w`+lzj{H3e+D?ZLpSG_qD1e3ft=3JRD)w@mUPm@`l2g4q%>P%5w1S)-?0t)x0 z`t6iP#qnR=)b|-b*(i~`@yf4&aZSxGGt%tdIy9OF*M=>s%w|r5{;;!g+y&y<&SKh~ zOXXy;%45AQu4B~RSXj*0WEBifPq88S-4&;r-C^mn(iJYmpKY32^{@=hiDf?Yv~)+< zaGOc7yA(tZ`Ip4w*_s3WS|Rl`WW_Bo(gb2n-AZOH6<-Uh5VhZu4Qk99`d6sOU&y|8 zCqAZ+VWve*{3^g^;oBg)LtpSZX?B@5%qJ5vYi*Gl2(d9@TK>X>_i-UF*h`5)RV%o{ z-jOcHMsE~UknGPTA|XQk-ORzkNOmw^K}lI@0w}xHVX93Gb%&8AJH_UTqox*~el&u{ zG=p`_exV?P=@^G~KI5$)ng6G1fhNAv7F|oC*937*VtF=X7n!J^Rq7xn+V?i%40od` zHrC4*yZj${(Nb5Ha}d%=QJpPAm)1pcby4)Q`TwZ^J|VZ ztMRBT4RkIEWF@b1SPHA-8)?ptD0DQV)5Hsr4QXKEcKQ{D5EZ(r@a4IDnW-bx*T`x| zOf*?xsFT{hR0g4o+(Y@$hBH;&S71g{^Pq9Nb#O|{719)e6Fo96E;5^i9c!oZ7cj!k zGnUQGexQ>pZjkL5r#8p}Ofrv&1_yW4F6ucbwCKr9qBh2TGpX3s8O!h3cJf^L<6WZr 
z>pExIE;FgYkX1V+0qwhzjgN<+Y`%!kzIL|mmY~iBqdP9y5H$_Ne5j}riUnS0!R44dwVF|lZ6WI8! z9xd}UjykVhGR~<-`b2<8pK6+$yC1)+iD#ZW74?3I@?Dq4epgab`M$_<=g^5a5e?0$q11}sxs+=t8R$hHc~DY`Y#X%C5kdSvAlTJ z(Tf?0x1s9jG^c~xR6Y;aR==rK9X+|1DJ{c1h*&+F!Ah+!_{udmyWC+9pCW9zu;x?% zrGYu)P(RDK+b8yn4yaonzw4QQVrzt;=st_+5*NMB*rPAtKqV9Tda<-2a?${u-l-hy zA3@TAlwytVX2iI7T}V40*gzmVuj#wY8r<&}h)Xb?gof(*6ohyr`Z-y44{J{o5%1)V z?P?178ivsy4PM%1U;`0g_9b_}NmlPEVV$>JsO;F_QWV$~3&cPq;G?_rIiS7cBIvbv zu0WKfF7FYk&h+zv`Uvs|6dyuCl!yO7xJ5UQt<936IZ!O_f8y94IzQLCYeUDn8|(jP zH02=*_ZJzmnI9V=xso!H;Ri7}NAS5wh5p=;>^rJ`e*q2qgf9{!iNA8%FGIeryx)HS z9j>`EJ^wK78>}Mv@94OXy?+7vk3sw6&-WhCFX6CX>tBGuSl8K)GqG1FilI#y!x^c+ z=8U-PFmO?PlmMh4A};iTg9;FA;f(~XYjPw39!9{lr?vxG30-ih z8>^JR95bOZOj+Vkyu#pr=siJ|9z*y3`4pUBhW>{DP#KTT9F@+mc8Leq66P?kTp*QPI&9K2{A0<6Roc-6i}~&ZX%jeT6{Am;$2eZy^={ zH-lDmAxgZT6!io{dP_8$DP@His&fw^+aW=_i7SHFItMy)Zn~2y+-)f9NLtOFDI9d= zM>}Ba0tH0{L7w({Q)U-!MsOt8j2N}}cx?1UCNM>dFmC2I8_6YDHgJ{-IJCm$H#@Fz zRH^bO2CHIABZBd2?E}^Z$=j#?=VmwmH}%$nSEIfr(Sie4(PLrjTTzQN2YY?D_)11g z#f_=BdL28uMrA=g6*l~_1L#zA$x$c6ZMaV7Ja4`R(kH(k%M=@(^?-rtF!Dq+JJ`)y zAI(5Z#-P#5uMnw$?N&uuqp0`U;ty*GM(&qoYAZvv|LO(C5dCT^tRaHxE3G4lCSPP? 
zW3_Ae;vc&+J7p{MCACpO*6Z|uB0OLJ2aMUAeTo&4i|>ipjJ*PRAGB;=en=6hOnBFNT$t0o-xe-5PYxPSl>{q*~)*M@mv2Kx)h8H*QOa zLx~e;t2=OFDn`p`ttzGHzu(0r%WKf%#9OkS*#EDn{mohN-|mp@@KYyjG$BEyo+Ig& zp-IUiOA%8z7xz^vKm%2z=MR$2tXtwG@ZMLxV@^*Pf!o2V1+_S_QG)?*r!p?wNv$`@L3eA~BplQn5j#Z{{d0O%FXv6AdbEQ}?P$3>FZ8~U zS_??4E9wVvy*Wr?^~;QYk#H#MicB|+L%(Xct7{CDjl0CaUNQ0ggoiw)n%XkKjD@Q~ zC^O&%q~4xVrY?9lCqQQCTH)pzeup5gswK*(PGe*t55MA)XyEO$jq)WD>(K|IEaQQ{qWI4@hP1%ZMnvOVIX4Ec& z^9LI3Lmr;&vr9H}jzfzedcaTYq19g(RgRGoU;C*4NG7g=Z(bgP4iZgdp6m9Tt=r*m z@uMwy-l^Dol^WC55MvZd&Z%v$UlRM0^?mUfQm(DtDuYV=CS#6V)eqbRUxtb)7I z-#I?pq@_qt)=g1RRgRkD-rUN57p)V{;Q3_|6?T(qMbZgF74x1Q1DmTWld)*8QlX;W zt$j2I(zc7Wo|byw{MAp|bSsZvF9S6y9*Wq$@#BW3VHtLKj1g z*su6-oaUA|xryQ3lMI)QfwqKc6T)xyZ;hdcGvAuRsd$cj)oF7xxbhQ*&IMf>d1sET z2LO?7C85#1b}u-(=C{}?nM3zebCwY!_LW{)?i!IIvE+htxno8WVvbW41mkZ3}; z;oCMx6zd&&Dz0a|nUyJEtmjg_+fPp~(S*0Y0K5#QE~z{%gF7cp*#XEi;S{zU{J8hq zQ&iUTG?=AdRs`*c0{KC(W0(kLc*3apC6!q8mD)3!l!J>TC-fUj>gFnrAN6zsJKNp7 zU&3Tkjc6$okPPWW>S*Td0knUBbWoHt%n|To-Cw|0+c(DhQGx11Dr4MwE0T0y?o}!S z$bQLP6zJ34{c=TrDzm!_nsz6j%PHln?L=CnBEC8~3+bQ7vUlWk2SJz{)$GN``j&73 ztfDm#^JGim;V|z^;4T6zSE|)DH)HeoL8q>Z# zwJxEb;S47Z71Ldv>JQ0$dcZPw7mI3|PGKqYn`cdlP)*Z)x+PF*p6#%jaK5uu*j`<- z9mLe?u&^}*{yv1wAyMw82kE>08m@xfye zDDLHd9ew`xWN_O~Io;G_d9hnyZq0FOi9NZpSe+&6(|xHEuTx;Y8c_i&HD(uDk~zN~ zQ!uWVSP5-s`U=`S>#??PKZ?$F;_RoN35n;!(OJ}6rf!$(wW%GFML?90koii0f*w0; z`Ug6NoQiZQrn-xMsUP2H_dPK;>J#bcjmFmM5u%fw3b7X*a9uD!o}PoPCC>3?tFXa2O_%*ekpUj!F#9Txt)iR=5i z^;-}+suLo4GoISs?`uaRV2 z<~r>9^_na6%?4G1xb9}sHoY4KsvV-$zlBtV#4G)svG=u<___-?(FNok-Pvbv@F zfnH1T;G3Iit-bC^aF|Q!5lO3)FEwV&3P;Qq6=VEP@*waONvo0f#cBanW-D{Qo{EA- zRcYnqua)%|4dn9fFBt97!N)g%cAPN*^7d)E6dwzOxa=rKc7ZN}KY06Ikegz|Or-3axA zzktpiTYe{d^8Q1@Td~}rG$A2rL!6%@Dx_WNSTDxw%o%_9`IC4&rY&b26D)T*$Gp2) zyf7;DIjtG!t&TKy;rLlrW>!@hRJZV)h!5JbGaB~AdCQxQP-#t!@(m%bd1?R0^dGqb zcVTjn8g0-|w`}d~qaroz;x*y7V99Y9TSK3i!Kr_Hw8(3|W$&RvHZByxh4<0j?FkIB zRmE}WT6;SlcZDpJe{#7Z;!tdM7aJ8t??f6in2nir*n%9WD^*o#)1zc#@&7>aafU&s 
z8aLB8P+$2XHNd=SutE^vc+ejmW!$2TBEvtr3*kj-9!ZO<74~3iaO9&7VT2s(pcdSc zQJ4Jx-mZs6LQ;cD9Czy|_c!e_*&~&J5e!tZbufIzOqceAI>aYWuX8nAO^P?>w$#3~ zjgU=O)&uLDRf@!!h1|xtp2+3MYOTdS^d>6(Kp@D(v}OEdGbE(abrB@*i{Izh7Od-^p0_=-qIE;ln(<@ z6`$6GaSazK_P%gw;J5r+0n`Z&>IR4I47fdm`oaA-dR^V`-Ialr`SsblhmPu3=ZrtV z{{yqGj=g{QMIcBK9QprVx(5gC_ZS61EY0>1Lo+KkzIk8@dila-4t{rZr4L5>?ct#R z;d|9DV@~DFAK|_T+@sK_!&f`@AB^vk?qlIe*#6Q@nVt3G>IRip#%S~OaNLA$9A-p! zRM9O=db_9e) zVJ`_|gv27~=bB#cn~92kLK0)N)ZtI4_>TxDN*KIC6xn1+!B@-d4#`){w}QY-?(Z^F z{R%;4Nb|nAZ$Y4#InF!G95bu6ZgZ=jzU@BsOr(mrK1er zT(Zj-uTmz5*A$prTIwUix{7_S|OPcAr)z&(pdKh&>jhQ2K~)@BR8x z61zy(-2+rGkld6(s$C;3MXSeJi4tC6u2k$wFZy5t8vTXH1({^e-XIuyZ3)eEt)Pc# zoY|f7s3*XS@s63~w-+>|IoT?H7J5tWS26gG5qcz1ep5mrAR24^4l}){@}#%4&AVH; zptQNT`|-0yKaz>EJI6^o^xZ6z3!2IPI#bW~G6nnfg+^F5>&a zdG-FYtF3%M4ZG09j>_F?7GYQT=EQdUWS`zoy@4Iy$pX!b%nRYSQt)M0xPU-=SM$|2 zIbqN%`&)crsqLaIbhKKiV)pSfmp>F)vrbZ<+LM1${}&*94Xk;Uu6prxU*bpX8Zr3+ zS<1F{+7h?UP=~CIUjBr@J~*`LsTnB@D6$qj>3Cy2;jMBe-Q@OMXfk0;?9`FC?aCHm zi!k{W=CDcDw-3k;p2Nu!Rw(=W2`4ptX^X-%ZV-?oHlwP=@`mWDrFyeS4s?vho}pP9 zrkL`yu!aAzM`GVqQGv6}?oT^5V`aj-#7}WraBGqWpPG?1d6!}z#~Al5n|CY&D?(dh zECF5dsULpC@+2pEAi(d8DYTiErae^bBWPnYj?1`H4BF7n@e{D(+Vr-Q9d1`LEFvsHUHlL!a86Z%+TqWs6V!Jx1y6GTG(u zm{K#ysafu$?{aRr=7#eTF|k#6_n6wIFmkZB2>F;4JJyH@<^+&D>jI4Y=#GUYb8;i;LQXv!R5saGCbLC= zCG(c4{(K)A4d@HV<;+}M%w#SEy{;F-@eGH<)3}y|;w+(h{9eO2%O@;Cza#CAG3@f# za#AyrU6oX|i1g%%woAyUn9!n^wgkr+OFI^j8NCe3nx-Hw<0|)_T9h1OMpCEvh0)QM z-r9BWaqkLeCcr8*vx^j7BOIA+X+?CnL^5c4ot`mU!Ew(&(1BwbT=zzz_dTX@7KIRb z%!9LKPD6JYlYHKp+dr%9gqwif##8QBztUVG{kAO?eJ5%{nhh2Z!L(Fz-SnPpjRt1|qY^6ad|E;OVlV3LnFWyZ%C znShd_v8V^?QJ+T*J8e9id7k`zZ>+7$&AL+3(o(#-&xx4`r?VGVZ+5!4(haTCxzcoX z(ddQ3RG_eNpgY_}%PUS_QeeJap1M|GzIef>rJu&X5{tXa{MFCKXC~RVk6acvx(Wr2 zA9{D{A1WPsvw+bIfvG_6Cydd~bzk4-Li$|DW-~;+ z(C}-#dC5(2P(@-X$M2S(o9BeH{iP5wvys05(Vk?4O%dCq?w~g%Y4GK9;@G{ZM~(#t z*6Wo{i}p&BrblvJrX>w2^{7Xe)obWV1#=Z7rkokt+69kr!a(i1;i}N=N7O{eUu;)d zpbBxLCN8+mM4S8^IJ4q&ao&;CQf(=TY25bs+zga&U8VPENV=EpF%*9FC4w${HMk$9 
zNzaZAQ?^r@%45ZAajre1RoT$r5h5wty`#+vZl^iulEDWEPqG&*A#UbB#!dP-HKx~n zRk{1PY3KK@lly}wahZ-X!Lcgjlc!demL<_3yiuDv$c{Q2@UG@JR47KWzf8W62zD`4 zbn032nfA^+Z=Ikf@Mq+NYBDVye)i{9R#wpmg2ML+$ljG+* z`kcOp8bWTi*~j7G@A{5ZEb_#eRp~w9A%zTh-S&*c_)+gsW4Jum^@S9Cw+UwzQFPiw}?rf7k zL0x$^zwgLh_i5d!=j$Qi_Nm+@>O`6^dB5tLE;00X_b#WpoGHk;-TQ>LfJou9Pn?O6 zSjWU$LDh!gsWiR>$Zg4UYl_l}K-wu;;P6Bn!f);C=q_faq%udoi!JbkiBzXj*)Mav zyC3(dBCvQEoy=-ynECizoMci|Ik*61Bm&^Ra_X3x760vDT;#um%k*jeM_j~-=oj2t|7Tn@ zy7-uGL*t_2lK<)w_jJYcQ2(s^%>2CcZ2I+D^x;|dr|a1zrhcQuR}NY!pSkrT_T9gL zl%$KUx6<2QeUoy1Z0jxp>Vu|m`7g1Mr6cEf^Fyb6To}Z4!8HR}7haDz$FXa}3b}Le zLr=}HOWLJ~&Vcj%#-X`B8*{k}mlApndWj%0^-j1zMUd-KtO&jjD(EHqtP<%Yc1>-=Y3EtbbMsOXFK zTLs{cbh|8t>xVP3AGg2Z`VYSOz8C?Z1a9BHx9;5_6DWT&{vVu+?Lp*!hI3tLdCbH+ z#?1&8EIhocr&K64%PXId8B_ML0Pf4mbA6xR1z^G9ez?MB-=Qg?FVB`&G^>9R)djO| zv&v)^H1N!E`r2}7PGs=SQBtiq*-ml;(`uoQTR{`#btK`G{o36q$dn3XJx z-aVA+eUw#JJirf=L5ax}y5=rIB+r;5RY}suFqfUS$Bt!76UunK)~PG*MqTN!S0f>(_zB6-S90tQd{qoPdkMtg*2oI`663-0{&p?_g_HyS}%fs z0cLgP52pl$mqe=_WG`SLgK{Y4W%x4So*}#wfMcT<@eD?XC?L@x>_lz+5JNj^0xGJ>tKZ ztQ&T4WhE4VMEZa2aki{1w=W$7?RE1%B3g#u>!07Ta6WO_ye^2Ey6GCDs=SMgSz}mC z(6tsHN>cO7wRA`h`~kA41!)S%>brmINI;2qRG&81d-jRstz5}#hEvOeBp+8^0B}#0 zsZPX`H|!f-qat#Tyh3EG@Fv`CSfnrglU8Qo!Mj6Dfw7}we}-X95`%Gu9z{MvSDl%) zlkq4MGF-;oM*dcvLSqe6%U}S#b?F8@Jr-OxcW~kR>6kvUdV}u>wnbwsG_6i6+tHPI zj3UNVxT#|4Xy~VUhnwY1Va? 
zFl`CCL@`LMp%EEaF-nwR3dxTG> zX*%AIU<}ds>Q?9@@6AQSs8;D~I0DJHbU3RTlU-Ljv~TNl4K;BSeb#;U3(P7xj3#Cd z){bW6Q$e&4qK!KPnmRjT`56ndv3`3xa*TX9Cgdl09VR_ZzAs)FR;tRiwyjmXWU`5L zX^qPbbCi*JVdXhnrTCzbOB+w?!J$?eCM0b#b7a{rH#NKWL6L7{ zLG}6qe>`K=@8cZn^8alpNic(?CKrG-MeFU7BBDmSLrk@_MM{?u#07ZV}WU z(O;RSjj&}*EggSaJ{i=EGpfSIrQP2gA{b8Lu01J@cfXbsy=%*zy&K2Ys@A8`7Noab z^dVkOvRh_5#0&OLXGDoZz|aKXUwrjja%JAnIdysf)rLMsx2|9r^0*{wS_D^jINfrc zGS8oyQejx0#S0o8Ouw^!IxH$JE*mn=8MR6?EGaaqSv44&Lvvy*(z?brG#E1Y+{v6O)&-)(r4p4O(L zY%vN=&bIwZ_#BRHsol)Xz0}KWAY1#BPEwQnIJmg&URI+UP3+pYbyVWgseSF*_P<0H z1Bd@7?Ck00UqFPr?SF!2(UJQ89rl#yer-3DPx()Jh!;y0f1QRmJF4xxkH>ESxIFU# z!==Ng%(Uki%FeN%;N7(!Z0fGlfLX&Z1OtuBkV@^NsOhjzIPtbg94SXZ9d`F!YQ|tQ z%aTUPBLNQ1D{>9!PwIG;qxFyKkNBY%FluM*vD3O{TsQTXG?sL(7SiIjqXDOq_lV5J z=7}lXwi~1s0PG`l?$|h+6=8n9QXA~sE90{HR07s{#B1VahhV&D+7xRhT$OOxv?3_W zmSm!bRoi)iwL^Cw=bA0v!hy?m)tUVU z+OpWbW)=Ek+QQ)(Sy5LiLN>hHkxzM!4>2Lv6r6$IMxgRNLP}oCDtn+Ts6&aghECiq z{cijW6L39;gp^~Z+cMjt6lYBvr!_CPNb3|MJ~#gYNz%rwHLn$Dfak<()Nd9v&W6c#7eTj#=w4{id(fG~sM+FX+B&mYl~Y z#>J@H;h0Ny*p_Vt4qIl4DlC#}KyrW@78b60dIvX_9E~R~SjhJjktPJxM$<3}hIP~1 z4o8rS!`Ts^7hYcFQX)Fxu6aoPq?y%Vou}0Ie#m`_(Mm9Mn2O4GjMlQhk&u6wcAH6f zy$uD~m7~ESIvqH6*#Uq{#Dfa=HSJoq=Kq&C-yh87o9BKgsRct~wWieejd-;1d5r%% zG@~7pyM6O`O_$1X0k!D$YOJHYX;*Q)Ug4yW(_`>U~li%_*(De0~zSt#U^iV zLCyUm@3P(f`HPw~Q6*C_)UiPclvUbN!4#SXkCf$&pxm?_8S8{ZfWCpj_|@aiV}(c| zO%fL|>o@0>nGd73LYQog?N~~@OO}(8V2Q{ejB4z};BY+__iJ|HH$DFVVuRGIEMw5; zNV1*hfzU>8@C|;c-uL8Go#MVS5Tyujvn^xUesfrw)Ufv=^#ps!2;Q=+RT>Oa6eucf zloJ^lV{k>91G6I}K9cy@F}Vr zX@TBf?q|QxEBhPo$sS{`lVpvogOxSsbF7v?Bbr;sJ#DM1EKSq2+*XkmZTRWzz#3=1 z=)D(-B-r&3(g(%H4*lo20D%rsQ1FH84D(6SVnKvSK~3%GgSPll%Cg9C)^a*pKu+HIZFknU%BxP=Z&W%}8GkvuQAsp6tM$-vHf!)~)=C}f z!2ME{^W07~Y+MP}S{`&F;5t7C(wrau7;tE;_Wl}OLl*ulFmh|@iHh!UA96sI?t-du zI@_N!$Q?Y~dV*sqU#*A*5&#@H8*(VUNe%N?cvpCN^e;Y37dqyUc??ahV5Jf!cBm9% z)!E+Jj=3`qONE@9?km2wuGwOv&v+?!pPAedkh3|;$$C9Wct>nIxnv2jC7=^??3XW% zF_SG{bUUxo{cho)KDrDRZhyZj(`HchvwPW*jbDh`uGx%&>VO(xILrV?>_;NKv8Zgo 
zZmX22Z+x>mDT%W;Cz*B@E?lo9EFhsrLCQP<*cK*6Tc=PaH&d2wyRsjN7pw2wMJ;kh zQw7dPyA+{h3@@+vWsIhY4EGKqjJ@2eau*2HT;}HlHa9wkXuKak!K&j0)cJY z5v-J@%u@13*E2Y)Xx;o_{ltPX+UXCsU2tpZTxMrtu_fXZC9 zqAbIiTJ}%m1m=U3Tlj|L$n;35Fdz?cbiwrId^4x3lbw;3*@fKe*L}6}5)t%n?O&+L zbVn|;)Wj+ptnFzr8kvGbR%wq$vhe9Q^i5mM`dPTD5kX~N>>nE7G>ma6kpEJ zH>FGiTY6EZmZ48(@`q|&^pw(L0e>V_&~UA&rsKL)R)xZ|ywY;A+>n(Au+$c>Xz8Z% z^4WD@FG!jt4=KzFCjRfnra!;={A(l$>1UA=G14=0MF(^qC&-ev;y~{2WyB~w0&;(8 zT<=cJDx4rt=n^UjQL`M~v0dkB$L|f7flQMS=Irs(`4`-78ckY2X^|LqFbOWI_=g-_ zwNb`#>--qTNMu8#_y|PJToXj630ex^>Iqes3Da`0)^Xb_q9=)+f`;I9i!1WoRyX?P zE7d4)#)6UBRKy@r62_r`O2(;HbaYWES)>aFA{{cVH$lJH5q;=CP`mT&5EP_?Mnn2! ztbhMZrQmx?4U+2LZt??PpOxeHsS5Qou+zDgj%y<}Y)C{y5X?}14ay+_QT|NWV)9Z; z_G0oM^FvLexqXf6$5BPSp|zECPE{~cE0v)H^WjL{=EOx%>6SINnJddnFjN|H(UucR z_t`wRIBr~qh3$O5e^WS*x)z*2vm)HGN4)xLq~@q{%-*`==2dkmskarZ-=|hnmy6qq zqEm}e>+ti|u9n+cIfrRkq)LAiw8%HWeN`N`E*uyy(i|3FdQhD$XUu`FHuknIWt)GP z2rsm{`dD2Zm9}FyM^;wC!w##CL-j(0;_2~Vr{#X9$@};WB^{Z^(dYaM=e>hA{SCkp zc+A2x1v}igvv($OKEL|4u~98n?)8Ul;LHFtEbRRl) zTNu4c2EDL)5eIRtDyh?uoc)}@HqFUNFMA_n-Rx2+aiqS4MO*Nw-;9M_)%nA0k^{b9Co1)!0E>G+ z=c}(vRNQ5oX{>AuA9~cC-?4Im%#=Za{fqPiQe#P%h{(StS-&y3q5##c;U>dV(?$d` z!#4+Qjdgp@E_Lz=&A;=JDUgQ0_;NEc`4^R&{qE3}hG!^C47l#pu{jC0Sf3qllh--S zIdk@=kKf5i z6x3f3zR9@&)ABqN2el}?=qqCTiL)%FO(Fbq{cQvv9l5wf6rw2m2~-ebRx^CYkl_W z%`{-MB+Z0WWpF;`hV&>&$Y)u?W!but)Tmdj0zce`T;0BwC%nGNUR8eP3*i=7T4sOF zN~Coimyu+}g7sa383;p0am@y|20SZEt3!#FBe=jnoiFIsanbPKLkWEa1R}^eWH`Ek zv@jpK2S>AsK9`ZMz!k41A~4kJGA-CI``dyFD)T&*Ub$XleG!5;!@R9=V2308V+msxRMU%nU z2$5z(p#n4l-tPzoC<1~fri>B1IgrgQQ zF8z0#|8FkgK3Ip^%Bm5S6p7R{^o6LT0az8N@3YVU`;-Lk(3u$j!r^q+U(CQpN83G%ivNJscr+ zzL9%><#Y`4rX;AM9dWx~Scc0XALyIf5zqW)hw9P|zMDeH-acSbscCVX0siMrC)-ZF z{YkXiX0%yE3co$SBC3dDaNf4|=%&LMxdJo2O^tFL)i1tpWS2^S7yPN#)%_)<4oZ~5 zFVokkPxC9*sQs`Scacie-5_xV3jtej?oarVLQIe#r>goXJSxCrE7bL%*m6@zxk|^2 z0<^W+I#3V#ph+V^nqeN3z)31moXW|E#X(`f;WKUrxdL_~2CJ~4mkg1uJ)^2ndTUSM zw2p6ep^-zVNzW<~jeue>Jy71XWCc#kgITuwOqLrFTSkVddAf0(HZpBIdsjn3(&`X(d4i)4xv!>G z{(G 
zQBUrETA&9w9+jfc$HylW3uIV|@z@yQK&?5& zXp)9SjO(}1cBfl(b`RlZsN=0Ht%U@gm&t0>UZ%PIWR6ME_TmMZ+i*Qq zCzCr7jq2IwJGZYl@fd;|DRFibEN5FHgGOJajve=$nSKeFwQgOY+V+>>aG{Z%!_B22 zsT@FuUXW*6e};KPCU3@j(d%I$`Rv*wu&Rk8#YzP`ljo(aD}hmyl#$68Q*Wup;6PIt z+of8>4LsGUE-Sz^GApBL-@fkr*9&~Z%EWD>byKq-Yu;T@zFv)EMeW*{^5-bO-B&tH zXDwm}>ijk6yl}~ejYq8f9Q{0$&F;jNwce&UdPTp_GoK5MR_^kiEey>NdBSlWjuPL` zBg}E~Jbi~h(`jMj9p=<2)?n9^s*x)^iSQe8I(i7bh>nzB6Y3C`s zYu%H1=<7X_1S>1A%yUA}d#%=FC?QGEKH1mC zGC7SLnay*tyJS(9smP~*`?YgAn@#SP+yc5Qf&{Ai;F!$jf+zuDP3LRif>D~js~H<` zyE&LZv6(@au5Toy;Yz>u9UVzS*A2^u3c4}nT`FA9NKMjGWwB9_+Qkd5dkg@cy)@UF znXgHBkNz3ONo9e6q-kPVP_Ib+(2tpAC#RI!{!x3|I&BVdDki~JNQ;L7m%&bb>P+Eq zO6&n8alNT#+@HJT*W^*Ty4@MDfpz5_0Xh}w&@~KnlBj=2T0Tjn%c|2d-HEIsm*H3~A-?}=cdnk}bTI{I5FmG8T zH`K@@Tk@PEXX13r8Ol`G)f^v^o-gAj`$X>DYCG};b;HG^-hMX;6G8Tunvs3F^s7{V zMs$Cd4E1$lc1LSONer0xYnaLJ-Pi6~g1=4_SHQh(iI7CqNPqDp&m*BspH##95v#G- zijD$5ai@F%V=~$$1AJ&ig)L@Hui>cM6^RYK-sZ1MYNe}*6Dn`+dC$kn}&te*iK|8=-9t#-0NY(6W6w4s`0C)KkGktbC*^jq9^p<483@7q_@E-m|zEl)?Xm{QF>Do=)Ne36J9@`t+*Nuv}h<0uE31Pl=r@M znOpG`S_QJ(jQRzc1zK9lpvwGYlD@4k!(VRmD8`1R&lz3Sy83MYC`>Wc{e;B4m}mMs zmo1j?zw^->3v?&b;Qh*7U~V)yv({sIy;zX4KWY}=zu@UIM*qiJVC9(u)FCuN~yn3p#nE59T?J}TWX4x{lUI}VRdlX*yW;gPdj8xAFg#oc?Pq3f({iMTapP8Qt zf5cY%yW;Xz2=%!u7ykLLc65OOFSU}TF;@K)-#toUW*G{v4iqClD>Bxb$`QsW4?;d) z8|z)3zCl)5$e6Uv(?{Qz8xFkFOXgk1=OlrwHZ$0xsM7oyomn0=eksJav8prhUnB7a zkP3<9*{{c6$I>$e-WgGTrFr8R=WCOvA>uN9sf3z&t>HJ3IIzYbE|gT8bbB>O@RNJo z_%0}rW>!-OS?^U*NSE~Dg z0omoGV&++~OqLAq}<*`QlEZmRb?z zEC(fpHE&YoxPm^~)HgP+ig7>l<=PcLzD}eRa`^N5VK1;XZ30d+m&? 
zZt#N2;%%6QS3}jptr$1~Ojh}K9YGyn!Qc;RCnEP_S!IhwOM9LhIdqjtt6HXWiMK)$ zbrts2TU>RtH)=l8HVOlm9nbtH|R6$^b%~S3&cF{MUiI_Ai+8r$0)6 zO~p+G&18hzStP6Txj0ydsR7kLW1wM^C|cS`l0O99+3s%&sZ6xQDzxxF$(w*e;WEX~ zx7Iydw)2&o+`GH$83xTjX@PuVwmlB`_fw|wtd;vX=u{z5u))_e*v!{bI&fOBL0pkr zJbd=*9$j^nrZV#b$Z=c<7pzl5TfQ*O(TXb4y#D&#&vj}yx`w@4!8~fV@2ix#;Dg`m zOX*H%zGEvwNAE?P6y&|WF_M~P#F~dYNjmQevl`{)Dq_;rys~K`@eD4+ZM2>C9AY{j zYt$ScBY}M8FWtx%R3cupUBuDxeeW+N=9_EF)yX6f^NHs=C$}Y|{S#)<+$-z%ZLrr< zlNi+|+sY(K?iEv<6Pt=J?X(!G*T0d}8HuasZ41vXW~U0U9&jVIlp%ZX*#xrML~Hn{ zUpxeI+gK}`5R<)~)w1qd6_u(}!^HvgsGzm7^m){N51`l-!26iVQ$C<GR9+l<70SuUjDEkg~{-qzvvdo*)mpBrOpDiV9K#`1cW1=&|X7S}5 zD&|^*bW(tox2uBZA8H2kO{-5WMwuao`zt!_YQY zv8|91oU&VyMhaDCr5HYRey&wK++C4HcTR}QIg$GD17N9(9n49R#E5;BUb;3qwt@iA zbvQcjo>SaOtyn`GK!QqB+|!?UN|)F7Pqrf5YZWRjJ9bTrV;FHuDc&`07Q$mPB8b|P zy>^K^&872}SxkOhG3aV-5 zxBA|Z+W!0|@bE>sC>o;$-aJXjqNQ?5Q&Lk?92#4ZYn%8_J33Na4Jmx|YVl#5{kFyWxYtl9q!Fnaw5U*L1T*JDgQU3$oLg%(`G z3#w`_K5Pt&jd(GB(}Vir9KE=;qW%B z0vKLD=Qa^Ijd@&O(lzF|#Mz-t*III6HB8HbtJq+1*91yYT8Xs#L1G4+9a=rQ3vto2 zpxIQb-18G$)+hE_k%U%?v&5x3?(dQK!&naVU-8V7TxU;?pDI9bkc*<*mPp-?Im7hf zlZ@&~O@q1|e{Q)&{t`@~A1-VQ#9y+qLhkjA_0Fp~l$F_4Kc}Ro1a}p@4tYK=rPRwb zLQ(O4)_ z`~@jJ?FU2=5fqF8&6R`TO>)mlqZfEXneo%P8)C36*PNJG2~}p_9RidH|}joG_k6SUN764mYa-T$-{Bs0n%R zDYcdrl9NHk}X`6g;$r9M7f2B=c-h9 z=hSA<8~el0IXh?5^i~CUPS%=IJr3+=<-WtbQDE24$fjjixAAjQ-O>XVb7A=jV#7l@ zd4X^Vh@r^KeTUf1F_P;pNO-nJp(ETO`!^!-pcV^18JZhtH9Q}$v~3IQ(nDy^=ztz7 zbcHTx@T}9z`SVe%hpJe?fc)KINgXvjPoU>EhiYpO651p<_U3qp@yuGF3%u|(5M#QkD)ZBxB{^+b-#*BFX;{3hKi zs)yfyY6={yl;gc#cB3k(<;d;_;=j!)?3U;)U~)@bBt6GH0gLZ8%H>CIw)RTTdXkP^ zQPj0O)w@i-_4XEC{=>BFH^$xI9~>M**Oa^KkBxADxv{VYJ7WWAqR(0bu>S_kVvXz+ z2P=O}q4j8qeh}O9@`g$*h27S@D(R~wI~J>}HdRO@Yg}=?&%+-A>vCO~fA;6zXyjE| zAJ8|m!|N0xSrSe8?iUBk?^BHVg?DTmAJpqn73mV^+)k1rNe@fbqALT!;$_s&UTP%n zraQ}b;QF0jd?gjEzBdp zEcgFikiYjqA(v!s0a9U1?2Nz4ecKUp`;-;*%5~;$T~t_KniH zQm)Di2HkVtinr6%+6iM#0i(nx&lIDT5Llc)A-nSv3Ll4_)?gx@d6NzFJa#p3VXE!S z_cL9?v<^P;Ac^?>pjbMddHxlfT?v 
z*3I*~2vHckuOM}FhDy|sTnyUYm0`G=Csi(2m4b?^Tp~*z)^MBiCng8W2uX?WH z#>5h&sD~C`sPoE5C1I2A;||;*wo0gxZ&})F#tF@s)ZQp}hoCLTUP1H30Erx~a9>g< zK^65#!Il*j1I1FaUQ-qz8r;@RxQ#v;zigSq^H7^LK<%(0CYc|F7>_BosbdnxYtz-W z=xRyxp|WYN)I_PZ^NomnBpQ{Tv%i?)z-vq9tmB?FZL3+W^-VW$I;8CKhJ^UJt_G~5 z&pV&`8lWKGW;2I^<$upSK)dZW%Hqb~x4^h!&zEWBVc_qx*7$;rJlu_={94W5ZSx_% zJcpAnGIaGcfhkLGq4FQJY1Hg5c9P2>1$GjBXz7ezWpZlrg5_oEXuR~t&zPUK6={*M z@`o565)PniHAxBES9m#+6vd|TCSa5iTP93Nh8&tlsiK|U8w#KaMX4n?Vuo3quDD-% zw*2l{_`IkJkHJcwh56tVnzOcZ-jt@HP*=W9*N1wc)B5=_pF6p3(oQ=zZQR^bYSd`ev`lF4l4A51Lp5$BF@vJBuRXw{2-x|js@+&V z7FC&waQWB5apcX~TGp|$82_#L08*RqmIwEgp|ZQ%sv3fA1#R?Jv#RCJQEEWH~A zz=nLnDv4A-D*cKxkK3R@?1g)VXI)sqrwpGuaWb#@=8It|pfrg89YeFdR?lc6whv8* zmi{HN7OAe`-<3*nF5}hLH zk9Huvv<|;ns?O9}^c8atOpQ9n1Wd$b_TZ0&n!4}J_?ULq9P2DOzLnrGm@Q3_Z?3qq zeJwopuu;-0;ifIAQ_5vgyNK%;TsBGxs=Qtg#IXxv?y?#fvTn&BJkm zA^iC2-SpvY_8ejY2-pw0_t=Qo8^4}yMzK&ft6R#|s*J)aAh|9tHz_>v zIsfoRnS)jJ;yR2{Ked6uY z&sVdA=3;?}nki{l#lK8Vix0UTz#ATe`yvSJOx5Xah=*T$lvd)%aezu(NjK8`3$iCV z9B9uu?)p5wG*6W0G9Mtg4r}s6-X17kadF8qly-V@H2K%2f z?dr|Ku#~DoeI3uleum4W0#s?^MGGNd><)EHZ_ETR4)(usRv za;}tbBJyvci<1ncegp@sUIajGd_0bIs!mROePJf8f)tzx7If9tH(ng!F;*+54R|CI ztvh=f*j9xLv2*9v6#U;qp3emA#J5La}we>>S-OkgMt3a050#zvz;9mjX3m zpHsjnxk>8GmKcwuU8-|E*FuM{rn5W=8N9HdNST&DcFEdb1yl(^5ts|=f1sYCuz0!D zf)J%!6h- zBL>^z<3Fo~m3ZV0B}%latlM8v4nAR2Np%-wdu)izu10FhnNx}3_Z&@xDLvo*%oWzc zXrwM>%1%6^*&GDPwpk!AwW;>D}!$xXd z{N+8A%B%O@)C9`fO#6aic%Yl8fo@RV)XunNL=_IRCz)f4Nn7&nenDDlsZ;${aHsE` zgxr@232E~^ykNB}f;ldkwy%gr^{se(@fa0}=D0eC4=lSrgMKPgRAtUD2L!4X1%)O* z8+R1gmfE`*DqQ2;k~y6Dh(YThj0wlKpk#$1%(ze{E|vZ-E{j^lST*G6>O%~As1jmP zfyj`D#kD((YqfXW*6v2mquqQg=E|R#UtKt@h1md!j6H4l4wG6XBVeMbL2?LZsoMM; zI6EynNP|s1Hswat^py|8j<9uD4{BSGffS4~O1`(ut7n8w$65>!>}TJC6coC~83R<9 zeaGHggV6vTv0cX?gpYlGs`LxnkT>JXLKKhkWN*NYn$rmC7^zgQFV--}cD?oe_5yup zsC-a~$ZZev+dYjWm1-MUeq`&G1bzn#b5a_n$a@|ur}fz;;nfqrx=q1?I73)x0IQiV zk5$1ITQ5mJwS_!18> z(J}qXNRohyU*P_^unwz0B_Wy8_`~R;7Ac;!LJFUjj^@HFZEgo>JMj88gh_gD?o-w2 
zQ&=R3D23k8;+)Xt571$2P{MToC^{!Fo;!wGUC!JO^Xqh=m2G**v$EI8ux{*&hYlWM z=Yx|f`~p-UOqLckx`xK*2jMV6EN(ddwK-9#pT2U~p~-||Gbc=LULob** z8S$sQD${=z3nI?Z;rGBn<8K?MKlcmjKIqU5FQ^VYV?7(8xugn4I z90!}KeYjJSop!D)S=3B;O`yV(AkRcgkJtn_q>g7fVyR&2Zc#QOgrzxRd9P_ifJuF| zL7+BWW3@?ds`K6fU&F+Lq>#F0Sh$gc?brh^q#7+2+uMapLM}x4k99443BNa(2HSr!XY=y&Lw{I5^KT`o_bB4#qJ8@ShMoGRyg~vODABq&uSN#9He{-!^ z#cRUjYlHPJulY^q*9Z9;IKCZ~j)|_z4aMbS){#k#4yw09dQwVEgp$A747%y-hcj_U zztverFTB5S1t&?|Esy}*wA0m-OLmgIRk!IfQ%$$!-{pq~_Kdg>Vl)DVe9^n% z7|a~MIg7E5PmIS9#177y3ol_@Lhss{R;p$ujbotydnhZ5Elq16=DIwIQ|SNj>g#XSnqO(^+wHT_Bx8hQ~Xz4=n7q?P~R^qczsbEIGq-_@F&`L4Ujw`yCMm z(}t)UcF@C>K+tdtCTH}xkxEj6xpH^?Za%wKQ(Izutg{i3aDdn3Wkt*pCYnGtu0I-4cE5uQsn7v#SsO}=Ht45dXs}P85U-<+eV0q1BQGruK9H9%N{q(U%dEQ>HQJ5|N~_{Y@U=`N~&V_LVfFOX_TvFEgj<3qk&0TV=gtptW{wef8gGVjq}U8B-~G zf`9F2L~4}Oa(aVHq6v}R(QK{`(q_%xh34OiPw$V0Jm3lt|qF#hL!GFteA4;V?? zFArur)`Ta^yFYegaQqE8Z)Hm``YBfWz^ZN)w2c0-UjEw(*4A31<9UlR$7jUC) zO*=+;jpKUw!*G;3Eh|zUvBHWewn~RkhN84l#f#c9G)9SF`~6El%7dIqOU|0IY{F7c zI=96;D^8Bbykq=6D)slKH?9Cboc&!LtQVmKJf~%}p`Z$2!LkP3e3G)_%*`aXCV-q6 zm=(;vzGy#a-SI$(Ptr8ccL6AFESeOI)5QPuRa55N60;v!=e0gjH?p8?4cviNV;2Vo z{^7Wfjwd1&A)U4ak%kMF0EGx7$6ij#qxHJdT(hS))H8^OZqI+Q$E3@k9>-Y*{e|1* zQwnVsZfRz~X&*(X$dv)pM!CgD2~Rk&G<{0_!8O~1>?HqG48bO7dZU?Q!2oNhPiV~c z?#WMyPe*OKVe-06#o4PI3ewf&UjB)AoLvc8dO#%oy9QQMoajxVhGZoqmc#vU$hB`< zntAPxSm~;`#FJA~JRIE3MRh-7pMkw;h7d_jp%Vcy4BOY{iZvo;hZYPr?@jOM5GH=7 zsZI3-QpV>%sxS5>30X$nF>YOWw#%aEa+Q2|zMIhLcYgVE7yJcYfSyM{iJ<|}(Vn>S6A3tEK_iq5g` zL7h(1jWb~UOIkjn3fN*ka=5xGyIvIErBG2iq|h`9zxeFl9?y~o+?|iCcp>&XIAOD9 zVqT5eXGn=QoBU(G?0=%XgiD!EuRVo_*|4PdU~mMUK+cUMul(XvI(B!xPMI=B6l-IC6xIPaLkLFM-LM^0_LidAW z){>etbQbJUi>;h;(a!L9l ztgVJDNND6devW?I6FqFo1kMC<5x;5`k5RU@HZ#oAEZZNyQz-N3qNbhDu$?Rkl?Kyn z+9ULIEC?TMNUE{}y6CtxQMweCgI8aQr|-9& zgd2PXc~MxwS%h6qQO~LdHYOjb-&FE@cHr5Wraa zgz&4XoZ?9I8FdqP5q<0I%Pc1h+HX<^2lz^M5{&t`WYiOj)8#gHUXq+h`N_cGw8Vmd z{K!q57a!!$MKMwWc1g1QQWMc@!7{W`5Ev>MOmsL`39rF9_srm-L#APo zd^xw!d)!&NJ`WELsi`j0V-NsEn8Dv<^E&G0IPqlyG5^b#BwTWblEdr7Rq}~xf!#NV 
zt>hA)C6V-+Q0(;5S(L`*>7~(+WRyY3Vj3Yq9WZhPuI@^O{gQ=!!iws=fg7=;YUNB( zXYCY400ssC?excA&^h|l({~A6X8dEXwOVCFc5@D1rZ(|1qPziJ{S+-?-6%O?N>?88 z6;`vpDl1lRloPrj{F*Jwo-oUELrUC)?w(_i$a4F#x%}X}3*JCXUfPld<=tx~R5Jse zo=)VRtGD1QNiWAp6466QKB9CLSQF48ZpZ!6*lSc-+E_jh%9)Q$bSn=`+wkP166MRu z^)oS7tD4P@y>@F=??j~Ot4j1nK0Wn(hFAMlE63`QdhDt%ruQHvCtLvdJpe(|;a?rF z5at##?#eeY8CCmgmHGT`(qnfAT+PUZ1c{z zS;~)++aVq>h;UQ2k0&RREXUN(Id6E%%{2Ayyn2MqmIHwZMwHep3|D&<;2R}bIuo& zzU9guj5v=<8VQTtCp&|i!aT!svg?;Q*Uv;fhg~sb9EFU!&fe6us zBfX^Fk{CW@X2t-SV|Z#7f&C4jxU(7iMV~Y)*o*<(2i<>hE8Fg@n$7G^s6bUFbY;e| zshgeSnYk6lfmZZ5fC~iD?r=8;Y*ju8dQc95PEnIrBK@I z?cp@GE1edfD4=!pl#tS(snT(;cnS|ZGoj1E)81-4;Pkd?!~EQ=+bV2Hy9`Wk>Pn` zN-XGZam>g!(N|A2(T2jtjpT)EZh~vH=DD1))6!yF9*y>7nJtOFkQ=EkO_2>~lrcWF zoDd8pU`Wha73d&&Qnc^h3gXV=Yq0CxP4kkVCcCbIEg&O2os$x7Db)FLCnT|?*L6r~vn!-Hr zj@p&X0iPZ-ZepRSgwb^YW}7yXm-?clY}`U7FQcvG3%(?WAnXcW2c6o$yT`<8^2Z?Y z+z0z~;)$t=e%nvM5m+w-f(LE|!-f>&w>yZcQmg-IsKK^`s3G@A*npFVhu%QXd}E>n zl<@TMYx%svB0mNXUCK|b#0608Zyf3?F!3HKEc^MT2~pw11KTpQ&X0@?&x3RHZ-TF_ zJP`Y1Zs?PoH8m7tTC&;5ZB+Ikk}d!L_l&K@1dR0}S54UgvQ@HxT78lV2fRFk;Y`P= z9<&iNW;n$+A}wr(D%CbkL^+|?_yH0xmHWLIUG{f9x>|`10UwrR=p{PPEd>4s@cR1n z`u*=BhYOT|tT9Rhs+nR@y%rfxbTQ84lDP2+HWifQ_cXuIz_SXXiw;2W43j zHBW9y(Y*$0IlkYXgSJ7gGtm%Aa@Kv#Aa9ZPMj@T)C7Aqiqavb4uOa;x-z1woPKRFs zz}x-2yot@+I`*G~_qfhn)gsNlWifo0WQs+ceQOr&j;V`8Bm=J41r=Y4Tf@NsIQ z!NA($i})w*bg;_-5lf~Z$C&$Puk*Y?DxmZ+FyUWnMe#-otXukk zHcj!oywu2jC{~uDAAmnOZ--31*&f9WRUFp{bGKESwg*7as>IwV9IEJlslLB&EE4={ zF*MCSL0fDJUi9Z@Yy~PaBB_<8bfS!vw-Ukiq(|H!U}0n1E!F&1o3@5}Q=J$^N;dr% z1+>U*V!fAyLC1q9of6h2(`jTTMbbe%h%lR%*2=tcs19NVO<>= zAHjrNnXJs7t))Ey>e9pwbX@~!3&bxRc-cDx_kbWJ1|GJiOtom2HFuHl-}rdnB=@xf zIsp_y*sRPn)ji@zkK^2c*T&U`A_04JU%HFm%Z&<8fDqgG#JpM<^TqVu6-qob<#cj8 zjhgRe$*E_yNYJsp|BT%ENQT-QjPe~8^Uc205P=vnCHtHkdU|9tH6`UkR=u;>8jaYEp(psc&3C5X#W zl^e-1UJRtJWE^*sMgdBr952O@N_WN2hx(RT5uwo_ROl%cwaO0()kJ5LL`5fBzmR;M z>7^mo@;G%*tf|=0hv$*>b6O=0K4CQsWTy8NPTZ#`FYxeq16hst@Z^%Qr6O?U>-=Dt zVkld!D|gOk+CQ-yA2NG*a77R-BkfjYz5_btmmV2=tficAgcn-2qL 
z@&bhrYm6<~l7M+HED;|^Q8C%gc`u(BrNPXt4Q zy|HonX3b^(g8_?FcWC{`tRK7oQ8zF3d@;0grbBlq@t)0yLUeo|HDS9$=UUtM5#72+ zGdV6#^#Txfl0Ph}=!lK>P%6Ja%Xa-Fi!v+1p*SiIZA-LnzP%6ik0vd9N00?V z&)o3M=Jsj{7C&;?-HwP!WITU7Eo@b@2~GiSSD9bqUr@ma{}Qte|7Ssv{%8DVe|<)d zABZSBjRFm-#8k81FP<~fr9#;9g|7vlCb2tuwI=Ntr4?4$8ml4?cUv6jzf(nl`jDC2 zljF|w>?i)tisX*-6i+5OdoSzfu_kqD#91C37OLcT_R4)+giP|)np+TvLP%}Q8^C=?wN=PXTFN#!{)HO-6toT1-Shtt}j*<R`>iab;RrsQU|&&=_mR4Oa54Z>?ss9(u$A7bR1aa(pJ8{ zz9T@I z{dMKqjjjVuPP6lBUnR!*?Jn8JJC z!N{}AAM7^0;Oz<~QNt9S{)dZ*WMz&b84SqJKsmXRw> zJ8C6I>sY!++z%6Ces_2Ba%U0`6w1bvD96W9L)9UXt3b+mj_?5oP*9FQmu*MX1(EVs<)a2!EtP%0*{&;unkkYPbjY`MVgO!Ya#=A_gb-cmKJ+W%&s>waRe5oS4-;{3 zTR-U$iIRY0)zB^)wp9P*d@TKv{p-&^K*DD_$;_wfqgk<I398HKX0Xc)Skpy>w zyK5=M2_d*cp*X<_E-l5K;tnNPixyfcRQBPw_c{Cj%$zxMo_Sv6WoEvU%zW>4uk~5i zweT-25hp9H@P^acoT$~?ZZ`|T`Q4*Jp-PwC!BYKxDg4d8!GzdFle^wEfp1!`_&@5sMJiMAd7#CU~)ZNFWAdOp%wT|D|Y=xm92nQ?Ph zg*UsyA+L>woxU^Eu%Z}8RtEV=)B0)GotfP56$`&gS6Q;(LEi+c%e$k1Cx=S#IsE&+ z;}uz#*_e6UMGop++;{1=hJkCze-_wvZ%zf{N8lJ*!7@>f=nlchqiPhKv$5v&EOhK^ z5yk6{A_*xr9P7nQ_nAuJX5{^uOwTC^k26KF?o)rMV> z3V8bYc>CjIy36_zP9z< zKNLRRmTxcYR)blSAIiqQ*=7C4_FHG4>bw9+m$%GYHUqa&oTTrLAi6@EY!wX+s}DN3 zyHr##DLU*_G+sTj)0#0x^f(oCJo7eRVX1tTk^3q-at^gU6ZPHRr)`<9UjP=lj=Gfb z(x*!2O`MY(+0k~~)?k^sr~ta|E;S2gxXWy@1amwvLR~IbAjF!uyrqgYTU^JRt!lnI zXVw<5D~4Mrf70V4Fimu6{waq=SMA2@TQ5tKAle(`bZ?GfRuhf>h~3zwDCqzj77qG8 zUilLR&XcHc1%@Y43K9Q)U+&+y{Xi*Mc8d9Bqy8$g3V!aOdB^l@o1csf{_g3otpp21YP~66v+asg z6cq~7Jia42_!GP|&L)(Js4S>n;m}Mpgo{VqFOR&ST*ZF2TBE!q8L1HJ>K-~St#O&7 z`d~0fuZ8NXICIWILYLk-XGDwn}N@%wN`+fZk8yn8S}v+h}qGqlpDu zz0bBg++wwW`jxHKSX**6e)7oLN0(0$kLNsRK&^pmnzN_!BzA_=)J9t4+D46PGGf>P z$@J1$#-vbQ=5C(b8ngj6z-q4QoA=Ku0 zY&c|i!k4=1$%e8w`xcDv5J(ilME?X+v9$L|7{cuTd{zJ9kXw(u{yRgg*sh7j{4T#K z@v1;Vcik;IljF?!l~xIYIU!UyvN)H#!5aF`By20y6bnIa^X=EF2*4G>qPjCukhz?= zCTmAeNxm+I3dDg9yTmi3dSaNOm``8Uv`@jhsKyWq`bnEOdmi41j>%3B6;3^B%x>?A z{zW`T?sCgxA7t>hg%ckc=e#&VB@XO_%5W zh6>YOzNEcN8GbMgze--S)WDl@O#E(GZ$mGUY-T`4&4f4NBi`;@(is-w@}3&B9HiH6 
z{4!$V3l*iWL8{Egn){{2*OJtD1}QB(@S0u>_liv|d{5D@g-mQoptue*2pwOtNa>J3 z55g-S@wm2H@ChJ4q+pl&7t#;gIN;Wm2EApjADBpKgp!GgCK$_p9UN1zA&|A4frh!Q z^s8;2(lwi3ld?^5c9lK@>L>*j>Qulbft=Klov5vJU53)cccl2?R25`IEZXjX4f;n z;S^N*a0I~4BI&z=|G3>LXx|W)HAN!^vbbk0atc0{NDqY^mvM_p2P~zEShs?LYYgQH zWeE?YzE4~XKZUP9vF7^u{j1a0FPR)}N-uu4oYoe-G4w*G^M^Ma)4kg5UG+LssM`10 z9kln_5L00Oe3{em)0Y|22FBqzLh`peW1A5iGFUb2bf9Tg01iNBAz`kX>3H)#m8yVg zI-Pcx5L41occ) z8=$YL)Ux=Ow)y;A&ctt7zdJvlaE3h1nd@qrvcVB*BOmP|8kxrO<1cw|UTj|ZTuVuG z{LpllO~92e>Wpel-y)+Y4pS zq!bMpe-1fW;1tcXd)@y+7Jdu!ib&8==SQb+i~GfWQ>{>8rvk3_k3Le*1t2y<=4lSj z};PfVVIB6w+fBj-MO`7L_>?;{nNwwWA*Ho&RQ+&DoKzT04j~mfkUjIaK zI*!%g`cyGG^-YRG{;3qW(=cl7OW-`ewB1TLdh1uH@3R*jcQz%1&&*q>!ry*H-DCY> zswo@|Z~Mylwc#|~vqrW%BwaB4!0I;S+U4R`Y3BUy6*^4HhtyZVb?2qlhw(CBXI4SG zMw|1%(BCUnOtGrRMD7FbG#J6gXXcq%a(b{+!)PN>_;IBP0Bbqqinb?zJPI>c?7cdlFNiB{oPKj1u_qZv}$@MvI9|fhQUxjlLJQi81UzON;= zb)-x<#(MjKl!YpuG0mzI(Cu&o<-hJ&z&i*B;B3C0DVrrT-(B$?o)=lH&lRODUuiX^Ky{@ z@KM}uN+>cn8of3%X@gRIPls?f5hX5ug>HUC#np!uMWl?ramE>rpi5%2^dWUUZ5xQz z)Y$5b?aSdic<;*YT_5MfMszn^Z!JTGb#({Z9FTEhHVtcv!wl9~me226Ta>r;llbj5 zVrWK=gxLk=l{UTjv+Iobksb4flrb+ODzUE5k)U8DV%uf!F2`pUk}fS4EfewuWo;X^ ztsU{!oB&Q;Y~KFbbS6}G$Xq6Sj*(hUJa54eLrmYz(+3;pf#_I5mKf(mBi~Z^GN)Ow ztOt0zvwLEE#_YA_;GQVF!8CjRz8q^`hW|~bz@~4~K&8&1Seu>$zocUw)PpK)UmSsJ z>7ih1+?mcYSnMtoAf*Q}m5o2xr^66Qczhn|-cUY?TuJy3uR6ZeKv$&ozr5;*g&|B> zQ1(B(YJ4blEvM51ubR4;v)>usBljZVxxM*5hYcm{(q*+VJI1RZ@y+F#HsF#zeAl{T zXPq4?^^>HnLWz4A`6*_k*ZE){F5u6D6w~*sSJduLpvZXHX~qJoFro$Nr)88@Rq5{4 z8@xVr#w88_?K)N+Q@=M*TW7qt{Q00=5X;^E8g!5UAMHZ^@Eg5*n=fI`UfH?W zHFK4=S>qZNCW?OkhvzvCOdIs1?62p~{0G+gHt=wI&*P_tRqOR#zg#+>%qH5`Ts{Ps zC7Rxnhm`55Wk`$u!0~9K&}AVRFF=gLY2wETUT=ON9XH=g+S~NX%m79JEL5AY?lBU0 zm*X(-(l3>-*u0!aBxlu>h7yJ~CgAanc&vT)BNpW04%I@LG7_X&Pw|Ks@(@)vJ#EyF z5zK20b)HH&@sO`W?CYuaQki78zXL`5+JvW`z;*cODuap)SR@aqjCkx9)oFa3s|HQ9 z{GD-REh5uz#}szVxY7={RM?Pe3uxJwqeF*TR%Sk2NK?H<0^Ru`Jd)%_5RjYZC=Y|| zJ*TYq7iFi7u3Ab%IEVx(l46>!$ksfnK5m#FS&2?+#Ab$O-I~7~{eX>%D)unvNK$Zj znW4A5_}civ0~%uApUq9cXEQyc?a3gV3to9B&U~z 
z_n+268kWPhWfb8q89kSsJKi9SMSR7e#gjosR!56lxDV%C!$_Hf-KcJBB()&mZpEw0 zIvct%IX>BDJ!jh*J+-KV;s@6271i?^ijw>jlJ6%_g2S?L-mM9Lq zF&6{JF>t3i@@M4NoqKBHhDsd-ZA)u2%w*U2^8%2#LpG|`VO1C_#?O_)a#OO{hskmr zvwy;yIs2pAxTSH%pP3$zM-Xg4jgUprg~ak;2Sbt!i?I00J$Gw!`#ydG%Adp@DMW%g zQvilvV%X_TT}+-L{TM;Y9RkmMQE_3YkFwmARmG|#tpJ(V)S8DGAzjxY1*@X^y+KMb z+LA{{!>)?)fO|s#1xUQvBp;?&ia;(ZMY3x?P1M15!J!nAa8I6)61!EVAjIMju zJh7%Wa(&V*-IDooPQ4ea!&+miwx%r<1y<B{6W4(v>QtY_h}O?fxnr$Mz1(!C(o_=uzbk`b}74skm zU{kcp8i`AkxUo+IZu+;~g2pk;~2Br?thjvmZWs9?Pp3$;+v{?MgE9|nr@LTn~C9G7yZx+H7$ zQ5ERm%Ek{dr1bdn6}0gXk-B8UJ~8Mf|5RAW1s3({HPm@B;W@9s7@^8ZLg%oH84v+Z zO)q1?KnrIp56i{^zcpibKz~G+{<|&(9>5=nF*SwD-m)#)W)@0KjtwwP2igO%19STI zXy35*BusoMm=Qco2^_IR1x{73OPl%8I&(1mtk3bfS3D!oUx`ZP%Jurxm(m=I;tVM8 z5-E`~_59qyAU4N%q_>}41#2j1esy$tBRvzE(>Q7#pr)&xWqM_`Eh{ z1a(hCNBSMHEZ;oftrjTxLOz-?648$VI)0r|Id)zy1w@G0R@ z2{cR4fbT&iF4fbkI)n)xP|bc&4vv4vUtF5HDu>rvta$itvUgI-guv&ms;V^~`E4UE z3S9+lnF~AcbUJpa1Xs#!+kTIZ7B5%KOGy?*nm*?N9^j{CJu(W;W+UpPb;oq4Tl0?~ zI#RjMWLlv99eIx4qypEDbwk>l7c@G$96dngT5A^#zkG8Q5RZ-Ugom#v#*t8y75EGANjOm_$4s2(-?W}-HV=STgzGE`Gz=;&`c7BICY12MkT1T8i0hUVE_)kY<7{#~JCBE5r4 zd^oGJd(1JbXxM)JIg`xoc5#CRn^r8i8_E^+!7)25G+I+}>0(Y*Hc-j?dWp(zh3bg^cZe_n+&4bhLlDec@``Ko-a2NaMpa&zuKU(Sk zr+cdM&}4ZUsuA}+$aC=f-NXFh+H(+P;Qubae#bZGW@S12EcZ>{Df(@CL^3^BadTD- z%tkyYCCB4uT-A!tr#^VMUshdR0KFV^$iA(NUV=`d1U6ut9VZR%*A(Yt?4A0N(~=|C z49qheqrIsy(lqiYp){Y7mUl&Z|CYLu273libanKrY4XAKE+LU>Fn|utPvus6NdTfS zT`WmFRGlw5fL>$X*l#hEbXC&!$L^l;!QX4{-+rMFOhiwqrd_A3y#UMAQrt0Bwav}f z)Aw9ZbpUqZ2p5y&Z-?k})dO_mz1sGk@uZkyo@^{ou)-cw>}Be)?Oh-X=KYwwB3<$} zs(XwbC#XEwpvX1hoy^zECRv8b27Z^wnX^`YZLiAAgnU=%u)5MJqbfLO!u1gy54lgw z!z$P{vx48i?x%*HO{PCthPgGLZ*+oqWfT%dEdi`H$A{@1&q%HGVP#LLX#JfLi!r6* ziok?cYr$+vDSrKCAJZx4R^2}6MaR+a!#mCsZqB`CU~q}7A1YO#epJt?K#!v%^7VD% zgDa5W4U6KxKQI2#fd4ZnGDJ^hd$ftU!rEeFRjMlF)hAfpP!)x0zx==b$`twNjaLmT z)Z}Di3^ll$vdY<3oV%>7nJvno^qOlIw5nm6D&G|W1M%l$c(MNTm;Vc^-6ODNZA$sx z1C1l_nY>F>pJkPUCVPTe$B6}Kd|eW%tR6zZy}16}tMpUk6U^m{kg2`=*1tk4)UxzM 
z?&IX-mu$sv+vRi&9M16t-&MW<4T9Wp667j!k|3Y1SzT^M$EoPNgY{m3-p%5sx=3X- zw^PyU@YnzOt_vw88{)-Q#tV9%KtU;KgU#w5qvz`|PK8futrYbOp~P0$4x7WX?hm_M zAnuu?nX~LtCkX~Tbe;%R0S^fgEUe`7zupE<|AlqrJ8Jf$7gZgyJfigMz>|6)UparE zf7Dy*868cT4fq2GKRY$DIgZi3==ti)UpO>gDv~Rd!Sj|~qNO||1SqP<2GWrpux+we z(Wo`^bk@T+EHW?ZXa>^?tTSM+chcxA!m*w-ZAUtic&F;w6zNTrf_B7|{}rHQZouS} zrtrD7E~Le5e_^vZSdaI3j-T1F=#Xx%g?P>M^ElMKbF(BC1@m&&;b2G^o-2_l{qP;B zP!`rcH4t6sd?R^1hiZ@p`LbJAK;h{QiK$-7ApL??@WWp0rPO_*?40t!FGau7aoI4y z%(XRCh@y~{*=DWQzJYkrBfL7>;~950;DCYROut~hJT^rFnl<6qt|Q~JGTQ#(in)HW zBOUW~&_$og*6&H zXuNCJMD=^>?O~v`M)dRB>n+^GLEExMaNrNwlhy^?63`L7wS2&w`Ck*^6R$#@j6kBS z&BL(^GPf1@oYM4=7KW|e*Rf`S9XO?`Kq;<*OAF?Z1?`!>*%j;#w>f@xEdLzoIK%$t zFtkP5yU@Z}s?4wWaB!xNG30_2s%SGHU>Ekf`(V0iu6c$&zQ-%3Z^n|9@)D#S?v_&R z9|+~KQzMBHx+6?_OrOats3&=WU~m1P@EY~%`5^)0+lc|wgN=EbfP|$%=O&g;Sc~(|s6vX7Z_1GO zs2sRP7PK=<3R}!c*)%PE(OF}`VB6=5STjr7(PQW#0x8CCW;g^ z78Bzf0Dt}h|1y_SW$fYu@9H*LBhi)S{?rOc$y`2!=-e%4Z4smD^9_gFQQu{pG1Gvn zF(&z(4>jHh3uef!fs=dM{Tq{}a#)fZi%!qI5!{hW1TdX5tQO;wPNLNM+k=#g*V@R* zwBXymBOJpNBpa6ZKV=y;$o@3D4* zmnM?`OPD3}PkC(vGi*ed>PUDu?RNN)2?6&O3DPK*|au z{^GFW9yLVSA=v?8p%juNkzl+8L?k{@0j1WM);z5U{mj@-Kh%}jI*efIC--TVU-cn{ zrSOkohEw}oG+(N4*yT2l(B^WH}uwhOLled_gHlaviLqoQTMf76O@(v3l~ z`VspGo+ZKK2Ei-hH+@gvmmz^XGPBXr_SO<+yCCi|>W?U4c3vvf>> zlbh>jy0uiK2Pyw&z>lA=rmGWFb>GPM)}CuM&k-1CXz0VT4RINOtL%VIhu@?Ax_{)F z{>=Wr?A3Q8>Y*s?aRibzltQ^r%h8hbN-2ROS0+-E;jx^J5V3`;K3yLt->;`$jr|eH zLMSjBwWimS*K0Asg(Rd%m{m!g;8wwL5O25{lSC4SpGdyZ!{N z+|SK#7XY%z?TAoOp)*zdwBr=+f)QqLY48(9vk3^WQGkQ4U0+38RG;0i^;+mMRA|j+ zxQCx;S8Q(n)4Mf~$y@=W3{jgm!N-Z6=?mdGs?|8zbwsmu_2p}cK?mqUiAu0UnTS5b zOvO3dr83HSbX+k(qyO=EDZ?DBPovFoH^-!L0ba#%;M*mo-N8plJUub9zORjyVbE~c z7lukQ+U$bOvo#9p-Q(a{$PCX0B9iMD=CL z5PwR!i98K2$!$}7)nb}oR>1X`QOafu$m)r^{{LrweZ zzw;%R_N1|Ga-?IhRU2)$z}CO(v!yKcjriN}d?MYsRp z))!*$ac*&bTq>V1PuI?skEi13F&1k~-0RtQSUZ7&%?$o2aqA|^Z3YxL9ib!jAOl)3>Rr6q2rjKlZlS^?hN{#+)|CIq|m=q2|s8 zz+%pkpyG0#HIkv$hPpLZX%_+KWJep|j!om%GcFqo6FQ-6<7DC*V2hSx&4c7hD(GS0 zU}o2FrhP?HItiGv{NczBXc52-wlWtC@SU$%!yh96i(7C_R|qpz 
zeJImel$B-&ACtd0Hk=Y&*X7{CX?bBBxg5+0fySJyp6%v1N9pgji#i`<~im~bX7%s z#9A5K9^ng#d!#+bvc6b0&B4tSu?=@gdH2fiHq^xE{vaXh9AXE>P zq4S80_$FKdbDvcKl=?6aXib`9>I72l=*{dmC1KUo%JaLuQmdw-2-QTAX_@o5XsI*0 z-EouZ@lyt!fkT>N-ryd#bMJo84Po3h#j?u5+P=P`&c4DTvE z^PuCs>-1Z%*mmk#xTq4mppke`GgP*#WzbwmrdF+p?;~PkqWEeyT8J@+>kU|vXiy*sFK%=tO3EJ zTDREo+}V2fQgq)0?}km+mm&s9EBe*2mwI8}l2e0o$ko%9Rm5tFoZ0-j&bcOfj02@^ zo|RZS-}ws~58AzW>e9O^QkgH&7!3DBI7togTUd?ILu=Mu0*LD~+j4mk9z2hw{4k+$ zdos7rF*DpZ_s;pF>u;q{jXU)N2>~glAjL!m!8WXVDaUt!)B-Eb(~ z@dp#(L4Qb(1}rK#)Uq_Tksn-tlBZvU@8ri-SJ*H`G~#^Xsd=E1+?%rdoJEA!_@UQ zKE~EvGZ)5mJ!bLyFal3FHrIWVT|7-a9b6_d#LqeQu0+@&IM;t@PDG?&Dm7NYR2g@O z*`6^&U9?#Y!z$%mQi~I_+@a#6a{}p@-`sV|@e65_0Ak@{(J2IzmER@n?8LQ;-CN18d#51+gp7q?D)7kj`?Wzh0OefLGS8* zJ+m~P$mGbXjK_guxg%4>13}lgkZVbH(HzeCdeUu{qiARSri?My+(@C*LA;jUY~J5r zFk<3+iDl8rhGvysqQAHe4!!9yPM`a_7-$!HGqO%m@a+M48Wl(9c8=%a{eT>-e@f3P zOTf9Kbt0z^-)?p~Dg+fL&HzxHuk{#%rE^8kBguk4xd<|1m7pZRe(u*xBA`dO0(YP4R*=TECxUNztTR zTDYUGM(+CgpZ}z4zUjEJzvu_T`7$4XimP%(4_ofa{{R)Ov8s6=tYrV}-L9{3oi1(u z3oFN?2?6?v|D!l%BBrXNZc6~v)v|7pEGVt;`Oy&mc~Hn5Of+>31zY63uCw91dyV|_ zTR=@GL?$6W3jI@FFoT}hYe!cevKw&xEa>`}iEhXVP>Ur3LfO0bI~QrsjwMd;esl3K z+KicJ)z*>`&o=;Uw&H}HFemUn(qSYv#KL;wG!~p2>%I2e_RvJmKur69+Nh!#6J|U9 zWSjewghAnKA+k{U1DM!BXgBfE@sm0wtfyJ?z_-)QJ$VOlLYoZ=JpO~MdR+~MoqnIU zbeO3{=dwgvjl!L1hGF&mII;6k*iIE?8k3S=K`f%A3O3zW-#NeGiXhSjH^>j!z+P?M zDmGh&j9&|h=9$|nABS!0XH0~1kB666{Y2LiP{)w)%7A=~EI4oXi4-K93mTt|(>|Vd zWk5=_PN}S&YU>uvno!Xvk-+Ga8O7-}ge#afIFBoLL+b8E3FoFFn#y0M`3;Bf!*!*E zry+dMgG<12u}(inqF_jy?7ImL6E~F2Y*~TZn$~@U*I|Upl8^T`sn~`~yTeUaM>&|6f?9 zK2M%beQOK-)KK!bNGaj>w|j|yVMPikA_unb!MB>P>>zg^g*LYp5~%+!oZmNB-Y}b4 zc3Ye)Amnqj*)D7GeT&dy2?USPNGnix)G=pLj#kR9G%%4&nff8mtkTTKFD0|?w|w^( zh_;qk(LG9_&1}!?<<_n|F)(Ivs(DSKM$pAntY+KtYz^4^o9T=BlRK=jvL$}dcNy5^ z0FD#M3yxf|`zVER7ztX~oKYO_n!W+>bka=W()R*X?k;>?qw>UB2Rz+GA2)e2T%7nZ zE*L@bV;!U!aWS+cnM6{;(X2ju>^myzVg{hjmjUPz)e!Rr6J&>tOD!@UPaww1dN2|D 
zNk3FNUGTWd`Kh)2+BSX+mCttRIJW4!vNAH{uy0~k7tU`Ju=@1l$DNfHz!Owr_5;u~HMk6wn{qngDP>^Gn!CED5btmZ4+qLQ{oGHeRuD%>Lf}{SUFaQ)t>uj^41yt zz4#u0}AZPf8{^{S`|7?(

Y2e>NS{ESJ3L>_YkPim?hn}^AbcQYl|f-R8?`lTZT?ia~g|! z>GfRKkY(d!Hyor(KTL3RY8}@Hj7{3Rmyq%mniFOXaXzD2t+>9Ki}hofQWvzPzA*d+ zl2wVtpn(9SUO~Un*7?o>=rX4?KWT``PCf^jh81v`8u6XZjIm=0X~e@9DWR>QqRe8b zc^E_Xe!KbM4!vkP`B9)VjUC;^>$P~YqK}{FQIOv&vMVVyr6nPv*Ec&ct^A zC7$g359s{u`UBxV{S&p*dV|T`zpy^+--FF0-$|cchyT)+4orN-4)r>~gt0z77?`cp zaF@2(u5kRF$`~r|QYTMY6--7Fss+}=r6PP3kK@y0c$eijuz3ZW2MJ~DIEzvb<}suYHecIpq&f`(0W3PR8@=@6fPIFqRbgYIlS{+Rzt4?F{pEht^dh2 zEKy@DpJ|j2bRIP-Y`W7xJh5d@x`bRLd+uj3lHR~U``%yQiS6wqlnSQpA zfVa_O(eR$r^Otj5xbwbzUa`mLdE%-2*q3=S0tr4T8i$m&+krjv8m6euhdwv)Z=m+v z2G5BtMLa|u5@{XgOk*=VO(BuZuC6}(sF{pvI+gp7r*`bYqOCr8MM2nJG5EQJ?`U{? zO{>+JgN?6BUIk7v36Xk&ffHRN_R#wMoE9WE+NaRdLY+~IF(1_unQcs-jEVL>P9x$56=Aialf@@OTLlqhnQ>bbA5N1L ziayS@t=V7*m>|B|RbOxBok0e*OaAsPA{lyPa%5FoRgHnrWcg8x0J?G~EI^pshvL;w zX5+tQ4mEca+uw-3)pU$#dO2WBxKqRHUogYCeo0!oR>(oN$HzxR$xm|Z^!P-TK)uw# z`t!}mu{3;BA(babY?q;%9h!Pj8=p84ZW`#|;~7^^BqhOVFqUfG-XCzU)}qBw4yL6x z!S*FUUH|^}0+VhOW&ZiC&q5e>Q($gph4f4(@8-sDhYT)|M^{#MwSI4wS7*qJR^CtT z(;8ZwM=2%8(h&UZc0qy8M-L1gaz5Tnm)PaBvcq42->R|i-srv>YZvuL>@ew0qTUM` zlO9I1MGKus-@SQ`uVHV?N!9fEI){T*s}+$tMdN{V?HoBQ)U)w?<+9s$(e{x`B>wB; z3Afl$w@D^R{5tutUF+`QXQcEl{8Tpy!?%Uwm1zQEhQYl{#wX%{(Q$_ndVELx*>R)P zk4aKQvTomPP>TN8;sLe|Zfs80BCkR*S#znkxx~=0NU+@Ia4n&K7J{k5cT7eLb+|i0 z%0%UOvDYp8-Y+G&otQUs+TR;46Mzx=QrOPlGi6bA7Q{j+qg9%%!9el4OlL+B|Ht@}RA|-_E4? 
zqZ7VdlAyMHzu9?pQ@{$uL>tI6bkXG9o^7@BXs_^#3!+etqp0xz%Fp)}=iiJYOl!=-6O+E|$*jCAs-A z2^>i)N-E3}wC;7>HwsFAz#jbDj~r}x5Sjj%^r258XWZJngvQw$vm4*M+vhMPqnCA}oP;fiE)6BAxZ$9j& zBdg9l=36L{A)x#464dh?L*h@;na(>~X9?=+Y5Z|qN>zKr^t8$rh``YJ*Q+a&oN)b$ zhd%|3<6y-^$~0QUh-xU$_R@ImUj<7uu5x=XPgf9AOO^?ZQ&a23K|*8ZS%iCHxwe8K z>{xo#J1ytR{aG(9*te!?hre-@IrB#puz8tt1o)VyiY+b@5vBGV(-j@Cg&omuIL|#8-WR^Us+4-+Lqf>+Z-y ztfA3+mQf{)C`Vnf)l^oMFf1~Ch5TPwPS7eS@5P^+s>1FYrr>`v5C2?*A6CA4>3^L- zy|#t-ayIR?<+!!8zBs-a^S^~UG$NwJCV8C-2z@u!bD}W2;W( z!@BC1>gsJ+t&jV0x@tFjuJl!@gOzpgfnXIJqQ26DZx7EM|NCcJPkuMdx}h|n*`P(= zkvK%KtXe!~-~f~;mEtbW5!h?C&wV4k`W@b@Pq>;5B2XWNf=Pn1*90(5IGcI($9iBl zPuE5rZrjd=v=rv}QQT!wa{hu%jmNTS*={f3{m?b7ouoD90cfi))sMLJfoBwWG&J>c z25kPx0>Qm+>i5*fzI@l}FG61fF-K%)8nwJlVKnSxK`XEoE~e4}R*EPgn^b7OMu#Os z2!jPG>nkG8B&^+Y%J}-O9hLkv%`Yg+BnVa2N1Q25YUXpyna2qTZCqcqeFbiRpo{cA zBTyNNGFpL#sGRR8#>Qc=R@W&AQ6C-|i(mSZ=ykf;$GSR6~q0pA}N+VW7E)!Sr*3_uX_Ua6Ews1PjrzQT0XqWLC z%R+}VKq}DU0jlGooK|eaJr6y5(oB8M5eF)EEkoW=D$HZL^-3{n$2lw8gopdg*UCxr zk_2!t=bjz9qtLDRWm+EWxZMC)9m#<#-*xn0Lx(=DsIWG$dT#h-{gnt~kNr>nvq(_C4+!!)wLAfS$*k?y&US}>=Iy!ser9?e; zb=X4n6$Kk@_{j}na60!)rnyzp*WdEmLj4?3mUDlJ;uOd#RwIdgp74e zQ(mSaRD16~gI3gJPoGMtw0vq?T5{hW`C zIC0J=M(CbULK=IIzvDf6ti-@m1Bh?L81W5oSg^)HnS?w)KNE6Z(n3OH=rt>cCjgro zzt@3C}?<^~M73HkswPSH96V|#6PDj#hldQuOT zD)T=D71@zI@QV0oi$3rL&zPh_1Nm)ypOv*bfm_YDSQIV=yiByAtP~FBsX3E?1buL1 zVV9lET0rS((#|KOV?|?UBbeE|sJm?n5Z?AYy@}LOD9}*!j4fN1XiezbTVOvkcfh0j z*X*yq;0wi1=91tb+Jb$V9CZ>6Ty2xP%iC*>XEMCd?;p(Fbtm)tGyO)dN%o9}P-EFR zb$omSvF4%42)8e-`+hZAwMWzKHAd>&E&PTh8t!zbAM*LO&nM&yMo*<&bbgjeG*}320ipkQ%9@z%4#*N`$}MieppSqBKEQ zp1vt3(QA|%au5FfC1FV+5N5ouXW)QIT`kRMTc=o--IsE*6?N~lau%yZRQYp8m5~=L zUP|Q+#zy$;GmB-B+QghwB6nV~;W3(%4>?}^reb*^Qb5l3Fjs=GOD z`5aM>igjP+fT9>jUhaV89?>eDfc%_znOo1~zoLFs_{t530`I;)Jz1eKhBpt}Zv0{W zVA&;DczfY&VCthN)TK(XH5aK+u{}4Rxv*O2RP`z&Y%aK{-=p2Yvd^pj{X?}tfM;lf zw?zIBKE|SZAREQR5R&am9&E2Uj6}K82T3iGo}W$`5u0YbwP3RlNGY6fR(KKQ(-|?} zXK$d5Wh-%dmL;CED=XGKd;eu99i{SzLrmTQn4iP&0ZA2R?SoDzq^J3)G)d=wTm*$s 
zi=k=`|I9x+8{NkGg)vkL#0c4nR=P=%@%Qx8tYn8RusRmS&cEEsSgjGxwQw$#H>|QX zHrlEUxG~NS8}&&wox3#YSt4T;aPWG4lrhu8uq$v+37kG5XcCaDsC0A8u4Z#B=5C*h zRc^E?M+f6cdzHWi$uxT=qx46BZglSh3xzTFeTr0(cF8xFCDT zssTXHUSEWgoWOytsW1{}&+i(m;1|FeB~gMma&Bp=JQeu;w1tCiscC&P(ko3)pzUVS zy9_u_BQPZm3uPp+pvoi={E>RdNNvis{r>iqmkICknMK5)&BG2rN+D|Q;JPy`|8^0c zX(A2SMeF~HhiDC}$h?e~BuyQ3jc(G(v+`s~)o7phA5fB;`f=tP-r^Yj!2VxQ2a3k6 zCLUOkW*!)r5Xp_gv9N2DKS-~9E7cX}o;r6CFH=_F_}q+A34b2Vp^t3vq zp}Hk{4*Iy;-KxhUNV`#kAF&6<*RZVK&X+FUA()zu>?4S5j&0YzQoVI3 zVAZJ(b?ot2Rt)=fHgyTvYC3=HALHL7W5#l9N307P)P|S={qGUHDb|wtft^;a zxd~Vt2xP1;#aojp%P`@Kmg?!p2*HQnQEM*@JUiW4H%RT0L^^q-p^o*M1S48R+S`&s z{#1w&c0|K{jXi!2bDEPu^37wQq{BQEp*1u_mIm0E7mr-}5jOgbf#;B1GwNfv;?;x% zcf?2+DNTk&C0;fxyLN6-Zdeu7$4SL&%Y17_x>B;M9Xncpn$l>On7iv^_OZC=80&G} zmWfwQa&E7v45*MtF$XWoux3z+MoWkACzVv|{NeclNsT!67}L>tkLl040X~@p$vdR^ zJfVo#xMB83oX`ElkIeu%ebdxafvO)Ce6+2tb;T&q2=scThgp2+diC42?=9jYP+g)b z=Q9(DI-Kk`5=0nqg_T3BpmzLMzAeutePqqUnYenyh2Q|rkR1*mcOsSL=Ls$uFgWUG z*4LH|hwNx@c2sD;Br~nnr_nRvjqSB#8)o~{D0$6g+iXNyZR`_Y>^7tLdSx4z)5s0= zi6Vfrm$=Q?LiN#*a;=P)$Xbv3-RNigG6q0jKQCQ+RZm?>WOzxBNK4M_YwoKaG|xA} zRjyPPu*!@%32H?8bz%6L_|7tkLcb?#+`^9N*m@yqs$;;ydct&S%ND!y zuo3>Z`99YpCnDHx!JH~=jH`UJN>Igw6TTb71oP-D%SttgDWvWr0x z!i+5;GB~3X;>a_ddd@k&*YkURzvubm`v1PK`+I+`*Xw&-pZ8VcZ}_vj+{p2_g4~aH z1G*l)N$()rAiZMX!|nQx3Vw!>_FU(52;31xUy~02#pfQh8U=U;*ld=*na!B-4yjz? 
zsu&#q4EJnZn|@O39dKv2swKpzBDZau_nF}PsmadgNJVdMh?!{9ol@#l*8~ze>7tiX zjxMQ@w~W8w1Zy=Qh2j-kQa)J1yZ5mK=i6ruG{mEyHjyXf&@No}@5a|S^%Jvo zi$7J1)pQ;cq^Pu5zBK<}Y%iO|4T+gQ8QWj&(ctzr5o^?W{*0+TF~!42&@kk<-Pd{x z6A+_{J5}HNo;gX=6#1@VZK^o6D?0ge*6yQ`-g-PzPYE@*tPCco5~_OD!{VPBhhs#V z=fwqV0mpT2RK824Q5GGAg5P?8DjdfOOxsf}`OWEpSI49w?>zjWq#nb=<+7(b=bH81 z&3h3jWF@fxe~o(CJeGg7SA~SK7OY!HD!uk}_KBq~S4NGc_n1ezrHeP-x<@k6321c$ z+O1iGD$ejkIH5QWvHU9f63kTj(du|&UL}*T`9r*kvdGsrrw=%Dr1v0CmejdA^qYxADQd&i9MokYfV>WByffDTUwQ>wQAqRkP0g4d@X~m@glv~z>Q^*~ z)vG&0j3%pwiLG@c+kLK%GX}qCRI%m^P}?mHC5fh;L>rHI+MQ_EWf3UubG7cV3;G4m zxb7qQM&X{qtzRILlnk5!YK@o6s`^AI)CvrhHIvn&US@C##159UU#pe(B!{b~A#IbW z>e9Kl58W5usW%y<)~lAPMl+m-d`a}-jo{zlJKy72RuETzdtSVqj66CcD)4O ziho|P;n5VUTzU2KJSncR%~=49T(>5C@HFFy>oWB+t2LB4eBmYfyt{F1jxV;vv?NUr z>6QA^($CwmNn;j$KVPh8c04N5VmcS}wO#>^Ov;TOj?^(#{zmL8~ivsa!YdvC05d-NWR(e+G?u*dlaz=nIEBY^{dOxe+y6m@kxJLddJ`>B|dN z!8(F{1KA_p`dU7Re~2~E1n?$0+hiLAe)4{FL~^cVn<7z6p^bgcWzLtz%JkM(bf&ZI z1%-C@_vN#38!;DRfs6(VN=_^WCmrjcLrM*GDGT&+q3bn7;ud?ksC`FqjacCx$W19s z^0liBbR`4rq1fKt<3^dXvKGk{m^MGL635>43gjkJD^0)ZBk8%0i3Vg^`M5fP{;VE0!n?Ngu}dc%8_fTPu`3vx6RGj z^pfCn+%Zk)y>zl(hBBoh$e&apE2^AooLfL`!wS@*=K|g;b?KeT!4#b%s9!o$XXRIJ zr_(YYAvNC3CJMNnwlT6);z|SZe*ELQ{ zKJ{W8#@cw~AxFQ03As~+JHXL3-%^u;5byQs#s;f^+_(p|$9D2cO_g7i)EwuE3&k(b z=X9mv2<#X-gHI68Oo;=vi@8 z8_=m}J>!{c=Dj8yPXD8M^_-Zfffjp||L6{upmD6G`3XlxaYrnv%+lQtW=z#te}~8` zV;pJaadC z8pcnReH(hGc-B?&%0 zF0&Ed?f;)g?vK{OckRF0T6F-J8IRa-t7P{o*C}LAZl}Tuya-j|GgC5q58?Fd`*VsF z4Jc?>G7(zZEKl_LrXny}m}MGO=CxqA7?VE@|KBx#!fa~rd}D(S`@ z*E4P~;HzZXnS&T2VLEQ)O#U|2z0MQ>EW(<4j++i&n+gUNq|+rJUGI%&1v zX(BzRE&BS&fPNWZ<8;lHbFCJd$DOtfL{doMH>(%YUV?4&NUj&(X&Sd-;8)_NW4R*% z+^f6S$AT`tCe`csaCboL3V%t)qZ0re%R)IJ14IsJ$@gi-1Au$JLI~w77N_h_r^1KU z7P&y*{dGUEN8db=Ewd5dPQ_Nsj~f3+v2WSk*k2Qrs@|%*a_v+M|IOo1(B8Li4B^qF zy~|2ucza9FOa3Xed19gO{>liN5g3p6X9p|$acAQjCQOvQm36&Gl>9EN_h!$)O%pz) zV?zI{Gm!SM`^mFcC+AAaHh|d{m!xo2;A`tfZJO@k zL4ZmQb}X< zHL^+ll`*d5cZp3pB)z?;qI{sR0!m@-uaH)aSKferP+*oBUAKfih&sHk3mR|vHXsV* 
zRx!b~({P6(QCo$<85p+SV!7f4&=_(@39m?3Z_tSSx_HqLDWWkty&LWekCI_t&kgYa zfncgHd&%$xAF)rj(fe!s7z_sM#_ksx8L#}mFwBB}RM$T;$AR*Gg2=y#;%d=PVE2=K z?0H+VjqRL!8w63&n{c-q#VrP(NNHp3FU;WZ^P(}##S98#P`$x3htK@vMu2YB;n{or1~48s>vcdT2f@8mZBaY(1F#2mHj!qW}N^ literal 206098 zcmeFa2|Sfs7dZSJV<8f0kRd9S%=4^-B$0VYI?g#bIUF3vl&R58gHkF9iBiU73ZW>9 zQ094PFprs!9N&JLwu+29?eo44C?o2m*O<`01gfe2--zhhLyY^1itfv z*u9b$8pIL~ge1)}?lrztbkG5C2U4Nl7aA9aAQlQ2HX3>mVqq9!rKBKqAPl$A0-c3e zR;^mKdKJs+)hz7HOw8=-*;rWE)^FtCSkJ++k)4GS7H`zV@2`k8%*<<8*Q{k_UCYVJ z%E}2}SUIUx*#8WIg%=R}Do7Q&M~e`EXxI_7?1+Unz$DYcD})-Lk{&?`fM7V#LI@f< zdIrW-tC^UA0{mErfM{r!3fDpiIs`2Z9SuDr!zy}O7Aa83PD{6Sodo^fBS;Pb*He-V zoY!K~_6crVZ}`$yO4{x82MxyUp+=)!V<>_Q*ZrqwcI>}yjMglQb(h^xZSoQGHT`UH z4e`^sme7s3a~WZ;YClgr$b4P*Wzy8nPhI1LDR^o4VN`0HZI|2WPz|H?D1!9Zb_SRL#vL+6 z)qsfD4QNdhS$E9GWfJ}~1w!9RSZIb=Xen*7Lu$}?k@`BX%{NXyAvk1_@G+BCzm>Fv zma5C7bw8{_O_Ibs^!TRw5$jf|yz(1BDGk_LR@v!oG7b8OX)rllbc@8osd!P?L*-a} zZ`18c!U(}I*e$ADs^l=p#?*+&_>RZA% zU(FcLjpl+Xr}S$i%Yw?`+x;m?7$3ck;F0(%WqT$!-gwScd~xChw{KzjT=F4uSgnue zgQ-?eLortA=QndwS(I=SgOBS?NAyZNhE2AynzRl-8hnStX`zk;T|Dn3U89|#HE`KE zXT;y6NjA?j)}|-n^2sKpqgKjA`vy!(UtS7Xdn06R{DAe`@Lcs|%KWrnZ|t$+p#|u@ z`GK+32gaEEGdl`K^2IDiIy~D$GY3^W=IPJ=RHsk)f39PL4ImjYs@v<##8b%Vb4WhP zmc0P&MGC&!My0P-M|9h>x5}ZR#DInOY3X=w_*5heK4^il=AMyf{N>2KftDBi-8QowX3UnS`fL@2pi>i%o_Aqk7 zXI{bH*rmRz16ESP9eFHH^`zGU-WkzP-}MJ_8ne*voU3Fy7{Xxer<{r$XY zK|}+I{AdB{3P}320M%YODUss&cxaD|sk4ihh!VmPlNXfNci*hItZx@ptf8!Y<=Tm24Q%(v5SoZn8xEN8xXW|;&DJaL zl}~ThpDYOWl&RoXco261@!|qs{;;J@CYCcpGu1Obs z9+29#EmTMM$?DVD@f`X16aLk{Xv_Op%BoAT=ZpGO54q9yOx;KPV9-WnBbouZtwZoKj*h6-E zR$ZjgNMBRt7T@Lsj}{e`ce#0K<%QZ)cdhzUNPadSEWTAlSH`H_Xy`oUS=c;e7Crf( zX-9e;gY}hycUqlCnV$?xo3^+~kV`SR*D0w4iEvUT5NGlGr@8gKygRJ9P{!RzdsIz? 
z=8mEs)AFD`S3ff;LOx6INJ1hHnX%exx})YO!N=M93SR$J-|eR4{xH*#?8d<}XQon* zR26p29eh4*TgWZY;_BM?&SoxzbPSb1 zg%9Jeg>Rp5aNXJxQ6Mk)cIaEj=SPp5Y#wsc$>+qhXjjvbQFr7{M$QJ$u^mbMP+!ryI%!ZBG5M*V4-n@b;6e{Ens zPl_rDj>Nym&sYuwJSOFFw>|YRfU!kVz=%rg92^mZcu z#MNF>-gSQ^NxwrL##6GT3a>xQq;lkz$|gr_?&uNi==E8E5_}4=^Xtq2Z8n-slL4~j zZMwL)K`8e{4;N&EVO@Y23nxrJb@Z#2nCYj_cGSE2+3~OmRFquoO)H2EHy~WbdpV{Y zPj_tUt1BD~=kmX~0F4HWhWT6>Nt~k19@^Mdt&W<`tQ2%0sord!cDC!tP)(gN5u42n z^?cT%YhDthduDp=JozHGO+3kJ&qP1jzLxA?IDib)#WeVn0^!FtOm=*Mo%x`|@fIGmxi+SXP^|sK%`>toE zxDR1HK8T(Ymk$k>|4s3$fGTpkq=VxsVsfjJsSa-TS$1pvLs7dLnKSVtN^`Yl`v#tD zs+JIqk4M?NPc>Nc>{oc0RF=*Iyq2FiU)v~n8(Mr)(rLX zWI}1e9n02RCQlPRn=X|X1jQHD7!SUABa#u3w4>~t>AH`erZ1`v-WnKrn5q?hxbWS? zi3Lb2{H0Gy)`;=TlH#g#-XPVo@}0TCgS7tdf*s#EiayQH9FEEM@lc4a3V69&Kh$%B z%&VDA3y^8DSkhs^$3xKS;umz&f`jIF_GFAb`D$Sqx&SRyg8_V}xtaMWudXN870UqE z7EihP0G%|=?S2<(s(Q^E>)MD4jSZ65pKh_rZy(rwlf*EEJLxzH@Hz#YnHP>uoVb|c zNO0}2R{i*Cea<`C-1APY18O>`-E)UKs--?mfXOM&;#d(zu%T zt^7jl`pCr0>Qa+_lLhERV(jee@S!Uo$W`Bhk2MYLIx-xu7j4!$eO^v@qg*p{dCb!V zNWZ3u9Gd56=3_d~_THwSw5v)zw$CqbquCI5t5VM8fXvL?w)SbT773n5k=J1O^b6X_ zeA48%Lu%U|O&i7NYbBI^MucfKn>R>&WEe-O91QBSwu!4)fMT)(W=!nK&eyTY`5GGs zMI1X++bT0>T22HhmU~EKwl6>*7NE0at;tIsScxy<5fe6VzpaaHJ6!C8a5+BbFOl_T z>tz>$`kve>zqxZZheOiAL^^3h(sf*bm_u0teU0_wJMf)xSI75-9#*S2dNYbAefwZ| zW=j`8_Ejk3ys@zSCIxc^-h8e66l@A zpGoU~U?K4byet_xasIc>$p#{IZ~FuVdD?XEKW)9lb^FbI72UBddp#fAY%Up)Xv^%d z>tACZ>p5eiD>!i?RljdjzP@YBx!dn*1ZCe_gy`y5rweW!v+@XtCHr*5n4H+im!aBZ zGgxIa)J(ELcMR-5Ij@%PAJLm=jUp3piBr=*$}Wk7;25}lp1Rv z=N(q~df4(xGTEnPbX+xsf=!W9_61zmf*<_c2SLm(Zs74$pXlyFbO%ZJ;TSR?xEtBx z@i+>Ej*TA{KX*Nn-Bdiw;u@CDG*MW<+AMQw~) z!N3iVw==@H6J7LePoO{{3uFMfL3r@b4&sN5APnRV5g`{C6~yGS3=L8gH2ZMw&WjYW z*b=cgcdRo7B`9A_f!5Ho(4|U42}VI90xsVW0hUOdc4NJKk+S(j5tNKq1n=-hi;A6>uB|26vcj=n%$^ispCC8Uvrm zubMSN;i&lrZYXsN7y~7&MY-W!%p5SFbAg6po$Z&z#|jr10-E;W-QDp{IK1fWlH>Cn>oHATc{TP=W(j;PU|iv6vGR zerGvHl9F!;u1hd7lpI*|7jq~$XFwW)N7lkG18px{{L7++|D}u~3m7%Ud{I)chwXbx zoRUINxQ@Z;>T9?M%eM))N*CgDsxuj3)ffDxloqreWoR;GwjvuOR(9eTVif 
zj|(aphRbSCVU&S@!=f+*Q`{kV;39r#hn`Xfve$thBPF4;e??o2_HKBh%MV!$csHy) zcG)It8o}80DR}_L8YIyjuZ?lWxFOv!XpjLJd%G-6DNIygI0r7#akA$JxBt`p(-Pfq zKTJ#%;s2PUN3j2K@?t>Z+>Md;KR~gfFgTns#>-uYaKKpa(2{vvwUqq>@M;IVo6lYx z)_zG+Y*gkCEM)`E8POO!B#~mSS9@UG+MFa)&bHn_lp;~w~_f@miMxvPVGUuYxnI{)jH{?{x0uUGnCuk^oO>3_Y_ z|9YkW^-BNimHyW&{jXR0U$6APUg`hiUTMm++*a^Z20>=v2mCX@=?m?J(7;t|3!EYR zz;WvU;_l!_pu}7N1m}N&;D;n3aqyFZQBt1^sooeyXccGz{9owW+U?-(?xH9r=1dSp z!sn_)QFtdYFQki@gs8X}q^#!Ufh_%2$;#}iR-y&U>vXq zeB3ZbK8KA_K8`2_w1Ao_zp|I2my?SV#vRG;<%Dx4D0-;~P>?HvIGimez)t~jcT^Fe zoP6Rp*E8VPz`J4i6-32FP~zg^{BjDSXrwe!4kLrH+rtlzbV-UyNQg;Fihv_sGUDJ= z7yn`q01djK?Gz37Yb`bhQYr$Ae0h3$ih4?k;@#}UBoq`B#Ka|G8~{Ru;O*>=^b&C< z2rgmRk0GGkuoPD{Ka3H$VTtZ40)SWw7iiQnRu|ydpx{KK#4tFF6UNz{03;>>Ge+gu z$z>V%m!e$(sHgYGpiWN9XbJ9ynD4y(5-|aGOo$m`2za6!3Io6emzpBDYkB~7{;0!$ ztgPt)_*$-x#o>qqci>#YyZsu;3IK}sEVT=C!Pvn5*L9XO&0WmJeg&8Nu}*&sPbCz9 zVVnu@&cZU41a~dxKgFaLeg$uUMD0Ir@H=%bE41r}9smV^Z>eQAT$WBK{u=Xg@uINM zC`CKGn-daHk99)YW5iClVC-QTx+v;_F@px%0$^(ZqapwwZxj)i5s{QIUOL_=EDp{& ziZ24=(O5h0KLx7ch5;CV2`Z&1A-@tU1r&I_z{bSVnAQ2N5d4y|qB1MbU{XlY(^J%O zCb%P=Q5c>5DguBLQ7jg%C@Y7Om6bwDiJ+vAXc1{il&y$@t(>HYtbznm9D|gQm6MlP zs=ptPBEq8!u1_@~XgmtwSQeh5t*xvqS_UmAB7?$6ilAks?L-vBZP5U^thB7Ogp{3( zozxOGs;?A@TZxrO1n6Y;ONz@YNJ=Y6?bF)7PeMXLLVB;fjDq-nDG3>Ic@6m`1cn%A zG#1{<(!e1J1Qh|wHkcylcwiL2OGDAf9q9(P-N0^{9To?AjIy)xBgmpWjfl4YL8O!> z!MIU8fFzh=30PYkMjv=8DVu640$NBM0rLwGV>hHT!3F7taYlKo2pGE&zXKgM07?aD z2C@YND+1!sL=*;$WvT@>0Ok{iwRJc#|o(a zwB4#K2Ag6W7SyA}s3^o%AO?B`G;Ke|4eNnHYq{Z__$g*m5&H|>ld_eOLW={9L!;$n zM5N^;&>~1VSve6D%2q)EBPnAii;@*stPeD8S@&SwMFU0oEA^?~`oE=ZR0cpV{s(On z1Ja@U2HAU2szw;s@A9SCdqpG))>jn)0uo3EW(`aNzZQ*zthf{!3?Fd`Nm~(V zd9OlIXUjLu!7>a$Snjo>? 
zTPKi)Co?^y7d#CafpauqID_?@q9>SjLh#ciOyJHMGUZB z7#xA3*o#Qvr5s_=OzcD2uQV87F$h*w&hGFL85IFdFTA9P5ylC-509hR!lhy@q!Shg zCQJapFA3A|y#NL3!BzNwUk6@is|e`3V4V5k=u#~(gy9vbkvlj|0j4&viwGzA!Q+&; zgpz>~Ke$n%KQ>Gw5;T^ zASeTuI&$EF3lG$#F}0X9z<~z^Sap`h*~*oMs)jexdBr+yalxhs20D1e{<**cZ06%V zf6m{B1HOh8*}ttCe*1|)iroKSB2ePH z>Hi}VHB`j;^;7^bzXgcts!*SGwH1FQK5&K&OcN^*Q@i9R(f_fSf2#iv2wH=}fu4$j z6ktP27VsfWeYE^182O3dWsOmw7*{ayEcWpdCsN=s4-QNDi#j7MCki}eAgr7~eo`6@ z6?rMROB4Vr5;Fg2m&i*}30RWNG9wbeJ4gw?V?<(crSLmOmPgPZYnF^W-~(1H`G1rP z8F^80De%k$))Rkf0LzI2Gp?Wjp1UdFe`Nq`sK`kJ9%Q8f6S7i(i$w>@KcQA~(xQ?Q z@^UguBW}4fWTi!=Wuzn|!PG+`cuA|kydtx*lNK3~7Z(K{6)7-N{c&%=V;rcJJj}{J z+8_UUd`N(vg2R&fEOYXsw*9ppk(2>!{PSE$eqR*-s8x$REOpag;Nf@EtTxr#vpBs` z3RldXU<#yUQ5-VMo*6i@0*I;*D<)WNH>5XVQBNq@)ESlHBBHSI*Jf1eY_K#JfR0@n zV*i(C*A*%SEA`(Oeh0)unBbfA9ZF;9m*+D}jF{@UH~^ zmB9axB=G&r2G~AQ5%2^jG8R6vn1ii48{lTp(mA9_Ibgza;1JdsPXk_zaRP72!FzY+ z7MA>smEgsg)!?li@CpnPMR3tG(t=+rguuQYKfJ*Q#}@-Qq%u`ceM6^4RF-<2>D6;C%Rf22NW?I8Fn$2n6wsaNM36-$Q|+ z#?|3C+Q}Kb`a%QK?1FYe!|_)j{+kC8ekF$X5Qv}mz+yZ>ycWc_;fPLH5QpE_;&j3w z!D~8n@VgS*+%YHz5Eln=7B^!<@V*_LGWf_5i~WkY?TWZNcsB*WLK=7%;8_K4Yw+(x z?cxU~PUL~_)DwqscNZ}L4jH5y8XR+RazQ$KLlBi`xE8eTr_EDJHuds{g)89xC<^y2 zwS4>trOyyt@4n}KulYR>e;>T}1>TLLz4|@RHXMSo&q2_}=I?pi?t!yPXCWx7Y6U-B zu)eG~frG`ML}7t0hyMz&4EdiUE7}u<+gsjv=3nyX!CH+1FY6H9_`#+)3d1k*dnNwc z6<3h8f{s1lybT84e8=#c0xbhO0HC`UH*xu~&OdCAufXs(tF6EQN2z)ZA{=pKh|@+C zT3f{l(GIdPzv-E%7&gpFQF2s9C{DcK`l@x^aUD#MxiNa4t!~f z5wQlrf!KuLLu^L~BP0>>2vvk8;t;|RVSzY~utzu}hzMWA8N@|IDB=zx2Jsk?hR8v@ zM7%|OKr|t`5PgWR2oen~4GRq?4G+x@8ZjDqn!Pl-G{!X6H1;&EG(I$EX|B-Rrnyg( zM3Y7HlBS%dj;51lfMybW2#lGQi*_rm2(3J=2CY7=C9NH;8?8TWFzrp+`?M*vxwNIU zHME_ygS4}B40IfH{B$C8N^}S4Oz2Q_u5|u%m+0=$J)+B^dqr16*F`rFemf&`&WiFmN#lF~~6-V6b2~!Eln{5zPtDIMzUUhR- z!m5H*)vJ0|&8=R$di!d{)%vT^tG!o;td3iqySi%im(_Dj>`X#TDon;qCzwt#-DXN* zdd<|zG|tS-EWoV5e1sXx9LRi|`3dtI<}T)0mUS$xU}<6*U$bV- zjy39QEZ4ZNxw7W*n&LI>Yi3wESVdWNSkbHjtP!kPtktZ;Y^&L}v+ZR&#^%FzlP#UC zifwT1>a{!8YOF=B^~r>$n& 
zir0POpyd$c(BMFEoZ*P$c+K&JlYvu+Q-|{e=S9wB&PvYF_3PG4uQy%qwLW5f{`yWX zTCN>jI$Sue5U!_O^<1P4JRA0Iz-+j%A!S4LhUtym8`U>rHU@7@-B`Dgw26O{<|fBY z*EVHuYTHb=dH3ccn>{y2ZGOFZkeicRi5tZo%>9(Rc?->!-CGQ|oZJ$-<=vJ^9$p@8 z9v7Z)o?@OsUM^mB-V?kxcnf&@_&E4f`LKL9_zL;@`PcLB<;U^g=6}UMx^>Ie16zq( zW42aqof8leFcCN{kSfq7$RsE)XeW3>ut;!p8{am)ZNA$Qw>58HyVd!gEYQ$k=W0YVtWxUTg#JI&|rwP%d$dt|WsOe+VX|w%i zp=O=tV&;D4$k@?AHyBXw_&qE z+N2++KW=tB;rKVCKJq?t(pJazuI(sF19b~EfL2G}K=)x(Frk8j`&<~r)8>-La-PynB4K zd>)*nJBd13jwXg;2RTRHeqjXO5cpW#dM2! ztMm4u+t0$cg6-pRbntdOb5tdy^Sx#{4UO*4;Xt(5bF5Lz9q1;A%B*V59^QjeL#ivtiSpR=TX zEb`Y^8frGp4}tc_uvim-f;pUq=EuT6lUgVOpR=T~XHcg_fbTz2KWF(0VGdS2FgnEd zh0kgx##Ic=;MgJ4#+7r^Fv>YC)fP^s(Nq%$|B8M>Xm zkwFZrle;DV+q2^_w)d-1gfq3_e}p?={K0Y2;r z_=Ms&qzZS!n(%Mhr-2J$lchMK>_Mv+SIWA2F>R$jdWV?vfZVE)+=9*j5A`yhM;n zJAd&8#@G2cD|gH6e&H)h$1F;c{wLD+$S^j0_{|PmRig&XS>;DM8SxFbcTBpc$#7oH z`t+&1JmnSd>&z$p{_x}^Y<|o)P2It0Gvv{`4 ztEt`UT$jyV5m%9UFYa76-#Q-DjNO5xLfQr+@cA+idsEU($C(K;x`VZRdRnvQyyAD} zia-0AJ&&H}8&*v%%uy;EPkCf>pcoly76?7VSU=CbtY6r7fAY{;L~~G&!r2oeV~VpW zjeuYV^OpMX{=K?^f1UOdbaJ&eQi-;dnh4}0)tdB?D!2B`xnberIj>WVTXo)^kh1H; zZgnc1wL_nH>mNSDvC7@1p<;iKPk_wGZe0$NMwcexdCPPF8+iTX>l0<9%yiHjQmVHa zNv+k-Ccw=ztjF4*;M)1(ZK)8#r7X{zxH&qYMzjAmKW!#kC1F)v$4BD0b=p1P5P95| zG8aj5`RiP;3I**LBg3DI0pUq4AGT?_CkL++l6+kLQ(+)a;j!Zpb=9wG4fnN#yi=+8 zS}KFzZO%GA(j2(Mw0~CjdREvVtAU!mT-l4UvkmQhg8liQ7ogq+$aT_h{%yNiq#?Qf zeXr@jmR0^Yj=#fMDb8@V249?}^FrWBg`PIM3bF#_j92<;sC4|5>-K#L5w6YI-VJ_U zTJq8OwPN0f4|Lr(Ic0N^K~Owfu3Wx4Nj0!DlD>>lrE%YvlFr!+9CduvgJBt|pA}0+ z--#HiS&VmYaQcSz=7&skPbOT*5nbQ3+nRi&v>W9;}X~s zoqChh_f(2k%#O2c;C)5C8gCEm*8L{}joU|V?u&JKZ^zCJKHZ?rFY!>DX#uJrW^`su zxIEs|5cp3t0x?*Y3=V3>qU&8#rzXjF22M;P`vnyB&Yd1t%%Ww^vay}b3Qd{j$XnA` z=U-AY+21yH7vJ&XA8Fc-G_Y1`HqQgp?*8vAD~Uhagf7-h&y7_PTvMH#B@b51fqjxEk1BUap4s&I;A#J; zua6+Myzuze2=216GY99nUTwa(@o)2sn4kYPJ?D~RS0q!B#lB~6Oom!vbYOtZ84_P~ z&E)JH34D4tKmlCT0t)Zu2UHS1t!kYLob5){iS@0*iI$CmKS99S%Bxm)7mAv(E6)h2 ztR^Q_9W-0(Db@fn3YLG+3A^wyNi#lDJAz zUiin#&vTy_-c^o{^fzM@>tU1QEfh|zm~9xI`0U4fsVY7sjpWtB=NT)-`RQ#Q@AC?8 
z$3hRhMaVsYBHSc}GmEy*T-4OZZn{alB`K@~& z)gcC>7BlYk+wQwRWgoCHJFmVhZCVNEg#3G9-mNDxbgMT;9I-=W8}W&MzI>PQ!$A25 zuMKJQs+(YFMAkT)#1KnjW!eLCKbnuXLqVp~^Wt|WHFl3DIo2PW@0u~Z%JBNF!=;hJ zhk>*qpPVN=Ru8USZs+HST47-XhuHm@cHTLblA(Z(5k9pMHnR|T1|pNm{;m2Q1Jipx zjt}4Y>N}S-MGiLG;hDfilc6kfd_ccQT5eps1&0sXnyJ??E*AN@Pk6)O`7QZ!aRqx! zzs8Z+=)>m_k zD(yixy)3F@;5N}_T0|d zkewd!mATd47fH0Q6)$cB+c|A6zNI&mjC!n3_?Ax3t-}s>(&eyc@kh}0 z*WcN^|08_|?BlkignW9IH>kvu@dC6&+dno5Jk*o3aHKXp2 zPq@iU=#EybI^UJNJ#}~rIefIC%p`7n!vte6k?Sz7z1FSV*|)OFaI?+$Y&k+Zj2EK-qcxjvbC{=^Y6-bd7pU8+D)8Oh1)=R%3QtTg7Q)jodlX zqaXN(6SN(JMctdy0#LWKqi^Y_RmC>;)bzmrLk7ot=fW-h*;8)YTCcwi!}94C+T?S2 zUYTSnm}I1>tAIEp)>jSrjpNE6GS^+zvu|{7^r;&)9=C`Rd^|5WK@#nXx*nM){F);X zNsaM>tBxUDL0MDEwDE`oiLrTs$R9T!%tSA5NRcqHESg6aI4r$P_NtB&WdDZN)WohfwU+e`xkr(9z` z&*_s4n3U8Tp>h~6B`tpc*Tr|NjJ$gMRo`*nd79_%GM9J?HLVgpukmemp7-6Vp%j(6 zsgs#jU}#*iYBi;H)x}I%kknM(E_@#PvyFgw zZQ66@C&XU0-M-=dHIElRmwHB8ZtV?bqaOEXt$Y$P4BY)9_{%mY&q;o*EYWfJP4FJM z9Ba&1Un4^Fjk}g7{IXesc1mdDmnE_k0i(w|0DK)j zeqj1Ha-mxM^z?<`5(bW`)?UQ8N^LzRtw`PIBfqKE-LT!J=_ zQ_?5rW--i!p1X~WQA+oP?i4gyJ{_11yCA+@#_j?6X7|t~*>UoXNX!H4-Q*{yP=~H8(LuQ*j*Dsd9;o0hyYsZbntXPTQ6JmcZ$b>#%$yS#s*aw18Py>S?8|mp+ORRD#^5dB-sq~K{`^fuqKFp4Cm}`}NXwf}D=Og%Wo3U}B zj_u6{q{BamWMzW3ce-=FNsq85N&3B#kdGhD{At&4f_$&{I@m*`n~7w zdkWd}ja)8pGU?x`{RLUOuIj_*oO5yyj{JG(;x!6mbJ!PgWp_ink1%3N^VB^1*gF2lRgcncnfWn(pA@mmujdh8 zc#?_7Q}_GsE!mvN#ywzPpx>^rZR&8%$lX;iUC5GNn zP8FFcAGo2|^@b)0*f z1wJ${soxXhFNE~lo;ImOth+zXHo^R)T2b<~|Fs!CcH`-q-r_{nAWQAK=t>Xzkhb-` zn#Q5x@nbN$V!Z-SKSAtF42Yti&9lw+Q_@TzI3-en?2Mrqn-d zM(ka>ZCL2nk%=7&sz-f39D7VAN`qzJM)8<}*Uf)Gq$}Iu`Mp0#VmZNj94+)axPwdv{?W4;^Scc7%FHuYPn`b5ljl_=&h z3GC{4q+pM?IH;059vj(H?5NW%*2~5>@UY%>l5f0b)OFgRMc2=)^BITe-fMBQDpo4y z7f6a`LsfS|AK-l3gZrcV7%r&$g~7AKx(B##?O}8H?uS=j`kj2}l4^z+7#n%dG~QyQ z?eQ)A>HuG{_}HO&z51QmG1)}{pA@7sRDJi0C`__S?jDJe8t>H)<~G?gj@x-;0Xpi^ zeX};ST&aglGUpXLP2wK33)!m@_=9wo6MTj3YK8554Ly^y!2W>gB)}%h-`YY-R_H=z zdNf-PKagblX7X@-wEQ98jw_kWUxhvTuLoRqlPA7xA3Lrj`DnGB_?xaH!b&<#MtjRf 
z$!0bt8)Ca=Hc0($nUQ|$v0BvKt@pDNXL~}jDNCB%bhh{fDEhaOME`TIRFozewK_f*TyZWbFD_Ptu2#W11v>ab1t#t^1Yt)qE5ei$0Z zRK9Cp8Id!WAXT%&Wl8==$XA~3C}UQTD=dw?A2wSv@om&~xcatQau?Sn()gHtV-#lT$NP6r+1j#nNVqlL~n+Bc`< z4wQ*9Vq{heM?W(ym$D@U>ZIf)SI!0xknsk;z`<%C!8w<+*?ILT$ysY9e#YJ9m*)?+ zc_JDr)FJRh?DcugCBfz|my5(P3y@H}9b@fq17S~NJk+4~e9Or|fROeErquiJQNp>- za#b$X&yYeC&S>~E_Sp`)@-Oa#FvK4O+Qx75>6!sqh4d1FfvdB!Q*ysvJk_MP?_js8 zaO7U@hjLaq-HG4FA6n!FpNP}I9%jB=`C-|KKS2Cni&%kcT1}T&s+PFiW~pVxH0N@~ zO+iG{p2zee@#2NX-zt-Xdd9CVm;H?hPXyWj=D?KzGi&0P4YW`1oDI8@88N(b2@@hZ z&Nxb|X)nBp6Jz6jzm)&C?t>lU6WK=zq}+kjozHk~)NV^6er#vITv6xQ%P_!wdKx_D zQbIsLfUKKQPb&VVY|?8L?%8jj|5kHLm1xrgs;b*utNI60p5O2EFfs_uc^fn+mBME) z(f{QX!G|=2-}%}xK9?#-#FzPj7K@;8d1ua4>rbBdmazQ2yOP|ud69-K7>B&jS z>=|2@RQ`*)>B&PR)y=945UX`1*(lO}k80bhHq`$8(G1>Q5h7b#K22(qw783vyaK@v zLSkD=n?iQzhp1Ld?^%_f}K<)^S(4Fw!4inX1-f zlV5XvB5mA1OSGyVHR!j+Fa9{=oq76)IaZ>%fm<{X`kZrQ9!Ol*psPKm%S-YF zklo)44BBn#S-dFC0b|+^&z06`3{W;me%8i9?dkF4C{pmLmi%soOt*NST;Zb<@0AWI z4~R#ZJD21e`JrXXo;Yo_27jc7FC`X#sOVV$o*lU4dK5s zCiY&m+V`SfmNdT|g%FNqErgurq*KPaLVCN>-o1HowX2V-V+*rc1NUvN@B#N8EIg{| z+Y-PTC?^q~R7bxGg=7)%7cT`!dy<*jP~)o6EkZA{GOf}wIsqU;$aS#%h?!EbQ}Z1W zttN`Gqc1tpHe`@=Syr%YfW`+Iu}3qkXv-J*MS51=5Lc(yTA#UXxcQtlDX4zn4P5pM zlQ+-TQJHnJe>bD^^zH7t(@!!vqp~s}50BZeR@pw46}G;7r*=4)uKM1$a|0$YW#m zI{)Vb1rlKrIURQ4p{7&b3Bj9oa29GwJyaBlT=lhewRp%EyQE{4It_FBO4>Tf;qwA7 zT_W$6YNp5VTAkvK7}Uj><|QGo7e!^AzcQ228hqvEg#Tw5yQz2&)0;J!m7nOliM7rZ z)lKyUEeFU!qR$DJtq1y=I9cCi){bSk6Z)8&_A5s-G?}CfY($U7C&$F5)6qI*kt#>t z9=d3y+2>DoBd3@#BuHRW2Z-8<#hu&LtuGqTOmw!RH_9HM-aN~MtNtWz8J z@V*niRV-sm2(@NC%6^FWfnDh3qR3X<-f(+qoxEw78*W2#Z0Kr-Oa7Z4A5Rj|6iQBp zcp?jYZw}ZMC#KvSzj&(k(-)mKk(B;~_^DU4({sf{dtTdyopKK|rF8J8Ch}ZVqRczl zHuTt6^f$t;xxX z*3c}Q*J=>lb9vZ5QQ(b%NSw~w(M)dp8cZ)k{mra`AlZ)|)&jb2E|dCsU(LJ?4mp|}&hmpruhY|_w3&Hc%I5oRK-;}GaEh1i z`?6;&sd??uv4RrP4kN9Tr&R(HOmz(B#Ppn+tB5Ade0t~HW-N=IwVLdBQ?s8gnwf8U z6c*SIfl;NoSSs#*M4f|JoQgkBCtS#A2bQ4^az91aHCTIVz8HKMv)w;rq{m8Say@PK 
z$#;YkFU>83^{p%+B_-ckk$RO7>gpgTwJbe%Z_72(Zx+q!&?Yk{E8LAcXSx~FCT~^L8|Cn$q|-pYl=Dq63d+5+4^_`@~~?7NSj~xpKXJT~Tcl6Jtia-hLOahL&_p$Z(h-lZHkaf9SVEaj-eU8-FC4v?~w4VBGw0_frAYvFFRKC zUwq8PQ<_lm+3t;m?G^2FTyDw|0S7I0)bk$7E#2kQ)?7L9sz>#Ge%QLZL_tI()W$zrJqgvBY`*QQ%?*j^@ufxh#cS6El6-d!ucFyeN=AuP!D60u-A@ z9zEj8Yez+xG8NSt`ZT{sG_~}+bxs@fp)R~C{7oXOSzAMD_f=Ly45lshO+5j+eHmhP zQM@Yms1&f|JWG<`jj96k!uhS-2gfTWeOnT5^cQC-&PCL1nQOKFT#Ihp&G9tl+b4_A zv{`M{=?$km$HF(J`10sZ7MY;JvzX2fCKk%qV;MSl>n96(tO>aZbI5+7f=y{>!u<=- zwmD)M4>U(&4SG3gFAyToqua%Qi?Tb*zm!QuD34a$3~`ni@Kmll zkacl)v4P)RZrw@JbYxvo-m`amtd#tun`BNs$=s}&T}qYE^0i|?>~;9{aXns_1?b4^ z?1cC1q<78)`RJ2x{?(H+o`DoV5Bx02Dn()5n%HpTC~Mh?b!NR}GBLg5h=G#l^HSNQ z_2(YNO=jMsP0y|hZ9_Fq5*v;kaZ$SSEm0|sCx`FRxyU#OZt(MES_!lr~2MR9k zUVu0=ZXOZL@lnYxO{@!gl<*)V9;gfS!*c`->{rQGkUG%)^&9O<6SX%iYcj?As*$22 z)1=Ks%Jv)#{1*dlG%m)5JJzWrO|BDEMO5Z`dqUo)@efZP3x00yb75}?rukrQD~e2( z27B)&tI{WKj zftxVj&5=GhKHi%BPz-ELcvSXp8eq=qm^?rHG0b_q_1k>>`uO{Icb+fapDjNR>mh|K z$Wvz4)boI&htd&lrTzZoErrq1_oyXc=Dj7nw}cE0Jl zC`qwI+waRntowSDAJj;_5#sRl{#W>{3!;#8G!k3YU)1Dxe6iZf8$}s&Wm_FN{NZda zr_`yoOVY;i4;brR_@c{45n*FFE%j+_tz`)vxPrp0-x{`f-0-Ljq~f5`t>mV5gjwkT zH4DCmIofx3+`46KQx~wmKuD_Zj(8x%Y*{~d=Yhp$EYAl_e@baA zgB=mn`YHUbTb$ct1)l|#r(Y@H&XO_YmO22@Zu>MIcDoy`SmxrdCia4Y0MXj;#iEUW z=*|58JwyE8apE|Guflt>NA+u?bzSO1>!CG|8ayrQvs;I>vU5*mA9A)6$f*|MoYG*J*!0kB;g-_-Y zl(oSOq5s5l5?GLdpiT?})?$ zMTJ}YPTo-vN*jEuedS9z3tldj)PI>du@>wx8{O*OP}WsOT7W!1>0WObBbFp*TMWk( z8Jz)Z4JE~f7C)|5l*Zk7#wQG6p)Ya8)?hArv8TqWk@298;~C|;?xe%S_mL+?`gC=6 zr=qZ5nx!I*T7+~PRdjT6XPg^>BWag~2)D@0Bv{f82>{KS@563iFS>DR0qT>qD-w#9 z9#bZ9w0p-1#sJIn?+Z z-DTkJ#lGJjtsYah4|b=GS2p#>^x-dV(=LH8RQ$hy2lt*~_B77T4Q|P_z4&Cszp#AO zx~kIim2!aJ6^hHNx??>2?V8GPX zj7|?`oE%SFy9BavS>ohyMB3-!i8wB#FPDoMxJ^YZ zQ&jB_#@*pdZtpx=%rDpSj(7|8zLo@OL~*Coj8d94ti;T9jY_Ue9o>Sa)e$XI|4#z_ zdn8!nOpj0_d~T(u$>k@9`N+gSy?tr^P00IuE{P;VUX6vjSg#kjq3=(6v-1PGh??TA zJa^4s)hjy~y!dePA;3JCFZ0WL%d4~X$w`gt?~u#$UWsnAin>hh!-oN5DN*ldYvDqY zKQbMt-%L$%ZhxPz`B}KsYBF&zm%h;76)fK7Krr_5#{T~4xD8@?ya&5^;D(xsQX!`L 
z32jRKqIA0sU!9=!m32)4byM{zh8H0jOd+q=1%Y#jy=}>4^jTNU?|J?565DqJM%g4HX2(58D*&# zO#Xy^^l;!682PqK{Ltf~Opw-&oc_PX@*gIkx7^$H9O~^u7nzK>=HO09ry(uCaciz00~k*to|0DdE@= z0I7NT^u%@Y@aEM!#Ic%JrHndEOTN2#F2519ja$8~6e%oS>2?Szd49|D)yVCTz~SHz z+_+uy{)eoSMStA1D0z3(3g79)3yLE_&N?2nW!i1{es_z1gZcyXpBL$cJ?j&`T!+s0 z4)f_j16+lYb0NPIQBJ-+oNRrkUZ#eMOM6~78~rl}KonU)O|1orSQWFT&HC*+5F|5U ziL_{=s&@Mw{$ud?(2Lg!?C#j*p;D`2b}D@5G-c{^T8R*cyGzzzWcD79nS)K~w8TAe zBhy@O1=sl`UdP+d2L;1EbeIR4*PbrM)2|Mpd|KNENZu~x--&IV)_h93j-g|MvYlr4w(aH8 z#)JBJCWqhEdZO+j`!6#88AIB8*rX3M2{iU_$c!s}z7lBgq^FJR3zu=n&$2%tI=i*r zy`7U|IOM*VDv}-h4@kXKIL5)dljoa{x^g@vDM#(TOmh770?K@aH#e zI-CnSEkYVZlQB2EbVSGZQ{#goFs_7gv07=;vx9S5UvzWuC@dNSvFRo64*p;foKdG`)M3a@1oiRzfW(Pno+gmCNj zmyqk;h!q8ydbE?c6pGXurWVx0QI@6WNwBD<9L3w%`l!Oucdd1;=2{=YDE%to{sJmY z@4GHl=!<-3s1Uk#_4Ld%)$dG%g zW^H87e^@(>6JdSRUv9oIsc1ZSi>pLm>vN|k+#vPC7qV&8irrLXK0|e^Rcc547f5eF zx9q=&{$~t5_c}c9t0T?x-onEIw1`_iUu{)*jJ|v4w>t819q#-;*0PK(5Y7+fMZ5aW z?M$Meqx=$+gek(0fN81_u2_%d_n-nDy2~TKCa$+O*T8(u@bQt%aICZG3(X) zXAf}_w6s@FjXvI<2~M|ro;qU+Y8}f3H)e_Ua~6X$etbZ$*tW%-ICF%iU}e%tfp|V@ z>&^SSRS=Ds+Ce=I>rN;qgICy^1vhZON2Zn zIVG5bkB|wE^#mn55fx$U^=pZ$Z=oUxTOlBP#g&g}b?uDLYGuK^6`}?i&r3`(L*n-A z%-np3^ACHH=oVwrHqDhQHhg~p)_}GBbJyb#4J&?vSBI!;0!9C#IRNR{dw+Zy7Pqz+ zb$h?PyAOj&p`Hl)^{gzf? zVe7W-RrjW71Yh6Syz`CC4SoTOl+(Lo(^`moq5O*OUC6&)CMu|*a8%}Y{WB;i1VCB< zZzHj4@VR+M)c9&MkX<+9vni-)&7x_w8z_8eVcY(y4o(CD3XN83obPHeij=m>-{hoAi^JRN! 
z@95-8+~3p8{3mxm1OR9nf8BO0Q(AWG(lS)`x~-^o_B*(*t33o=>|obUm302tm-iRo zwQz^9ZhL9-rxu?evCn^h<^O5?>Q0~yZ|J73uPR1ax-?RqQ^2J6jIk|8ghw`iV_6jeZegFbbjBQVzLXQfMfTJQ;caeVfeh%OR91;DqFta zEThx=tBgfjozg%_a9upt!6s7`&Wu?OeqJ9RQCLJB$;AKuI;#ut-vch~0dGa32h=+_>u8xld0K z|5G%e_i*8ssa%z5rN{PL#m8M0-S)?AU;E%jI*Kf3G50eMpII`czyC8~w6x)qO%tkW zl496`n)iU=)PRD1oclFk1BzXMiIddQNv823MarS*;SRp=qOcglb8PZ0#O8m(|CJQ5 zhQVNU!1mzhau#DPlh-8G58`c((q>&t-W}6^D=ibvTHnIf4^muzex|x8_U>bN`%_Cr zWj$2UU5-(#C^>1t60_=sWE@0(l)>5+y0GNeROJ@Wihv90Ykrxr_NC1~Q2txg`~Afqp?mI+`3EqZUw^LcP666id1NMi>?>tGS~goM@rAA`O$#Xo zl+;eH9ejw;8Rez{F4dqJV4Zb7Ag~*2Ge+F^ zDB%&#{J17g;}oST?X?7=Y=_(0L{mxZu_f~$1{JUXjeftc9>H9^ih%%)QNORvUs1Cc zwhci zj~&X7Q^{BFUx!xEdh`4aUOcp(JS0yvCOEy{=o@T4s?1r`i%my?aY-uyJxIR5w%RJs zK_)B0;Fs#hmgSM2;8z_s>{B4lZWh0usU1j%dpfIJaCt^0r^hHqFV`D$5_Cg2V-b5D zZkN*HTFEhYaU}!!GGxfqfD7?}aD^{Q_DwWKK{|Z)JAEKL0=Zl97ofRGzIFpLr|3ZP>gm&}=arB&Q8SRV z9kvx?OTIY14>IkLFCp&8forfs#4iBN1?jt{4^cy%q#v4m4DYe;IT6K8Oj8oB;nsJn zQ#WfbSK`jd2v=2#+|}jGq5iBe;;ZoEz;d}h7B#j^fdftfbB5(`_;oq!!B{vZoa~t8 zB)4g9CO#;w3F<+1^$QX_X09Y;Q!?&_BePE4S+A%EVF2BTB}Yw)CD_X+^vXTmhcDZ3 z7}$OHR-ba*nA2t~Ahu6@`+LC%^LFV(fZxG%BD(!B*|S+EBK49g#uF{897=O6`oWPdYZ1na|9l7}k3_8bSptV?gZ0ikiW;oO z6VqIs@&S#e^rNn%YalWAl}HYcMhF4>;*v9!N3>Sr3Wt)G-z7-eJ@-qMljwNcqu}dgL*qICX&KRbZ`grt|E`5!zF^c85+FT6guX=fFV4uBTF` z5>5w(VLPT;>68z0zG@=s?l;czoC9B`5dvHKBXY&E&!Du)Le4B*mjaWVvDKZ!FNvKw z?ZkXPPK4*cs+bInP+Cc2@v7>=Z}{DlM0+|0+h!C28g}hUI93dv$B%Ndvzx&`EnU{<;7)MAu=$!Z8iv4oT@3ukjhMcQ*rERWOz#m+Rp63Xl;ZWTH=?WF z=bRz<;CK%C3kcV-0gnuqv3*?%Eu~85UBHzJT#NwIKegGcX#=bkVK}-Mm#Nd|N2sA% zVw!d0wEu*H&4qb8uE0Wf7Fn4Z=smum_B!KT)B9h5#8&L#nNVI&*gi|+x-JXP=$>X} z+_R<4zF&a!)ASzK2T#V&5|YC!#?Uh3)))OTsl>oXg!qZ`c@1RDZ^ISCQ>WQ8zC1B` zi8rXWJR-iemznuP!fPv=_zk0$m-^@+iRQ)nNSda{N_qf+!O|}PfnIGNP;Y3YJkxOa zJRHfiTl;cz2A(unwkya9J@jqo;%fcOt)}G~nJq&0xFNgwzBZHO87HbK(FXq`*`g_z zm$}cf82SU>8d8Y7f=72W-;*$cAZ7P?I}3~Jc{|-?qM*}OVW^lULK{gkTXMOR;`?z# z3(74gP9dDz+Zp_*ma_!uLuK52GB2;H{JWmPLoa!n84PDb0EE47%|P-xd3*X-ZMz)w 
z?RygR&&$2jbrD9h@}(GcP};e7+B2J%-l-R6OB$WO0DbHw#hk^>Fc#Dp$a4tTKoBNV z)Xtu!r-q~co&f%q@YZ4gWnc!AuP)z%MoUG{kknvlNRotrgdkkrOguC|>dcqXl;)HY zpKwoNPLz;&8rU5_U$N`f3A>?69+aNV^MFJX0$XO*R+DfkdkmS)Zi5~b6;1O4_tBK8 zo=}G9TMdy(8LO5`86)q;6``5_H77gb(o@%;SpePB*G-PydHf!=pCYkKzS>GY`-2!? z5uY;^6mr5~MxFazxEjtZ6Wx80$mEl`5Kh6g`e+i$CuGFNkJduNRQptlNXVZbJ0k(3 zsLjso7z@?_-HfA(j(MOQ>Mcle1)PgPatsSjl#iP`~zNf;?%2ua|0g#=I81 z(5AJ#Ie78pFi4*){TY`%fLwk%wHu7}bTV$%%%kY2GIKsSt> zCGZto|0TtQ6{mwxzapF$D<3^$l%giPaXZox?LwC=bHfm$oU9jo@@+1d2= zyip%YNF|Y>I(K7%--fsbCaYe7H*WYIoWvNmzr(l?qceu5j5!12(i#w^^jop@+S0?@ zK^sCdZ8A+*svSI@-|BkarbOU6yU(AAM->#Lyff)FQ65J>8qnV1?1N=be(1wakA-i8 zTIZfVnxr7{kwx>%i|F)$KE0yzjkX{m$g4S(N&Q!sL$mOwl_{4Iv6_#SMCwl3H)j-5mkcEhSMg?>u}BRj;kj;N z+Tz-573msm$3C$^iWPIuxJ=6;42Rpuo)*Tay4U5n=d1ZJaRcjtp~z8GKeBZZvh`u4 zMlZ4)&>R_vH4(1=5@ey0Vb1?F4`H&Y}bq~c1a=OoZ&E<-h| z|4>Wk+Wd_-Ch1dnx`OIZN&)-mg}PT>3GARGeQ(;t(q1#sqhD-cTx_DOlOThAk-ZN| zA>m%L5)20W2FGwfQ;pbn>?@1r$zP7Hiq#Wp=mTV#kb|1ZTgY?60z?F95}!%T`wR|5lEP-Lv3)vNyjr!M#fwV zsVMhp&>n|6=n`@1fuv4wjDA~R$;zKMUSk(wiC%jGaq%<;l7Bow!@?1ho zEHOk}S@uFUgduj6kTup^P1hIc^@QMXykyl}_(f6rh7u>$1k`2%S~?L?I$>8jp;Y2d zIHlM8cB_J)M9Jmz90_-XzJ3`E&>J-wp8FSZr=R@-d^+)c7+g}yIq&f5B}OJqiRHx? 
zH3BK;J`NqOWIH8_-qr6iYt~QqO2?juz~vn46pQ5&v6WC)1}16Azpd{PObzMN0F7W2 za*y*!z4HzWOWQZRmVW`LR=j$)S_?f8*oT~Z%dK65QB--O<*YetQCX@VBN|43Wn0Wp zvxq%}Us-O6L)$}6%6ltbK=j6&HBywXnpIF8E;>gr3miIlON$kaRp1shAV%OA9et!o zhV_)tU@n$ET}q9~by&(LhL{1Bf@lf?0>ZWnU@%NmC6v7fJ#%|&*gVhE1plkI5J1pcXOy`%TF6&bF@t9`mW&P)rj1^QD%2%p93if zSF+?iXuZYwfFT_OhqRW*bGj^%_@IsX>sOHz#KlduZ+Q1L+9hS$YB#rqjraR@g1J9M zrBNq&VK4@)ZGL~kHvf%O!h=^2n+nrl44*h(O!?HCIJuue-Ajc}Ukaz1`)+fy) z-LI;hxLCUX<_9Hdk%nO2-fA+uUI`y+h!!B~|6C%zkXZj*X#Vg5!Br!JM3$18Q zM9-XG{Bs8C$q>1X@G8m0?KCMyZQ4s%`*>*L=a(58$i-a9=&$4D2AOX9-SBw{l#p=E zmqv6Ab`6U|J%g659xAs3Gn9nH!ahleta^KLZpX8&+O9@Xk=r2wbaZ-Wc=WcLtr z#2M>D8*som)t5aTY`(u53suOROi%^4?+^60RaQ8Q`Mi}Dp9DFsH22Oumc?*lNm}E3 zY|lcfCFg;Uq((dc*j%Ytb`r$kHiM!s4PYqCc=|OfE=drIox*#h%w3=KTqj!6iEoVr zug|&on-7s&FP#=)QJ0yP(oVD8HvP(qbG(V-9*4HtevF#6xslE>7ES1!LXS1C)F^~k zv8$H6@x64zQ~HBC>+XPk-C{46Q)2*{id~X%##^)PEkMq!GIwI4W0f~_nTP;)&-)&! z^lH9u<~_I_0+jVxZKNkPQqkZ|%=g3yvFdgos3=g)5G<(0X5hp>!Y6flK$m#Bm^eG2AnUudz?YXcA20f%P$SE%e%3q) z0TYpnmBd1+e9*v9mXuCI#%zP#b1D$c$xEvDMKzh`T8Qxgu}DiWaBku8wWxn~z}5wX z%pq*iZEukj>P>2+u%|srMNBd}>x}p=($*?Ros&FpN;*>MnsC-T))p%~Dwl}hio2S5d?H7@GOEDQxSamx`4DXcpKWU{eIZ)!JZd|QX1_0! 
z03&M!jqK{@RDn_^T3saO_D};Ji(=27-9LH%fa}xE<)gdAxu1zEG1nq>XWiAbd@gUf zN-{D8LtyH%teG%ab@d#Vg{{FO8@C#| z%FrbZcijWm07z$mc9CrFGqXpfdSg_ECL>(uYeq6aw8o6}qD{tFwDILFMvQdVX^c26 z;Tct&Ux1Q*ezFCm%GvXNYF4mp$C#O{$xLky&p4fUj8XmaJ&vgfPJ@J#1}7QSt?s(?k6kHNjdUplUb5 z$5`461UT}Fv23K?ZTG`+3^JW*et^ka-e{ku`%x)A7XM`pPqKC*;%QZGQUMpNITIly z3TDS=PEc{iI0MvhP+Qd()lBStK>LE4hkCUE@sXTES&_lE($|ZAV)%|4xJ~sC1MZa3FJ&=u}O36(j9cYvuV_3QRPhrATr`@mf=XOhQ zY3tVOaRj?}9*SH3inY%IjV%&PO~x!j2yb(zYJAQ4Cr|7CYPW6r%4}&jU48ZZs}i-i{kv&{|qXdn=hFcu6UosB@R(z+{V_{3GkMqx5} zSR~h5vxm%AUMV@!WAyfRA2Y@ARuZmx>dxM&(_0}LMo;Oae|D_0(lkR zlxgoP#V|9DI%5q)|IxEnVb7eNEWhUI69d}M5$m%!g(GP&)Sr8j@Eg?T3xyp-O%F1b z1n~Q4WOIDjIZ#4ddA1!?*06Qe>78mg=y{eGJXwpSy2$b%sTV`zsH3lMsH4oQyX3vw z&FI}i08}YsFAgE(DrR``OuqHAmMm(_)~J$nP3E%pRiTDgXr=N2&=v{iT;BQe(FjBe zYpn@Y6{xPOM#&Sa+c$Nn@yAG5pmej9D1)2!_Lq(5F%KT}Xi->Lw2sc<=oGsI2E!1V zF2M1jW#62W6{Kr|H8`JQC6E`J3Z16Lj}MKURd-LA5!?e~e#Nex-NR7Z!Dc18YW$_wc?YJbd#FAuqj&GAY&F&;Ob62l0FNKIhRZNHp#pC*4dIll2#5zFHWJ~X24;t7SM%9C`{5}s9Tg8I1YzxnxY}` z$1$Vkc7q7`+a=y=}`2jAVpLdOC7+1&-0~X^Ns6h=WCoHtC>hR z--@bF2X$Fg>tWfxz2roW_>B{~tX4TY*MSo=b4Fg@UH976jI%aNXq4+#=L;6vL@;5x zsht9`m_df=(IJAN$$!TonG@l9nU$)(UQ8icd4T&$|Hc$d1WC{f-KeZ#4c$?pX(s1w z{Z7t|Mg`qO?7s!*d%z{ZZMK1*Gd9g#xaf{?!!+K@woa*cnrIx3j0KIA#4H%8-=c!i+#dS>EbWB^e=-Eqe@Dv-WmaKBl z0Q3T=y{DeDXF2P(%`d^9_$AE}6kytud@^yD$EbpfB?VQiJO*@{_t0#(nmz^uh|Q{3 zrFEjRVmu~8d1Eyl#)b@f(x+6b)>V{GpUVbLg0>T;Q^pmW6IX@o0u8*7r|z6dQ{R|q zs+k+u;CSuhC*us>#z3DSn4(jF7EE{dlTI>DKN=AX9)1SKr_^CgIA_O(uA*ki_;>Xz zx6&fv&~ikSX8b9yE8#VbM~Rv~qF7Vi_i8Mf^g2WeML6=5 z!((ca8X;!9(GqEkhZtINf0bi8AmxPr0vJnc39~KNCO%u$`9{{nSFK49!OJDRPWbI< zJz}ovdfy!_4xqrOVzgCQbP^E01VO4&AUoficy+5JvDw&C;kayU1^L9fH28J`!YBGE zEV5{7X^a$Qv)gmBy`h$jWg{tgILuJ08hklEMLeSh7BfhR6rfc#lV;2 zCGC5IIsjmXlE8WSRx~X?kdzQg$|66f5fJ|LCX!sAOiP=c{~yS(%PWW}aab#;C=s^P z)R-L%xp1E_`I`V_WJM)_YApq|91dHuD85xBhIKgpEFvqLaS3x0h})$6e3!HA!fssB?jxSAUtBvnfVN6v6eP&E zAn^Qsmg_Ho?+gT%HkB!SkjXD0x2*k{tnX5cVgz44(%2aC{H`t!s$URaRiWAOCN}<) zKcMGvmD#QYs$w<}FBd?G9d;C+b;%_*K_FTnqcw4PzVvX|r%_bEVylw|;+fVp)KZV- 
zDpQx#m;pfp=xAWw6J$fgx{eVo>{QK)G7E+_D`^ z1H`fymOLyJPfm z){!sYNy)Qu%sfpPyelBVpPmO;;aTyF`kX0F54w38V#e z&$^k&Sz((qrbnwDfRV@Gii6+uI+5;8CuXlnRCI{7xb62yk}DU`7<3&3A6&Pp+!!z< zsCXNwncy8nRDIg}=x6Rv^j5)v@`yz;Boq0f;YLz-WzA7p`Dj4=b(V(y%fQGl=}6)D zVX-U9#h6SBaU_C1_Asw4HL-yAQL8jmNfMwh-?30RyLE|V+sbeYjb7(}Wn=WJmS%67 zceS&+B`s&AQICLkyy|cb>@%NlIxGBPpga0O_dGN-&w5~-ne5MAOFVla;;1AB= z)}~sV62wR-9_N-2?bLy-Hp|6gszeFn@Oq5F9Zi}mu)nF7DlgKenQfcNcgk$CzVWzZ zF8>7!6B`q02S#{V!6#9MFRRjXXzK3urp#elk_BptsI2FKFHjIu{T~|MjM)LI1HQVd z^DGIwQ8aq6kJYI}M1`P#eyRS^8gL5E&-Bnr0S1mmYPyy zFEJIe#)(7MoYu_cV2t8~DvixzU-lRiD9O|t$TcV1-hNQhY-y$}GI&<#Y+Bb^#5iuh zy2^bYHpi}>%r5R#e6;x8<|BJa=ZD(7>tc0_^sO^YV$P!)lanJ0tJ^!f zM8Qvw=5x7w4OU01TQ1;Qgajj9s2JWYl`c8Tl1g#{=bDV#CGlU3xArJ;!oR69vaqEf zR7JvM5Rmb3G#GXHfIwpHYOekp2CIb*dQ$STSK7-^1~ayQG)&YV5m6dQ?&AxJfb_3k zdaPDFgY8VoF@nVe?7I*BanJNC^`a1M@~7Ub@F1mkGLJlBGh7l&&QQ+HT7$ev3dxPo z(qGV;0Q2bL(zPZrisgZf+c;#?S`UZxF)G3Ir>~yuFj9RiZXAm1oY!C~T_HpUZOdeIVby*J|==0oJrIwv}nfN4pSa zPE1a!W+pJ@0QXqv)TDP%d((#!Sb~?GIq@n3+O*qaq3QUs*2TcbtU^>3rcgf#qy9{# zG@1=FNNw|Nj4lkdF%B@Sa$w&+3v+44D}qyTB5gI-n_-p~m1JqFlZ3+Z99h-hhs2ln zU9F0fzdj5eNix}NoM2tcX_O-=Nne?Jk4)C)%%;L?Mi?T#ILA>*-@0l$n}_rCQf6nU z)?>+m$Zc|w0nKQxw2WARyXdPP^-%V_&Br5wFLqhqIp1o1Y@oF>OcLZ$$tEdrBT?&L z(61me*3H9vF%sjGuW5aR?~zjW9Rm)wrJUyFlsEubb`;$*WdSOkuPNvu8F{dymJq<8 zlFP~=_5=t5J&mRhN2~=;N3&!YsDSC_NK%LldQICn!|mDSLl|r#M=-!YzHrgIg|-N$ z&(d;s+B-0{k@Pz&PtrW4m&4+ZohMUDXLeJ1x-DGVVODrzU#DdSZYZ#2yiBOZ6F*oJiq0=YD&teym9`0 z;rxB!N=j|N^&OglbD*lj7(Wtw&6bh4gjZHdCjUIy7bFoLUKCi6jiE(mF0x=QF=4JW z0J4BmSiHPqxKkniQ{+&e@Dh!K^N>}wS^b0B3A}_b8 zEI^e*APy{IU5be}gx${vF#9VrK^!!yWxk9lv$0Bwm~$+G`jEOcP8HWTGS|x*4G58Q zqHhSbUV_B`QP(v^QYA_}hoxfGV3rZctn&!}m?UQ~WkAMbBPUJ|QN?EllxzI*$;)R4|qcfoHb z1i%vV5nH8eEG{fF4wk!=U)Y3Mz=9U2$#HcWN_RwY{D2DY?YMm!`DLx#wNz`{q+f+{ zo>6?hfncyDI#HxX(X-Z?DZzz++QlgPe-i({BZ9nSu~L5_Q1zq%)G-3yKxgW~Q+Y)l z`d**IcjG21t$`+Xq5ZLMT>RMpq7PxalJzVSclqsiL<|N-eq5csYjfXzB4bY^rH*x# zrFXK1Dl;#%bc<>J#{1$mR;r}@aFCL%N{TVTj)7tTNeb=%-26X916es#fJI5lilxrO zRqChL3`tmAF?#h*twQ1mvskm{K?N&ZH7FP 
z&@nNz9lbLbNMoYOJQHXJh5gB=R&C_?S&ce(b}zc8lLh4TXs$qyo|Y)tR2urqdOPtEcmKX9qn1>43{1HQP?2mk zYjtcAHC(bUy69|_o~2Y+&R4!%ltvB5$xz8^;dGz>h_X>JjFln`i4Y|2UiG6cI9Mz9B;T?{J<@bCE`K zmGV1snn?)O%Ja{8rKT9Zl@{R~p3E+4?F}g_a`|IxyPSa5%gF>>7T>6H47U)>8Jkrelu(HySL0ITUr%2`n@YM9DlyNrEctj zOWBp(ar~nUp0MO8jmwL@(PZa)2~XqxwZ6MB>jPk{|m6iRq}=dqn-X% zVopoo5+$ih@zrcED7ge7KdWy&CJF+PO=@V67EFpWsd?8ef%;Rf8Tcelr@x}*vG zurk^3q4=oO*;Xd(b4qx&p_0!;?5#iEI&1e#wD4DXeCdVPHl1H<{LL>Dk9r!`{O2*s z*OygfdB5QyB{!DN30=&A-(6WAbvhdQ+cfkVS1#yEIqClV=p+IhBRcSXbQawt)ATb; z*PT3$<4)q!EYVP>sKa&GJtqvRSy)$!jPu?K5J?6#Fkp;T3;1@|sV(aYJub&uIeMoE z80okeiCps|G7S^C|H8ytm)$QQhiZdjbNV!N0o6|D=9>RovkgNY646v=Ptho&A0 zJ-QC;Nt?pE?YtMxK`9`yr58(@$QHg~?gRE%x12Tp2@y zn*l>beS&!FGcvYxSAAo@jdUKrkBvH3L;=POxDkFukLM$_H;&D==?>RtlV6U)J%qci zCI`izyxOy~d1~Gruruh2a~`nAfG^)j5UKFsf`%OzeNq;eR1h}pq{m%%SovnBQj0SVph$%ysrv zR*oEnBu+NBYRk+-Sy`A1g?}(fe}?*ovy0rvxbs+^+>cke-+m4Z^!rUK4Nb}&2v=no zUh2!9o7|z9dJBI|rJvW}TJm??Xx7Q8%zl~AG|K6p4bi_4Vz2sUpH+411_ZwsV~6d~ zlyi1ORYe$Xo%i`EPzyUtbLM0-hq|#`<9_jIH0IE3hKJfc1(;@yieo2w68P0R(`sQ(z$@GDMbacyS2b zhLI*#ny@OCZC4V-su7pesh5{)F_$L1{_r9Xr`5z%gs%jnP}NvlFSY)e=8iT9W{jI8 zB%h742byTsspM=Cd^7R|yDeI)=c2vAlY|6FZm*doX+v|iQFEe#y(fm4%~QCtLIcV8 zRBo5Bo%@c&o(h7YJ1a|3i5pzcsNlzGlq=fadEYoH)@rDS&=0pSER7Is^pnzv8NrxG z4V8ZN{MqpsYq6nLLc-Je{vSKmEsm{{!av=}Xk6@nlgi;L=of$oW25mb1b2Ff##HEe zAHmlYb>*5_XWVT9dlt^;*BZiK>wNIzc^b_7bKBS#P%U~+gw^vV%Fa=zqm?$CNdisD zdoLR>)}lMOLM7Zt!kp7zu1jaz&T<|!G*m1v7SesQV_}R@qShF@4hYElsNukJF{5(3 zON#vfx?|%2)7Q|HBKgpVQ#QZl(f6z|eM0%Mg;`nQ7ohhu28pZ@m$JFPvs!S8vA%nW z-r|z7jM2LcDZ07<8=0JEN>*r=E581sbcjQl8Ha`MQ=Ghp>FSbFLKN=jZ&BgJs2_4T zs_9!TMmgXU%1DO#a9VRPSp1AKXhtXf>W}&bJDhKIRt{xZuLVZ9qHGoX0?1~o1)~j zI2zR|!B;PsUi}bXQ=%lN5!>##Bp8Y{*f&rO$P?kdkqRr*y|r*}e-04h;$qSm%Hdr(1H2k!`mq>F__S|Z$XJ5Fs zSFR3?v9ZzVU+(v^J@<9aAQiqfd<0sm6pL5NekdGi9-XU^vVO?7S&u?T5)Bf^Z2FcP zI&btQGFPN#KqW!$tu^=L@h^ZJ7%YJ+_+WuXHF{?+Ou7M&d*-HlV#-&XJkkQSxU;Om zLK|89YKYb|8&9iMbNMw&(A9B5%D@1P^5^mJoClEmxzmqaqeLCIYVLShleI4gvFk%l 
zu^Xer1O5!^(eQmvAjw*Uv-PIA17`Wc$4x)ZrK=*X$lww!2`1TGF1h+hRQWA=dcnMp zNopR-32Gq*eUR$V-F`t1>&^O3Z>oqu4DhokWYWq` zr;Og06GWy%y^1ru#Yj^hx6N~p&lxn;K|U@vM8&M)e1A^Je$$S3m)wFFORrs-b8dyY z;guskrd-h-F0`)C=NetYt4EK6O471Zl6IkP=c!Ap*YVDHiotcHY=Sun1|qPYc&~B zj92z%5QJg?Gr5^?sy`OI+5+A=<;;)2Pjq3gVc-`GW>Z@$X^7Bd!sXI9Sq>|y{O7b4 zQCdZu=S%1z2=WXWH8t=g`t7LnlKv5XI}|taxsVdFm|O`Hq>2)hA_A*m>;mpdq?u&) z7b~8Pe{8UfMVw)7`+?kKh-mSeJE+OC0fVkKxtpu!v2pWM#b?BsN{M`0x$YJj`9niT z70u3TE#ccSeCO(4z0IW9@Hu26AEksJV1VW_SbZ*c|+U>0s<$0U8#)V5iM!0R+ zUO2uqWxISURu4cIa4@Z z#889mG`~pG=R~tYCDf}>@hzv@ zmRd{w1dwr@)IpHMDpUgn9Qw;UV_@W<8U7PFr_fpf3L0YG&~1^Ty&UtFHg+4Ia2x^0 zCoU%?`IgHX2=!$+dicUsUs=_R+hpF9&^nXbtmtXFal!O6n8C|wJ&=ay+b|uae@Dl6 zW<9zch8F+Gk)>z6g{mQ2kU}?si<}PeLD+tKV`8d)c$*2Ra__^na?)bPo#?FFVAJA* z^VVwfJdibxz-tFasH&~Qr$wxHnnKsWbLCpOmf+BEJN~gV!W8Xfw-M-(4*+POou>aQ zF%rLI!WK^*3^XUYREL%4xY;eGAn`!U@fi+EayYIv|M4Dvq^$BBj*={5{79&rsNHK# zpaS4YjL!sDUaMbT*DDEiP-dr3Q2!q^6PK)E@tyXV6VlPox-`@hnE@i*eq#5zXN}eS zEXm?CEPI4ir|P(f8uHT2u41~jqvaretLMwYyhagAwQWu^`pNDml|aNdJf9KA8nE8J1tW|4K=bY8lIlHR1)!x4i zN!im2W2Q6aXXc^TH5Obg6F=3lYQk8f%RL}f4899={ghejj00uG14uUx#U_dQ&-~an z*psa++vE+}sH>Jo6Q}{2)qsna^iG&=e;J@nX(jzCe?=(?s#t2Ejg4vHO?5s`l5X%P z?LUB26b5Nu``a1Q4Vc_a7bm#B^PBQa5o0r>x)L!tR;{+sDtJ*sWeCHNmr={OF@Z)( z7)UN9pCyAmM8w4VsZIw{axG23ioqljk`ZgwWn~>RXW&rcLOoIzf!D%OiO*6g&lj~( zQ^Mb}>CPQHqr6MvGx)`jLJ!6IiC)M=_g|`*L8t=ppq@2T)YGD}gFSFa3I`!k^m%bh z>z{0W8KnsB*hCR_%`&!UR%P^fO02n96Qvb3+dK+F+(3p}Meb+N&4p95w)jlyY|cW3 zaVz<33<%A9Qf_>SquTrO`Vqmra~G(FCkHCJgu6%lW7CY zPdF-#C{DqVK#bH9oBa@`92n6Yr&dB=7~COFLs@9t-xL6y@&W3-aQOBTvD%`zxOB8b z&oMa`N}O7GpmktoStsTJ=Z*xZP zCWAP!yFwrJ!pb*IM#y2rli7c5IWFK>Q<$m#AHceeB!v8(vTq>+M%){fx~V3@sqzu} zC|$}awk^PQm|?1>fl6nY=T`!O|Fn17GB|Dsc!@U`5NBcQ2cCh0(mmNmVy%fUHD3|+|2z`< zrOqkBfYShH%WP~%r^K2IhkFbhFmi|cQrwJitVsQ>d|zfxrDH7ftr*G7c`pR|t=zD( z^TQtibVcC@t>$Ph3Z}a@XmzurR3YuqtwsnbyO_wZnnWBjAsZ&E6M){1=* zDAe1SySiR=Pb2q9HZ0+cU!P^(j8!KCq0QCsDfx_}G;*bshqXx(*{0Spp1Qvnmg1_yCX*Vq!Qpj_ 
zdy&t!cD30&kCjqJa5#qcnw_%HA(>7z^j9GznmtzvDMCpUv=XV3-M6cvHH+JK^>taKB2WMd$r zT$na16Gz@Kz0za&oaTh`^{dA_jJhghCml2P+1qi~998;aqw3O5&A$GE7!AF?kl>5) z9!v~ZtjD)bsPK0g;#Aba>)Bbp{qK8R=7T)>8VJ8$n%6@;xL4uWv3S1r zIyOniJ&9>3K~0ZFS2|YfSAMEHK)S8}PNZ@>+o@MUi{5jX!J88<2Zv0@4~Hnn}lTO9hwMnYY|d zl}enNy49Rb;9UfO0`@OimH&5S{j#mcxvyi391eW^hpgw)kH2CfWTbk4|4M&Xqgjmp zI$J~= z1isAxOzH=}^4m~W9Q%W|$qf$7&UdlhUZig#I&kfwA5G?20ddmFIwjCBAGyB#l{cHk zhBqt%^@m%!oBAt}K=swiRP}a!j~W0#Jn)|W`M1PK;FDTBSbKF^ZPeL@NIw9HmJ3HG z8-`VMo|X(%I$28=URb8)&QS%jqnnhKL16^k;31#eKWd=(*oL<8!I!gxmR{Ygs{#{34TJKCwFLpk z0&-+HWLf~eDJy?O65ytyvXY+jo2%Q+AtkJIyxOlJdMt?Iy*ASeWA~M%gq}x}4U*77 zpHR;Kr1n9}U0&|y2$JLZD)rCK0dCm&v@E;fnlfJ*KW!wog`U?m9j3t*y<;5lB4@w0@g9=Ab%k5@ z9T=dVO|U#Ae4;jVL^h+=s#34I2SVk78k+hrk+`cG929ZPpZt zss|qkE9vh9CN7xELHsi^(jXSi8r)adndRG|0|Y{S=92@gD(s+0&o38M92sZ(n68D9 zkf1s^j95JUPl9u7DHE@lw2%s>G<+{68H@C3Lejh=nw*dv3B2Dobt2WyHXm~}5JFQZ z5PfSCP1M3Rbjd9c_cao1Y zR+ug;@JU;$6e@B9K{--m`rW221GQmgcZ=;a)dMxE=MS=f0OL*&8H$m7oUR6!sHV(- z_eHFVmr6VlI@6t!u6oM`wQn2swkm}~RWHX-kZig(TUwQ(sxfZ>B6Y!}tES)azwdv~ z;LUQTcIF-s+#L^Z{0@MxGst0+-NlA5z``8%X74foS-n?(y{=S=i!H)StNP)eYiJ!J zb4n@YQ45joLFh|J;t(rZnvM00N}N4`tP+ZsT~#;l2OiVSf^=2-Q>p4metXeTVFq5N zr#iM{bHQ6O&D~K}aceyV1uS$JwmB5;vLtjFkh3snkqBK$FTO^>tIf?`2X5EWJUD6AuJAR;PhqCs|&_VWawFqpsl93ZwU0stG5_*+m z8cW&Yo`jJC4Nv?xf7GtDvOLG)Jy0obUG0Ehjh@w{-7eobDEWZDOl$Ci{C^D>IgBS` z5)g`#q2Q7pgutj|)aQ2F=&YQ?B|~X8#GbD#S_u(arIi&qrbv`QSW0Dl(-nUJKirdq zP#+`y`3dv51VN@yUy@o+H6(LmK0i-crd#hHUhyDIfnZQCj^->h3W7YXD$G1mQY$vARM>xAST$_^kQZwo_M8Ej9yd zvkli-INVf~>TS`;psMB;w7lQl)u_uvkadoh6GH-MOGNk!o4tce_)F!wBGa_lZYG}2 zmPSq-Jp-JzZJ(nn?_4?8P~qW~!chO(8Sg~XDl{@BDbc%#|rD^-z$QW%rn<-Ag% zc}J-VJMgrt7SE%_FJV@y&Dfu55u({QClaZl#{y*A$&phL=C_22 zoVQaRD2Iv)QVxhiD^LDqLMoh3_XnVrLC(%cvA5?ERJ^Cqz@8)hu`o`b8hr#Irz9~N zRo+T{Togd(OsDu5-j9*vqzdH=6c_r{6W1^9_m?%!>Uo&wk^=<~4a)dKk*kG!4f`0SLl?MI@V`qy5O-BMIKC0jc zckB^3?ZdL^um!-B$gKpX%d>x^)(9$1VkO2z6Q{tWgsB3mQxR5=lD}1l@SA*(Du>cp zWG`>fbksVlSGSB65spj4cZrY^dz2IF|B#L19!Z-hi+Y%d2Mcu~f6|9k3j2{zU>hxu 
zECqQ|d)+f1=s$pa6?y+B>F~jqpd<$J zFA@4*!es}@Tny2S(t^JT*{h%ZPGlk@(qPh(ur76%bJ@i$FZpCl?vh;^4(kHp-N9z7 zQjB$DMnDA5JL$srTI%sk+ChjWZHWF1%{|;;h9^ChmazeAbCNv{1nJC3z46CyY^~ zRtmhZIQGXXWDz-?Fd|dz?6e^gXtX{wg0T)`iSNq{_4xfu(+kbWd9l543%?CHWO|FC_(t02aD9Le%b(pLD`2ov^clKUJxe2{jHVOBxy z(e%yeN^uHXRh{F89rDSEVmlC^j^dJ!<|jM5&7CUk4apU0a)IBeWc$7>!G}#_^Wwq5 zGE%0DN6Fj#3X^wVpz`^URh{2Vk5-v4na^QE9mFk&t@b418TeI+KL_puHcmVeZ6?yn zQCUBY*ncjbF=OZD#NWB_vYM!&g{6F9H9*XRA!mgy_ZK^A7!1{WAVUv?qop@Gyus(F z>ljj-+bsSP1c@9V<0wq3v?;cY#POrZAV`p#twT%pEI`YjD?knxkuawtdL#&BHc}ao z)+%qnHp#Stq^6X&|NcVpp$wY({3jMSIjIdq6rRrm z*$(bOn2mNHhWG7n91J#x!S;JHQ@W4h+yP;7G7Icb5Ktz;TK?C0B3``!PoHLsy~?_ki0XwcHD)uDMI0@F zR%xY8ig74ql?()YG1S1Blioy1P1tJh@cRG~9RxOFbiCgwf2@B>aRYyMC@G4cXu^|8 zKY2{vipIN5G#X|=heJ-O#lTsn9)H9OJzHKXI$u07xv$D}5jvg7>p3;YP3CjHGbHw) zlE_!7=meHj3-76KWEt*r8~PF;w!wgZ>?_)lQjnLKlhp*(Il;ye_l#v3C&+hQ@eP@b zl*l_vCMlvh#S)t{wQT4C$JLgzXB^$CTh*j;6!YEUS+HUaFrq` zJM&s)yE!HojznP)*clb+ zw>?a49jnU0FXJrf`p{LTiApl@moKbyF#$vQ<<}O3{yuvv;p~h1PbbVHN=NlmQhVS{ zq|s5**Vp;0&NqFN%eV70NLsG*wEjKy0C`x% zpIMwIua6qP%R#kQABYC>(x<~{ygtHA$bsB>SK6FkXO3$~f4)`D{dXt7`J@vcwM3IQ zz{%#vmRlFZQJJQtG5cPMYtiubbZ%N`xlxk$55QoGXlG$<7&*SB3%uoG*C&gAKocux zAk3Ik!D{4c!RmeI_<8vl+mwHsDbxM!rRuAi_3|T3tg+&xrD#&)qkJCZ@bWE;DQr6~ zTi_w;(4H#A*I>v1ik+DkX0+`WCi(7nxv1Fxd-?x2YT(RGx2ASWrfglE;G$*)Vt_8PhJ8gSe z?$v2FO|5ibB%DI*|ISBe&o{^hD~C$r_jXI>{fC}$gt$Lo40_}f~%CTjt`G(|OZ=&ojOO#gHM?N1yB)lwVSa94olXVPgH zIUkgMFpiE?A2vJw(Wl(nO`ndHLen&$5>^6^R^X^zM3} zBDS_f%Seh^pXC{zLo))*8*53tB5r&|rwBC%S)B)I|KH8&=a9*<%#bs238n6$!*R=u zkZUqXT{TfAd2gXrAy+CwbT3M)1QE1JWiv5hgvM7rcBOC^z)8z3ly* zsRu)y!J3r2;|^NrJx!3@;-h&I2TSAYJ(cj2h)U@F5^dLcU2Fbz;TulCBA28hQC(^N z5USw|wi<+A3Vs{!CNVdNW3QFW&@7s)nq6#jfLq(>n!Mvj_j$@qiErSS8u!X`RRgKJ z>&n_FRjA&Nb%<@mk8+1srb*nf>PBx;B$)G=B{CLfr-~)ZQYTPhjP22hHAEmNE2w@1 zVT>1l5a%=4N`_5C{`HG-$UcnKRk?v^V4AwzdBj$A&&~V`^h^7<`uH9p>;B2h9JotO zsFsW&y9DZi)uNnevG&Ig>8CA(yLkDV3}z8j(zInHQc5>%Q&EEA32*h=4=Y!)yR)X;jF&i)Pa{#mpew?Q0#oOh=XjXj znv-49rQ6H66YqzS+wO0sPn$igXMGIAQ;>=oAs~76MfcD& 
z`HA1UviymBWDe)^_c@~)^6tByDUW#yw#F&k#L!xgazaiJNU>%hpaBNnKvnK~*^kq* z`AuP{TlYNl!s!Bk0k1+ES=@XYccDz)CjHh9EHEa@U8VG#+UKC}ruJqb`~?O z)(Ww)WRZ4AyDv8dwupEC#~&+!nM-{sw5ih#Fa^VLa$Dc zYD6)O-2}87`e?6gsbhfn!|UY@fyD!s;lOQi)=k#^;-^hBZs$i|dl zxh`tmUeoveUlfgRmZbP2|8|a_oN%8!BwUkA%7V@v7%W}apoGp=3J?JG0d}=qhKKbt zGXe)h78QOqNq%W^<0YJ0y`(*#gXFfLqh6m6W{X8==PET({f>Y4Sc_}iI#|lDJCZyTKHnDpkpjQMU_?f z7sZ`O@KAH4!^hrCEk< zGfrvj$4wE+OtYK6!0$vsq6gS|VfR}#@nP%4+0X{$&ka<2Sf`NUJHL!7%E8t-7_li5 z>+@dK+||t7mkt`|(aFRs&m%=PvK=)2zfO0N&LvxIcjHq`L{syGUz(Mf{R% zo$``Ijq;LEh4RuZ%u2den94}|1cJgWtuN(;Aeu$^yY04Mi(h(VxDYVxJekCJU%PAl zmesnmzajZre${S#-wa__2nL3&*BiOxSa+qn&Kv<-POb49;oc!fhwV{S2?gNVi}!a@ zdTWe}!N86mK+Z!@fg<_B{5%K^#AP~tciqp=S=VaeP+D9M6#a!5Ql(h$#MnDG^8WX$ zdW60HAgcrXyFfE!8Et*-F}z>n8?UhoP)Fb@^zZLpiNa$-S;_h#5si{&$5i=!gL^E)FRaKp;qSe`lmIoSrHbl;HTGCn=>VO)4E8%`w zNlm?9?_A4)OwkVj96MK7^M@9jm2MLI3GZcB8-0Gok2Xdg5-TjF;0dd}p(Zl7(@E>B zU&{Wy-=2g4dmF9rXwHpoh^@^&PxcM9&E9hUteq~7El$zu3v1!>RRYmnr0 zg4q-01QJ@-OzQ1fGuE$4DoItOyz>Vj^B`X72m@--CconDug_v_-*e83pQ{M^#*&6J zCu%qgNVEW00W>RL|8MX`4m1r{-K}QbEMKM9I49TLv!*$@ocMLAFAYbGOv|eXS@FFx? 
znM&8Y;++|_TIlb=op?h^PIfjs(QjLCkRRE-)W50+fgdLdS6xeSxwuiXM)hb`#oa@+ zr^K#VUwcL-oq95~UF%z=DPjoCll8js_|m8w(vsFIi}vLk|@Da7BCHN0%<5plq_oQzh?gB{&8DGw1oleF3iLk zEt+}?sB8`IejwSyixn6D;QZOxd8Cm$`w!FDy&oxxO%>62)>9KxE8Y2QVD?c%JeamH zmU;cXIMm8yI57Y8^T6#AdiV@IZ7pFwJuMRBA+XXp&pZHFcTsGkERNYC^ebc{+R8*j z`Q=-qS5ttwlA&U=XGd*5IAQ@0NWI^g(enM;{^Wh>ZmZl&?`rhJiJQyP%TaA z68T_PO#$DSXsV}{&tSAyLy}$i+#dN+tri?(=9P|%AdqKjBpa_^O@ebpgk#DCGMCb_ z&T4cd{~h<~P_oGK6%H4RIubRIf?g>h;j$v}<8|(TmJ}5OTvS4Rn&YOEaV)l-Q~j6+^@b1$1Y&!xb6L9o>sB|Ci8`_+ zslzetxvXk^e7Hc$0YOk@XkOebNFCX|CVjN1*3@BATa4JB%v9x8X~5>$9UC6@V~|-Q zkhb;cw$EdgyA5TDN)tuQUK@o4=kYMmI7VyK2#0G??KrQ#8r`qP)D_0BCZ;YD=-bmU zIb#dm7Bqa$Dq#wqU_WdsA`ClTzT)C$`2*O+Q*=S-J^hktjh(z+|Gm|^u1yiGLHIIS z?1$@;)}H0m2>oq06VHdzq!L(rzVR^M(>1Q^*2opu_@jfG(rg9Lf&E#NnNAZtM8?x1 zO6uB9dU}h#7I@SIdH=54#6$L9A2}QpOv5PTWk?)8aQ*#uy2j0EonB~IB_VE`9BjP+>!CUm3y<$5!*7d4FBb5!gWLJ?@s{6N*o-`rG07I{(zX6!}|A3wF;GdW%Dc zk?PiUMKY&)%;%FQf}N^{j2Kn?Y4E_e2w&~l5Lg0 zSUHcp2pFnrKaUc=ZG7~y1dh=|pv{cN3V%dx32wEdZu5=q=y4=K0y<2^MQ}(&Y;RQIt3mO+eBMpP%W1gHWRlL@>$Nb9zQa|7k=1^Ve*hz! 
zo2rUKL$AZOh{-iSnwoxaZfJW2u+=JJo#u=S9HlQOt&QQ5Y_|sC+;0YyYZ&Mm5aw_C z_7JF5>-!XLzbi&7geLdh==?wy=Y2s}LC=eu#;_LO(Xz#DbE!YrkI!j!_?Ciwb!$JT ze%bA=)Nth5c6?Z@=M%b#q`Tz}SD@|X{MGSNC3LDR3$&Gq$X>5i`oZEgdx3?xz4s4* z)W=I+2$vwpaxnwsNy3G+Z!wkKM-SncEY)>211Gm4Bc|wRwAzx)nJgWjHAsKV@IW@n zg(F?r&TbaVrSpkK2dGjgt{ceUy+}!_r>4T8l(KxDloEEyF$IaAcwQg3erVgDc+vA7 zc^N8g3?m+Pa9@|Rqe*AytM8t_s(YjM`tO$@TJ?{LVvEs6PpGWqXE_F@rZ_XXPc|_t zs+DF|Rf4p=%ER0*gukQ$JA$h=co{E7nF3bjki!J3^KD=&+%m zUK7+fZ3OT6IZtXKQ0c1V>xhElXfdl@`+5VA+?rR=|2#CDTq>omMvukGtuq8BS?yIO z<2(lGF%swM^4XpqNm}8wVO;qV$CVemu#^&*Y5rx$z|bXl11FPfuc6hF1jtiN zEj4zHM5q_wh<={3q2Wt-&%G|fxu+#M1k)(JEHP873(nouirs-U?bvPY*k3wvEHyu} zsv6^%Sf(kLofQPj_p;TNoRyh15(iV}eg4PCO|m2)eg&7H3};pR7;@VQIYOnl{N7>X zEUH~!DS={rrHq=TDtl{*!@Q5ev7getxFtOyQi&YWwsd)9P5Kr=j4$GLqMiQKe-V*7KJ=yGAly`K0vC^mRHZ!l`~7SSCz;v*0kU?Uuf|EoDg@iM_5+;@{0KT zTIEIXExz4$TQ_9{nw_dXhZi}K-(wSKAh!(KLS(Th=J6-F*mODr%a45XAI*+jkgr=a zvw44b;Xt!9uSj1iR9eQkm23B6GTh~Sg9lrZt8=W2a@SErwrZl6dYj?%7|@I{zs$jy zRx*#O`1_V->Lq{tO`n*P1?B6f`q*u7ZwZ)nn9+9ha1>@?&-S`C^>{`{_Ml?FH+5ib zwMMtQvaj{nQJtm;jImZo#A|dJCp$j)kg@*<@Zx(0=6um}we@4=J{)|gIG}%CcVw{K zk<}vtIUA*BhsF>H+OSfv0^#0+0}1AJ6L2$ZQPeYZ3ePcigmK_@5V)#_u&$0)VZ;&o zxE9rCYKU%qjJ<%La@AYCGAwGnnNC4BPJLPunvQfgUK)_v3Fu}QzN;e)Ig>=<@7S7H zW@TmV{1@pj_nGqTO*i$9SJ= zILh=4*NEjj_=~qGx^^FSRTa3|Z{7U3qvthG$YyZqFsyNKE(}VtKSfi8|KBpn%nFF*c}TWgVwIa8bmvP`vj z0O!=nftTlS4s#c(BndA{Ubp6G&^bqjtj21X+C8l+=bAuyt-9&&r37u~bN_c^4(C3h zub%GCdvCgoZTksvHpHcHrFL2*C#fkXn{3sQw5ngm{j5Bt#x7F^$AJu~*4w)SS8>aR zF2H{PgT9Q_NeBE*)3b1A=4T_4_N_o7T|YgG&yq(PD`4RK{KR5~5F^+5u*FmX*nh-_ z!T!RKJ|?Im)%kqfecdxPt3%i9t}IP(hN&lRHvG;|#2fR0Q+^y@9_e_{DcFGlurVVA1Y>h~?1 z8Z$rAcvkBNK_d<*_(c0y*A72<7--md?wySUUY$iTR`#_KR2o!Xu{(xaQY6&`BvaFh zCS>QpC5Kn@HweLXNWm~lWk+YX^?v$X+25LM+PWsk34_-1r^$>nl|<~DgwW%ugH*57l>o7&o|`Yo$HBv;32ZMl_WnOouO28ZTK zd%!qq5`WOXh6$G!yD8-#K!VolOXLA}463l*(N=!UUe~fL8M8z5>aM$!F5hF9b48W? z)nMAr#bTT{%x2Z1TLblxOWIz@u(_p0FE@M4Y? 
z(yMX$Zg)(UA4e;>SETDdfihvAR4d32cF@t$c44?mGyBxv@j#c85#s7vMf<4Rz`VPT ztkj-i|Lhx4)kt?^DVeKXMv*dn<>WGB8M8Q&{j`h=qeHiL& z&@BhKJ&QyRD}SXE*p#|E!&|MNhLl^?^2Ze1f)qpP?kdfTinVtfhTFjN#})(|`q+c* zUZh?9-#*{{%KIIb&GMKw-@-kn{8SEZubxmN#pIOyPy2HFTw;dpo_mc8GFieV`#D@P zI#ERzH@f`!QcrfEejY5TDi~3G@JxOpNkDu8zrFBSP(cExPt4cQ`gdhyXi6IlpHS73 z;8xl~g=W>a8@P}8Vl)n7{C@uXxMF#)h)^g&Z-1T9eO|M^waP${MQ!OFGLNZC*Gb|N zm1QW^_C1UHNHgDkajB`1pYpGenb|?9m+zB5tM=O_Eg+SJr=lJq=P`nIZ{n_5l1#4< znDq%J=Ee`tz$FAdMlRLm-A{Vyt=W&*F*?PrY}OHO4FS$!7C`Wv@4;pqenJ^+$KI@= z6yvW+0WuG*_Rd%l1USj-EXn`Fm@!oT$0tP9kdz{znc)OUr4c5Jgb_ zWhf1jYV45k2sjzvu;3#2(wRAPavL1E&m=iBk*tXr=tU#PtU&3~4NeV5F_Gj=*}LTzc2{By)qzk~hwFzqw-$jX%>IgcZL z6lK1bxdY{reztV!uWaeuoosIV!l(J9n#}~qgqSR%!?%L}lcf7sse<_f2o~J-pX`aL zZ=tUpl8tjlXWAhPdPhLGh+@c)Y_ax)%;3KN*657ER5SWb`y`-}q|HuVm2F*8Ut0)L zl_{w;FZ?taRHluz021D7AJ6-&zTl}T*t>8vSOl!jTUeYKTc7#iA){*|WRCDOpy%i3`)WNu z>-7p+UEAjpcDHp*o$$PNh{yEeh7mA_L>$16X$1MuwzqMRoHS?0N+OO8mVNH=nkdtC zADLEmLgwaZRVFH1usJe1ciuYtbg{kZ)EUZ;QSzYTw72x{>m(RoydDe_7MSr3y3ker zoQ$<4+8mV#3=Ft6eMkWsB6{mfq_M>UaNu`Cth!!6Q9!DK^WKeuunJgWw*mmV(tQ#5RGE*PNnE_{ds{4LpowD)ERNGw)QDqKw zs}-eQ3!5&Gg!&B#(85%Xj8aqM%+`r4lRNq3)+s5c({iRRe|<{u)d8J9iSYD>D0`H0 zRqbiEMn}0d_+m+l3tcy{V+8jDInmTid)|!KGoMXXip@`i7Hq%D9 zTX)|%L<>dcy3}^JT($U7{%a*UDL#l!N1#=zo@q@V`FTR*%uz@?2A^#ONoF|2A*@Qd$mOIW%x^Qi)`; z{__#5cJ`4)mkPT-&1}6Rd3W(~f0ndA2W;$arY`GRhU~TLc>My`f@ZLr4VJD6Ih)B{ zvnjZ$oJP2<1^nvg{qI-< zV!-I}r$Naj0vd?Zr@Fi`hmHxtWYgF54>SO+pH$JD(r8?#!3HOEuFm79-f{(V3`%mJ zhc=U#G7s8niSFmFc{n%BpNNHSrpLQw4IZ)MJT0*UbZ z*;(UqOW}ky2ubT%&LY}sgPN*)a8Sg(Cd6y}6h^e^6+7%+`=6Kk1IX}ek8GzgXK2=J zR2+~2@UAoC1db@Cxy-NS@VKq*_YAn*eY!n#jrJ$@RZXV2Qm*L*hs_>>BbV~H-AR^7 zkr-oPESt=SJMr%`AFAeCXO``&{B4M<_bqL#hL6@_$bN&@Cl(1@jx&-w3_iISQZTpZ zZW-F@{<~(}w#Mh2;&|6%DQSFsGDsyfQ=&UI805W}7ejsMU6E{2fMZT=*(l%(m42aZ zR9?etmSww5d3{@I0k&t8#*p54oUW9tPGTa8A=EqNKeh+-Iu6K5 z72>^3c5_9x7W$6x)v;Y|_(3Z=~{C<`r7HD!CXm@ot1yf)B0m#Yo z-^L77I~L|&@o3tLmOh&hWUt{BZ>cTK$Rp8>%L~1xD&?0+l;qc{Nhdd}Nr!i;v9HmO 
z*-S$g{Li+h{i5{-hEZCF;!oa9bb5WuLN!MJrFosi3{Qk?)>^pTV+vvCR@TnQ%7X$HLq3dw`dVeBRmOf12xGtyIh!$GA{1tDnZQ5UXm`Qm@ zE|;yqEtUQsx&-zoZVOo**ceRi~=D^tmZ zXd_iQqfJ*Xhc1O)3>vwP7$vgD+PNcY_fjjecv*))2StodAZnG!~QnQ$-M#W_887AHHFH~IrO zIv|2f;z?}!g3dn5!AGah_bEhK=kGVKY7tD8aWzp~4#?*;7OYixqx5PFCjN=+9j&2F zDzQ z=00Y=5hvly?|ic(?My)W6ASObRQhfnHw41;y}_m-ivF1vfsUIQwU0|*>($cs4s$RL zgVa#0qj^42dKH1+do%Z~`QA)fKAJ@B^@37^wNuK4G#!1YD_tS>T3MzK(4aJ`$L>D> zDxz$N|5Iy@;%ac};*oaNRyA6$3opu-Psl|OcX4pGRxXB}Ra5ell%jP~hW2tnE~70$ zjk%W)`}c`KO+=`W^u_cJ%YB89wxVSWgGpN)GQBsMLtQlU8J7?)8On;vw)udZ+Fn+s zfpLw^dMZh?BtzhT0;=d?)IJscXZwznp^)1$cvoA}49n-Vb=)5-MsZ`JaC(omXpq=w z*0coibgB|XEz~Mxm6S>dU(>vyxJkX#UHiPrP-F=Gb|q>d4a)vkOJ`6>nv%&_E3hi4 z-Y@l%j?Zo;h};#A1Jh4UkXahyUMF8G6*Ke}gl6I-$y&CQ879UcWv2!y%cIs>GAy~9 zZba;zyeZl!73arPOqF%5$WC?#y6!zrlM3hy3o3?M1Hhk*%1Ng40 zc9h4G+q0lAoO>GTJ7H;?Rd6z>Kr>VQpFMag>+j4ATlQ7|`TCm+`bfhx7^rUAAzdU9 zN6^$Rp2Q8$P~v^TbfwqHC!3UZhQYX`W_HJyqOY&(#xztq@vWvVVtf@n9&wrcV#{_b zK31166<;=XYW4_kRGjnrz>OrJ!gftBU9N^UG|56C&3k+PfzaEF;{ygiwC>5VcmR5 zbG7t3W_;%>)6eR&gA%@?DRjC%jxi?W5hjs@OWr3~#jPachnDn;SC zrG=w*so|Y_g_t1MhncLCh^ZwY_1<)i9r&X|PYbPVBuYvfm-tPh|_nmM;Mx zDt0{9Qy}FWOzE~`J!X05Ilsv7wv+H!zQU47ZFA$oBWw@tc zEAbwBtAV+ZLXQb8kHEI~li`0XhTh-N+7p5S169q5d=%a;$2Sn$9hcQ8Bi8WMSJup0 zAjv*mGa{sBGeYr<(}AYSs2mPq8<$rk{$$G0y?B~XfhV842ki2-WV(g>>3aRl{zj^e z-|{D^8m-#cE~I{od{+z6+67v+PPPw_{Ohst2DJMfRq`Ity}orhcgxnckm?6W{Q1~h zORN4$Ryllm>GwNtcc*&;vCD0F)5VAD@t+gROMF4(GIDA4`Xxdi>UrxN6 z!w2;AX(Z>Hmxd>YZIAp*pRDpT#JQGnZNmD5R_@o`=JTQTki&Bn7z?h7y0=AneBp-B z;Eq*>Eu578*2yT~=rZP)vf&(*UM0JhY*ZfV72am2dBi??_tkz+10yRRwIgsKks%Qk7w1|Jc>HfrFwW? 
z4Ox5h&jI&<(J z237Y!9xxjV0&>|UAnt>0YEI|4RQ?|8v{%)=(xeVre=Baq506@|w$$kv9OE=8EkoHm z4z*20=*6Mg^2kaJ6l`o6TcN9>AzF18Q`wbELK5%&5+mnQ3D{&&n&y7gAoCW_pGf%< z>vR-zH}_>gBS+W%fB?G%FJ}Z5l;v`I#eBS(cw1su<-RNmimnIcrCD-wL8=_-pSY3)6Pelm&gI2X7ts#g}`!>!7nt2eFLn$&oGQEbNS`w zZw0N%69NK{CXI1u>6tP|gC zBl(0%GBukJQdr%50e_%d6m4p=YItMs?%x>5~OJNYysQm}xq*V|>lBw8@GGll;-0^S{Iu9|TrAwxj4c zBY{6`Pl#zh-}89n;DQsJWVc4FMgh1=F-2%($P&dv2VE%AYY4Qgma%gm1=KoxUo$b0 zp4^^3w~cP5b8poBD||*EO4$;ho5KK8@N6eL#|-Ut$B|3v@q52mv&}XS`NlYJpd^b2 zag!^LDziD09y`=>O51Bv`@6_{OyzDRc0<*PsDE{ug8zrTw+xEwd)7t=m*DOU?lL$8 zcO9Hy!JPm>gS)%K;0}W(XmEE4PSD^EK@)=Io!^mL|9kE|RqtDMKixWCrfO<-&E9M6 z?p{xKKiy58n(zn=W|o_&LLzk9AV@$N1D*$3_+DsQ1r3iEIh&&X?5o;iANV zA6-U^2)U6{{Q8i4BC=)XA1Vh8ugo{Lq>JeMFVR7X74yw4wG^$Z-yjrOpM5_suWp_>mlT`7oslmUo}u3`YqFi0e-0z0eT zOUruDI@U~Xb;eYPaYp#!b~PQWZMhURC;Jq5M$#YPuY74kCu|^NtG?cIrYbf0kkTR8 z^+P57+G@dF+2z;o#S*@-Ww9WAN3nv;H*}yv4q?xR!GXU3yKg?88!JDk>U9%W@UtZt ze)y19iCgdgk_~F)Y<7ioq}U`DU5{sbl%w!;W4>=n&-q9E{y}iK{`K|=Z}=D*Aozyb zqx|9@t&Ey7@nLQbwqWmmX^=6QUOFp&t-ZSx|77Ec9alVYdfSTI>jQ+V!3a4SRyDV`*q7drqMUNIVD~nv)CR7M4Ehz(%)j%=ZFm~|F{`U@(pnaEAM+Co>qZy)!a_IMtC8R z@i37@Q+VEl%14dltVP?g_?&kyjOON-G9qolBIC|36d4X1@t^14z1DyEC)>1mcB5Ir zwO^X2{YQ*s*z5SSFv~P38eCiVS#ts!x}rh?(mNH9_C4fmhJ?aelr-Ry`41ZIi*GzH z5r~D&3uY0VNRxeY*DkIpKkcl1A7it><>~lRJ)>c7c00J~-ZC*>TYMxC@7cB5A5qT! 
zO@Ofi10m;P&EIc^lKj7YISDxiO8LYJI`b$L8@1PoZMj0+&E=58p~e1S_xGLZNj`(6 zwx46uxjo#3e!Yt7>92pZ#T)625X$7;5FYxHx;w)VFG%LfZrqL^V=+CEryZNcSyhU6 z8Q>~Uy=k}`&(rBSoIhg4rudGQN6h)0^^T9`kQp~cwL(cX^Gny4VUtl4j1|u=4_~4h z&DlrS6Vi$E3bS1@GB#^+lkT8d*BFY`CWUF=D`6QAygCk8vHN>5BG3L|Okr+4P#eu< z1=9~PyB5QRHr=;e5vy&<-_iW$+6$ZS#~EA34ikoR-?&^xO!}EPCN-&aD_@Re`2cSZ zndVY1FNU6bn!NVo1WxHuYDsX+}ls@ z>l8jjR<61iMYfQ%l;=Ye-nwz5MgQrnUoLHx(LQfqEqbk+x?tndYs<-}IgQI~#k=jg zp8w@YIu&8&UWr(AFq1Ly57 zHE`9z0vDJ_ut`h)9EXK|e056BRslfJcT8D+^09msm&gpVu6^VTLDHqExKgK0JlHgr z46!VOvvl^UoNsZC)0#jcvO*fUfF5}nkgzqNotuLEk8vJDtoi~b> z@3nhhIFE02+~$|$t=9T_fc=keEBW(&+n2BfCXn%sgc_FZy=Jy0ft;TlX?TvX>Yp5` zZjMYgwOs!KbZSPMl(sA=LKliFT(aF|hUmURrz6#)bHAR012;dI9U8T8VYuk5c{`In@fmy5!0hh)l?@kXV1fSe@4d6WNW8J9EGzsSq7C^EJ={vn`mMuP;AVBhuj91; zQnzcfch5{dO-QMdmSON~ExF?4G74nV?I@kZJU-zvItE`^9)i)#rkCXE%vyT#Zi%~8 zBd=-UbjJ*Z?-z{~jg9+Kt}FRFi%|o|o6WASHZu7wxPq=iA+0i&OSL#}V2pomW)W^J zF`Hhdm-leSs3Te;W|n1l#enlsC0_-BWi;uJYAowh?(^UB1qDv33sMy!4!EY@?%#D3 z*6bx(ADGb8?~Mvv`m6^#NeKQT5DK+(=e4^$M9t#HN{SlPIK-ISgRG}e$1ZjTezKS}53;vBM|0`!nN+~`Y86P8i>}+Td*H%I5FF>GESctQF zp25Iw2^fz`LlTNrf<^o9k3!T&40g~lTd<)|V=|8sItKV8MXmE_u$+0j+H_f`R}37t~I5w+TPu$Nh9BcLdC57jU&Qjwu7s zR6~HL3u!8v*)-*mp$x)7I!l3K66r!L&vF;ou6`$l2P8cQ z(*jDFKcibW%!5{kp2KeegQv|$VCeI&tO&96F~DmffzIcK;t`bX&%^p|R4Jf(Rzu_X z2DRAFdz6D^l$KP)cU(8AQ7 zL@G^ci}L-^?@%AmFIE%PfY_`wVxm?%mIae~aJeVYFa zCkYe^eK}k>A`)Hm-~FFU{%2qQkA=a{w})SV)kkOSm%^^@zbQ9fDSDbU;)9>;FZtL4 z)BOZO(cW<~R-d3$&JT4*sTuOR5J)l)J}6*PrA0e5KjCxkk{Jroymv6^(2X9e1#q@q>C2QhTqRI^-9CaL)EAn-nqI@j%Z4?F z%&NZ#(To)wkl8JGuW%c(9VGo1z(UiG?HGDHtNTj+(a=rentEgHQT`9e{sijZ@>|gK zyf*^O8G!T0@=C7iEscm<>RYWO3GGSeXP?IcqfdL^7H`&jsRAoAHy>{AMO%N47D5l& z0!3fcIXbV8PJNty{LjVzvm5{alMNzE9#{Gkrfb$-s=^5hf>+vKQD3|7zTL!|y3Jqc zY`?uc@E0lN5I$9Nn%Gwiv>QYYQ;x~-iz#tJ_|ePFLrxm9bV=@ROueNs%v28%N}6RF z2V&wl+p(XmPiGn$1l(M2^Ky|SUne0v68}-c9P=|KE(mS7S`Ur9Xppbr2li!5(h1lM ze8B2X`Q`fq`f~gv=Cb&D7f{f48sy8`QYTosn1Ld0><-pSkwE4+O?adCo_X`Q)N8;Z zL)HyBl`u@vZ9;rfdqV$-L!HI)yhXG;sd4cplg&j$QPaUH6@tf8QeP`}zI$3U%Tjg3 
z0lHDYSAFW4%XklgG2?pBAj7&z7NYmPS=z_NEUg(DB8c6?!}ny(@VZq~2>gu5{V~7-3;>fYc;QR`kR5@wZ%o+@yOMesI>y6cUD9+?d^%0TP{6}}Gckcx3fOAe z5NOYfn<5^dGKkS`EPqs_xa+aRd#oe%<%2DtE}!OFgXToF0`1GEHYfcflgO)H6HHz8 zS}m%_ym5rvv=It7L`>U|55L>_kL@rkO5C6B$+5%vDR>4p4TYd;LxTw13cu#DfZMLG z^;t2M;P`32qNv{>L8M^p-Wcm^2k+v39SOR&>K+xJz>~lgwwNDp<_d*s|VHC89EOR5Zs)ICl!szH~!k zxVu!*|M7$N92LuP62YB(7#VP`#MvQvW2hTEZHH zqTqpzop@{gK0A~$JK-a$r}_qr*GUC!*};#uL}r48Pu?eR01A-Ks__lOp*=znY@}W# zkNgj0qV5Y+!Y4@-ccY2EmZM_aEra-Ut%#LkuJ=#80?WYC7~B`NF@=?%S8_uL^_U_$ z1h5&ztn-FeXJKkMqI>6)bJW)QPpo$et9s%SUw&C9HL(XkeCf_7NsMZJB9eIMrcrM# zMD?qyfW=+#J!n!BHF4GJzLSb~*i}rlg#8srJqioHVr3Z>@PUHyiin*lh=X z>n`%F*A88iejiuo#!w(Z(J?Onj-;2{{H>d{ERw?P_;{uNex$XEMcJnjqT;;jdc5}= zTPrz(t#FI8Pw8p}@+GsVPkLQA<88`PKBT$$pfFS%^b7z_gHxzYzGOOCpbMo?C$^9y zQ2~%3@fo%yHnLM=V*oIU2I$DnM^`*n^a7i;a!#_XSA!R=h9&3o3SDG^S+G2bV$#R9 z^hT|UhX(zrH<(4NUs9HYRV}zvlHSTpK<_l(o5ZF3lGv-7gXyhrm4AWTVNI>X3gR{y54&e^yiDoq=0ipX@$MA4@vC-FUy1*By-_`?Av`} zWi_j)>g^QvQEfS=(9@?^b(j;(rhoWtv6B&Yu@ZjzR3!(#XwPA}3R6W%3gC#HZz{%G zezjinUss@zUwokR$b8!@T3skd%cYZF`gK8qSCZn!RhR}tw9iWOZoFs%vlI&=haBT%$zBK)zJ<(+7E9L@* zW4lC259)d3Nfc~W&-)A%rufYW;LUL7mP$1tPV=1#_rehWwd@FR(B=JNZ|8zm_U~K+ zh5j|M%arYht~b6Y5zgD+ZcY2?CdPbs432evYw$+|1H?QAo=Zu9YFaSBp8h;? 
zbG<#f4Q0RyxtRLPXTJ+3jqT*2>|z^<=@RGx0{)DRMMP*G3)Fq$P})Dl_R;uFr(iBP zrk~<3AoqFsc{*zA*_6l<$MR48PrR}baF{e`R(_ZX&2L_k1P9SXQ|R?z>_C_OE%2kk zq6jqNhOw~e1yi?m#bsyVxr?xlWKX`BZg+7g@UwS=MbJzs^De!aPkcP#2h?rlDaK~P z!opz_pXN~}bJdl4&P;Y)F z?kQ-Pba4;22bGPaQBPg=^D$Gv&E6veU8}dj3pCM%a2o4@^Wl9?Oy6lCQVTp|aZ`cp)t7_x_6JU9xa=9-Kvr+MGPK*P z#x1n<+6YD$auYLwq5%cXYbDg=RHpSr}Lt|q6xh0)HOri5ZXA*JRk7Rxh?dQyw zGCjjgMN%!6+Z%LTCtQ(l;i4pvrUCS|e^yeSMTJ7>-9NtD!T?q8pbSe1Cu$7^#_M%E z5v1g%2P;w>p;pbG8%ILrA!dFakDFy>u_uVS7Mo;FUYk(s1 zG5_QlyK?PAhT>pMoZC?5M3NZ4^LG$qV~H?J^>Z>XS9SSY6_mYcPmc);I0+xIozr|H zbghq>i@IEiJA%r{B@R=lE0agxUXETsCeiB?k&|-ih~;>VJWnQJtk0g!1XzG9a1z4o+%9d-& zuVRp3S}DGMsPNWXTYal^f;0h-g+EF;kscw3?#~fXg-i!*toCT zd!VK?t1nZQqCgdjyh_u)`|p)#IG<4UY}lL8PhMwHNC@ExOk$s~mraP;T@QKhin>yL zRLH|k(4_KEFf}Zq@l?=OK>QE!HttHPmxsqRti+VDZST}9V{J>*)c*yy-TD>T=S15% z1101W!>u*&^&~NjS2X;hFs^O5FGQf!K!y$;mDVh*3>W!wmZ-&Y!anV%QQUSLXhe2+ z_z4?V8cUB?3{FAyUDvFADeiSS!-+jAWSjW+c3op0a+pSfj0L8xNl5${!AWp+5de@3 zumd>2V;y|?(1*MHd5)hKRbXs))DFP~>A*6)Y7rqLz3M!r-zMt4=#~N3lj-bY;!0JJ ze&YVSfb@+!mRML@1N)_RNm|Q1RY_}J6kMY9VTHtsTsP=cH%*cPQXLeJ20?Hn2(#mj zN)Bd6A^OfzkgT`uez1?Z{F?Jyz;7X@D=5t$nfAU<)7JUQL*a$p6&UfYL}s=_h|LAa zEp75&J$M)tbr9`I;}7NAStwjtGi5PLs+fuvOg6}t*=-u9;^`;F@bQ6RRiO%^Lj)gD zX=N#-P!#F(L0Zd&Ird@_u?vHDifq$DhB`y5(EumXFXogtKV%2 zsW}+xW&r-wkMO8%O}6jls*%LtOx}aoY8=vES>9djN7i(1v??JZDJQEQlqnvIplx^G z-S3o@e4`rwk7yrK9%<>%K|=AGhkV%57P#h82S>CN z@O}-WV?vxRyEch_lwG^0Ss1#APZqr>MHUSV8=>v)MC*WhhzwTIt7H+1%3hiHI|Nkz zbFV-nzt7c-U;rllh{NE^ZgYanl^5`#<5%-tFwLbi2`i9&=oA4T)9aQ1=FJ77V^WuS zv{8`(cA!1=L;(il;2?Bun*%^FHn}4O^1PZ!eh$H*hfNIkmhzC_9 zn%;d%N&yf`FExbY5y~G%$YKAdB5mQJPE=aKTk@lO^~$YGFZ*OxiiTsQKoaXQo~vO~;R83-TixgWQC0e~ z`}jxq9Ju)R6JgTe_kdqvJKodsjgj(*IIf74z~~0)jhP%BPwff^l3?3$=8Ao#8lozn zXTldP+nWU)UV{W&AZ%ZS&rj&d>%%(G^M2JEgjNIuR(91X`EHoN zi-tGzcb0)zA_>xOzjf6`HOS>)sg%so_I{<-S<@`-(5xuNst7(w1q_Xa_Z73?Q@)*6B-f-ePcj2ML8zAcIk5%WRNkD*q`%NMzAy;v=u!$-%br~6203PrD z_P+qC?jN`7!&f*H$s*tF1LbCW^Lc-|ChdDUJ$2arF7rDg!%yKWoEhADiqj=CP$>{S zH)@tVj9syPo`3s?6^2X=p;I#|cpj;${r6Metj 
z?mil=)l-t6-d}ocZvwc9l^Pn#gUX41h*dz--jgq#CfgZ zx`36)CGRHC@K~O;r8-P;lhf4b38}+1T@hx;rhYklmvX4@E^f||8o7BNpLz@ss%D?~ zBiVSJ*Tsu_FVOxWHGRLmyh*g_cE3(gSXZM{pe6|ktQ#1MGYwg(>EM(t92elMN33ND(T{ugxIgA zs47Z-9&3OPx(g(E%r$ORQg$<*Uo9~Bb0jYD(Fsbr(1L_KaGNi)!0T#jZ&oY?!Yqo|2F}eU#lJF zzp57Oe;;<6D}6U1IbgJ2r6WlwISc4RJw2xWowI=Ur_`uG?M+K-uvrsnTG>+^OHfhy zGk@G_P%-pI9hFXVyGBe+{U8y59Q}2V?=K*_OF#=&mjdG{A;B`EMo*Zp(&17(W+Dqh zqsu?(9dU!dn=RFTToedIIa@?LU7P+o&AXRqkGzkFp~_)sRl|aaTZUtUx`#}3Xr`;g zq5J)2fh%-PL1-QuQ{}0Y)gydn3n(<6j9@^EIX#z(7B1;e@IK65{7-WlZ1$dZXH|<9 zUrK0F{D`vfSV7f{8ZZ#cT_TNh3f*rcrcw+AzHV&5!G{S{i)u6}6A3pT_@;hv^a`9I zy1mfb!ds~;gLN!wHbuqlZ2&F&9ou9mW4`WHl}$_-USmAIZfD^AW>$qV zB6{fs+eTuE>pI2J-AgtI;Al3PM+7tA;sl-}8z}0HW z9&1Q@+MomsHAl97a$%5sscLqwoA{$Vxw+lcPA1`9nbk*EIdHu5FP5PM*0y!>poZ&H zXu?t~UCkH08IrLHNA}vryd3vDpK2;K_3|WsnQ`ddmkgo=-WJW67hCi1VeSE%)$Plp zlHZYog@O^{LT~r9FC>tDR@Obs|7LTMQhA$Pjhw2KjMF+GU*oMy?)ZT^r-L}}gpt+s zeI8U*_2Ml6W`YsyW3zsX7)dTxi|SJ3r}36OacD5aMxZaKJgNRchT;!jG4 z&Y2!rwj;%^Sy;bI2MYu_|J@$EL*BxakK`)BCr=7Y`4kBxdG?J7w7oerYpW2Jq9Z0F z(DJCkwjbNlH?^VYWdR&d+%igeHs{l?IjXY15?Ftfl~H+r-E{U}CB#qpY(9pHl97a9 zvele!NA+i2t7=(Yk^};W9pvS)T>~Ys(;Tlo;}zLE`^5HoAsMLEzh z0Q;VxlevQP+hNs-C*>C6*D|ZS?>zYts-srYi=9<=I$7c6lx3b{C7WeUuAl3>L-{=B;}J5sq)EUjRVOn7o{ z7kIKxjOv+#_{NUuD)_u?>HM<74bJBn&4$|{u{Mte;e zmNgPpFAV+&%k(K8fxLPqIt~ljPyu`vQW6$5yrKuGK1$5=UqJC+)y)gr3s-s9s_`|G zJ8qIKpDS4E+wCn|5;H}ut}%qT0>oT)`530T$a2}Rbb*Ouv9P8-_Am_&0A7cMe3ybn zwDsAJ>6AYYaqo-KT1tg-%sJMV)Ye3ixRp`Sh->3L7T>@+z6Of2!udF8zhrWMirA@4 zQAA|J+rmx$-I)IHz{Px)PdB|2HW#Iiq_)$siCU||17ek-CX<)N0Sk@AOj3f>8U`ln z&3IZ}h_nmf4L=_JCKTK>MVm|DqeeW?nx2qBB1d-mDs8l;TMpg?rSV4Eu1QMMBcn5V zqt`YGJ4X)gFX5HDnH|f_ELU(AqBU zfI{viJFj`oO4HfD(P5$M9PO;XUPR$NL?xF+F~6m$S|ZKp*TM=#O`+H&ZL^4rrJwd^ zz(O9ukfgWEnenya!zh`HVv!V4fy3+)48eRKsjE8#6meZ1l_!DYMk_>Ff|AxX?(fd1 z{wPHfbqIouKnGRuWzSoVfc8kH>ZK@bM5;5;aCsjDKX)6C?d5|jw5t-{Bb%ilFCVLc znYX<;JLLz&Zcb3_fQ7XN%}eF^*s*&_=49CxLjAy`8Uf`#Z=LK1nGZd9*NGpoaHM`W zCu1**HCmL*|4!FW)H;4Ed-oJNS$;wz`nADcR!2=usuNqaUpZlUWl2e~5Z66XYBFXK 
zgS_T2@3SrmD`xCltEqgezG7MHvPOPZ6(kBH{imfP9zeDVIsYU$J9*73)ic$V$>mP2 z`a`*@2kr0*wVuSS_U3Bq#9ePR%uavV@QC>NF~A`zJRz)@2cvt0Oy_hDF%+}Ktlne6 zjj(@})}K?C-t{V{|v$0v|nR__Wg*cCKs6`sa@@4Q0xk3R6jp;+% z#@`nWqdmh)qZ5GFIM_(U^eDl+RJA$K;Qp|Tl}ijlJRIOf1(XdKc#a4Wz$PFsFE$ORl~(5WB{%M z1!KKJt&o&ebPc3*S`TlIuc_16nNrJrmChAr3^vS*#b&xcH{0iy=a7iz6gR$tk8Yrl>uq0{!4 zD$e~>&ph>=awo%biP-b*5?c9RUETcZKV>f+y4B-{JFT9eMjJX6DE(e0bGVFM^$O*% zr0ewi)tP1bf^mucFM9aMm;qaXH9Du7B39QGq(QR5dPU5{%RGQ(~(I!=Hv8B?eT;hTjZ_N1b3*!M$uD zUzHuhQh2Ypl~`yu93&YOqrSmate$?g74fPSD99U%$tYfKPP?}DrV~%q@x@G?DCxG1PMTl~u-x%@*0p%c5X zD#40BuMt;D5w{!TvR6+i69H3Z6FOCM$cZe4VI?a*>q(2Z_-rqb?A41`T5cB&SQ@Ls zsHPXkC%@v~-Jv|BzYIJQ{87hrH(O=pA(&=KMng;yWr~E5I*iDYtza!tHaFoZ9WYVB zxti-jg_bMTamh$raTEvwSqjvtWYmFZQ;pV0j_&1}E4} z-Tne}IsVJ7@JQ0$eCoxotL>wzErb)w#wKn3)=x%n&ATL)LF%e4=@Sh|w8ZQdl$|EL zK1t&V+hLj4aYVNEn2I@2$@OBr8?&ZU1Wu8Z_B2$ZTa-L>=yzr)+_^-kQL&^)V~I|uhn1a*)29NZ`NN`x zs0UyO-tugHP;m%>l(=82C`dcbMO}piX`F-kbRbi^XyU&~%2k$t{z=Lu;Pm|k{FirW zR42t>DH854#sBi5ivvc|t4D=#^kQ&)WZyOZ1;}YGq_g2*3*iT&_1W$2hduYiSR>0# zWuTl;pzY#|YUZ27QHN4iu1a=iKlWUgE#~Un;kM|ja_=Gbc`{T9Q(8FC?Qo8gC3d4= zqPa?iF-Vds&Pw5yv|O;rf1r_H(Jq9KYA#PQ*q16SW?j>OZ zZmjXwfbTDV&vc;9P6WQeRQT)>DldMlRl=xh17)oB!*S3R{jqqgi>GEAE0N{=joQdD z)TEeqFQ#zdzv`DnaZrZ(hRLH|+xL$@ls>Z!0sdBXMX{A zE7U)NJ1+-;Z;_N0vUR4#l@c}KFZ(O_c=AZWBVf!tQ zCBDT}rJ+=q*<4_R^NR(!ztz*+0*Gvf&?6)Xm zb0;PFTGPD|>r79u_ziN4uxzIKIeSFaHhS>tn}0CBv@}qJS0LsZfR@>=f;NNv+vrwS zbb_Vo7!@+g0ZM1T!YAWb)-F}?T|DCiT&-jB@{j;9!o%-V#DXFl9}|4j-S1aB+N|M-t@(Wr$z%U zRfz`xHw1XiL@(_l16MspUS|3zrXZb?rajo)c|6Jg%h?$PBkDq*V9GM*_YW0tL+2s= zRaLX4mPyU#QlNT|Q{J9smywxOXx!XSQiA(1zA3d%QCp`2$lUrgEg!D`@%hv7D*gqtxG8i0VC^Chl`@ z(1M}dXW=hpqjpRLZ+Yz&Z^J&FKiO(0>5P&FS{|E!=8NyX=3e9^D21lzA>w%V5Bb%& z$f!TGc#|z(ASpZ3OTB)l47b3dHc#?M=3BvGXQk!N0M)Uni@$lHld#vf$k z6azW==>YDb+FtN(NVA~yaURHk6~vL-iG|DbBVQGdPC@)MT_W!U+*@l9wE=nCZxP$2 zcXQXb)<@~&HYOF@X#0%j!SkABOdJlfeWSQM907Fx+0C1NnXC0R+Qmxx<7K4wRY2Cz zX}S<0wBv$o-5*|X1gDc)X)1BG>=^Bp!xeb1<4tslALTR5tHMG~@DZu-?Uj6=LdvC5 
z%kFIKDO?LQ(AHU^AICO(+ZV*EdyNLC<$f*f5!j4*u$U-s6E?n!Z zFuKgtnp@SfFJN8~=f~LZKu_&Dm_2zh(ZD9+S@Y;?`4u@kE2Ngr%{F4+wPqX-CP8=u zv$v(ex{<)JXj<}l-B*O+Ao;%l`r*HTC+rtH!K@30S->!)2R*^I2$Mbpsg3ILn<^!P zgU$)FrqnJZiz=JQWX_0OkLRka5AB)W9HKzNlxB>q&FzY4M~fMGS$Ur`;g%*$+kB7( zP~p1*DyshgTZlLLbcW6?@G3W&Uw_Y-tJnDHfc~xm%|R`O9F^7}J&%!IsxJieHt48z z;0KdIs^UqG53l$6LVA{Lxu`9OfH{Aw`(1E*2W&JX0^j4f-H2Njflh6Xra}0fD!gl4 zVFLr-sc%=ezPad&)e&TUJ4V^?uD`4ILroD}9IyREslhRV5>O_HZu8F)xCN?nd+#GD zsy}JJ0Kc+EE+e@kpwZVrT5)Ud1`;yE;21{6N`%vcvGIY?l^q*eL_;*6%!UV=Z*gQo zg6FmiysdwE^;efbY^;65Z|a1t2lXuB7lPJ3d4WV4gIwfvYUK?G-25b1k!HK7LpYv^ z$Y1OGapBlU0f3=21quP-JixGqS(jw}qeu(C~0b5p^@H{|X1wD5x(Nj;q!<-x( z$$YgtW)mSmv`<=0Gb1)sJ_ZkjC$#O&c6qVgyu6$kgr=Jy!;llQbHI=zGmH>#A7<)t zxx0wc!=hI~@d2XaWj%_#;sR zq7s$=6Zmpk-Q}HO!x@d#_ax}g3FM_p_5mNM=X;n8QLKY`NQOp${f%wlo^5-;xR!7B zu5AM|MVZ2~qw|lAlR{mCj}QA9N9hK7tw>ab#*|nLN~h&?MbMO#zHJ*sjXV1rS#nlX zXHL&59$h9$%NWQ)I=oj3h}sa^$)GT&hlp9;WVbhgoainGSN>jhn#UGYTWX$-=?yQS zLK38;-bPe-5tN`Mk!3J1C@7?gz@X)U08{}D_9K`ie@yIYJ$qRxsYTsmw2{e>73(ir zR1-kAkzwJ1Oju!z;3Iv^9@=}MCzMUM2R-);7#zglyq_9>e(Ng|P)hg$_sSsgu_r=Y zdk{vj)Oi{HS#H<;PmRSTtG{v(BU^qtISj@Q&yv;X{Ba`IsasQaR4=4W+)ffQqx!r< zsq5w*!ZI{IxS>Xn{jUV8c%3_WiRb{+PxB55^@b?qpne5XTC$#B-B%2;s-BA8acG$m zxtYH7-%08MY?LZiCFzxYG<6^pR*C~|Jo6|WtzNUt3rsn6`cI~>x-O-Wjm?pLpgB+l>yttk9HkizQ;6H4^UZ1%C9+il{J)vJVxLNAlfsQ%r^t-OH57>k+Vyju^_>9*``sL@NN^G3(fN;*<^^=X45wnnooNCAr!vHplC38lr1urcUTTJzgh%nuvL z9*8BdaL=)Ogb<#ylE!&yb*X5BvKXx`XDl+(4^}+U#31YJPu!WA(Rev%0Et%m6Z%~+ z6N$(f}zJLSvPH)s;)({{pRqueH5&KC;Qu{sQ{PKk2^n`54enA>R zEyp58unnCGUl${j0n-;CH$Y}fB(ZTbxyBJLUnAyRxI#WA*i#PY(1&!<4Vm5pY#d6p zd_I#vWKDO=JVm%~oA(N$MPgMZ)KLV`1IPdf0OT=f7f*=E1~MkQ@)WX`XAIt;If*?; z>l8Y*tfK3>s)kUtmzpe$XzbGY7qHbWK(BPy4Y{xK=M-U;* zg{$E<7Z=WVy`L6S>j`2kaNgJ3T7Q{*$7rX_ySu>o0MCmL#7as0emi~Z^76g7SFFGS zgsEZ8%8RgLCfUC@US4mj6m^&WrtExA30{H<6EMMR`M@BU>X)sD)+q@j*?{=1n1OekQn;m^>q(rx&8ap5afh(LoYxw~`tf9}f z+=oA`7RS3}WnOSR&QamoVNF|RY*D&k!`S)#jc>UZ0_a+I*$mHv` 
zJDlQ~%yf&|6i75!88%}~q7j%Fv=K^%OJB@xxl6dQ7=5Kmx>{y?6#T=;^%w9x=Z{itcaZ?|w0M&m(V@o%-#OwVh5 zw9+xP#?PyX^Z;vqMvj-VlJ=BX{3bDB__EWfU=?Scxi_xn8hj&cAP9KmJuoq>43FNT zu3RLf-T^8nY4eU7*rYN%!Njtw8jJ|m<*LD`I0@14*w>IQaMMpHQ7{%Pt9eF=eNA{( zg0SAB9b59Zi9L(B1Oyl zV||@vK$&@-)Aoz>jEupoz_1_YeEJ+)y~pX2E@>B$-jyUthGwoAIc>$xR>Ire7@ugzfMUJFIzT_LHX_he)=_|Bs>#q z)>5RlM_-kQ8#9f%FuVL5T|N5?L&G%+!lQD)*XF+f$7hE;f}eI6BTF%6eEVhyR2J3h z1T$6Tj(SBZIm);h(MXLk$OaD!c#MWvc9`r%j9`kNO$ZVc(?*I23)!CKRbb9b;#wtACU2*%mxYe5f%sxjgzu#O%jG z9Zhwx#UOj@jNAEuR-hZqvvH+HWahH%()+7o`RrtVMbE{apZ zRCWAj@lDGUR*9%I&1P7kif2$mBOZSmb9B1}^yrqAmijTsW;)r56xQkWGmZy3qd|$Z#flqSXl;U%- zufcx-c!gh0r3+bhUfw)c2k!o!Z#KDsRlfWt6lah~`*(ch>UmQA+!|M?^7Q7i3=syy z?%!aFjD=3ez{pP9uuK)>Wn^9 z%34#6=fiI(Rv&Zh)%nUrV~N}#=?B89llJi|4tiC1g!st&LO6rbVH~omL$4jH0-n-= z?+mz>!tVJwG{bXBErvey0TAy2{9rd+*qX43cD=ZiE$F}~WK_9}M>q0!`amI=WS ziso6Aol-xmOS9EsI`B+=(U;KHS|U-Z7GRg30sz!C5Oj`cFzx+Rbt=}RE)vWVWO3w6 zk!tG;l0_hLO0Mvv*s3%0=8jvP#*Jgak~)QCF==D^2}OOnc(4O982F@eA2Xfb(bKS_ z&BFfY9vsLiBD8Z1hB2e=Cd^_c*k1nv8qRL~rXX&%&%g6-uaymt!VFD-P_;Yg+z&(t zT3H8f!;ypIFlf3CLNsY{77J4%lF{QgS1vyqHFajB>_mAS7A-hkNdO!W(D%;`H*}B_ zEX(I*1V^;!H@*ri(BRkj4hx`5QZsjAFD2E$faSG~P!naXcMAYO77Piq^e=<*B9iia zo}KWD$p?m_A>!f*HZ1v51XJbR%>7Au?j}iQxi`-pu(zH-o8xVh-R=GP_}a_VHJ-&7 z^v&m+1VMSdH-GsBNGsPKrIkOFA4iHTo~W_ISMt=FkYNbbN{sidQbdFAh1(=89GsTF z1EV^Pg+Lgbjv@vOM8v-R3pgVcsVLhteJ6bXfy|&O#wNW>n^PR2vtz6JLtQSX3&D)m zRELj6=GmFZR*hgp7W?g6i8z7uCKaW;%HclU@czEAqGmOrz>8?8Te(;?)w`Xn^dfC%dedhLH;I6_}^U!4QjlDSr?5`pC$!K{T(quikM>=7UtH4gKwxg=^-Y_=5 zS|0f8jx?Uut1Uk63p7fMCanej6zZ507tO_9bb-qMY%$*#gDZ~JxKB`v7$MtV{yB_$ zFkRJwmPzYA_5(q%xTc$g9`pCh$HI365J+~V{?Z!&<0y|^CSIR9fQA|DL%@CqmPDIC zFkvwaR@9MULNL#!iNZxTEXOyRCpIrbJ2?Zq=sykv9G25NMpI~grGo(m7sJgllc;Yy z>;4yeZy6L<*KLh9E&+ljI1Mz?SnvP=0tBbgZX6Qa3GNZx-5b}Yu>^t#2@ZkAB|(D+ zcMbB~&ikJ4ob%oKe%z}2>()I#C|32Z-mKcJJ=a`g&N0R=VrcHm)(K>jXSggK{I;X6 zdbiHk(0xp(?z+xiO}HLD^iJ!C=;7tzMlxP#Nsh|U&tMHP;YR%2fLZIG^}%_``!C2Y zoR^dFo>XMndnJLWE?zfq4TCrIBrz|Tqx%_LQ&*P~cLr`ln9?Mh 
z-ZT7^uD6`>Ye$v9xB>cvIEOP})&J|i2VMX*=cMH>-8YTkal_c}52e`8pRGRB`r3?0 zMoQYU+H{K4cDf~Fu!Lz=LZ|D`&svV7HipEC#oInhwL7caA<0`r^sQwW-~H~{HpfiA zLO3t1R?X*&on%~uPXhHDypVB)slkuMu89A$BJ&S`NT(tkiVRee4U7a}$xvgA9R63j z>MI}muk&~hq@-8zZxt$ybn#nwK*et^s1C{-he9fd>e=DrSgXo%Qr`-3kpob!t3x3r z5}bsT-}2^K&5ZL66oIdjJ4W|4UIRR>IW7vOm(tRk885;Ww}iO-hPs;C5z{#iX0n?U zlcHonGYlV&s;09|;_=qe?3y)TXYn`i=F1&$RJ@)#YizlsD$v9bW`y)Y@BKf$Ls-QA z>wo#c@$cQMsr6l`e&WXNtmN_jKpsQppY+4u{D!q0f71U>92srpC?D|ob|Jp%+sw3>JNIP>R{zAs z`&E_l=f8;_)8-P1O($Wlbyw=Vukbt}D~uq+c9SXmWZwqyvSvTqPh z0cVc8&g0J#Mb7@aqbKs=H$IHH^RL_s`^&{m(83}fLG5a6j55V&#>$eSw zm_4jjL4u4DRX5+Lo*EC8>8ob~#SPn72r!kaJi+}sJrK6dV%st;bx36929RtNZ`Ih_ z2*zHJgDBzN?A?^uzz_@KXXiMaZ?{Y%ePZDSVAtf2PJJFw>u+Htn8s$y(_;t6(x#wb z3RYd{irgb0q*RZsUkyymgoEr!GiIIha%Dv%L%uF{ec*!5p8 z;FpuG`n@9~I7GjsPJ;rh^d{dc$V9V%E z(b1QF3YJ_XQ2htMFZ`WrP{th17o%1l)Uecw_I?ucm&PEx2M6tek~L`_GX5^z=A}Qo z;Ng>j(44uzd`c0nz)CO>-Mt@#7fY@F{oS17BsoI!rkD2VI%l`hG+GaGe_?6L2L_AO7Rj`J4D%9~;|_sF^;8YdAaj zPREOPHMSP$RS@J5P5MtF(R5(pvTEh%*00$aupwC%MMz3ALy}L+QW{)8&Y{iB5#M{19li(HRMo2*@QAM;DLEM*#ms`dizxPDH z^VJGqJ!23wz^D|=87`AwJDT`B6wNqp{1~%*ROVVvP4}ZZ!&z~u| zGB9mV1N$oO+b0l{_wH$|Kzz}VijOX_N=+sewK^S!Uo z!Yh_%EXml!d#!xKQ*uI7IQZ~H9yr`kr4yCQVqpaS)l7I@8B`#+lR-vq{>42;A~_Gn zQP`ERWjwmKJI%GO4ggt8;3H_{BgYM6_Q=n$k;Dn~L`F z<%@gdou`|WM0*OwQ8YEoO#b)+!m!@#LR+REB|``&mL{>$pN9oW$>g%_0UCBlxiAXG z`dmeDlX|rAV8ss$m#&GjoG1hYC22;t%w?GwOw`r?ip=fxgYI=wHdDN*aX!zpx`l(U z)A~mB734!d4$cB z1xP|p_NI4gs-iI8`Y2pF6w?5vRniN(78p_z^OA*o-tt~uhUV$9K>FTUxg7`gk_rIE z$OIpf^WTCG-~WQ{&NdEnm?@(S(t8sK;>Cc+G8AHoW{Zcdix=U7DLSANG4BDZ4UUpH z<|dN6vpxP;`OLIDtM^p^5CaHH98TSOnemwo5m7nB7Q^g1Lxs|(^~T82v{>7D!3E#OUd|pdhKi??`KG}kAg-~bgh2_yHXqw!#tr~P&eJl zgN6vL$?Ef(wn57Vh>O^IgA-Ce`h7E>M`zLdHPwvb@a$@4RShi&zqc-zkOSe&@P76< z_q|wU4Sedhd$r}X^U~*ii~rj$Tnl5c3t3XUICUbv7{RDaFw3{%kI5GQ0PZ6m>ZMNu z(h(+{L!>m;0DF}1u$9#P<{1~1;>^NoS4(KEcLfg;&fmH+VrQJf^8S{Jm2(5v$q%=U zN-E!}fu_?L<4X>Em8&5QMd)Thl=4OEn6F0R)JntaHw3u^+tISSS+g_31Ud0VhT+gP zu}olGF*!TyZj4ru-r#Cr9t9D9=F~9OT&+PGPt2?}#c?@0y`2%a4{9Udm|-?_PkprR 
zm}fa3^G5j>JI}{ev`rwfdDFAHSo;YbSub%9j395f=#q1sz90(h#Ns`sH}{wMWc7FN zZdy*e58v4RoG$7GpggFs2PIniQtX^$aAingv*#|S5ZGdC;6}OYs~4jRx)>BAM@nH) zjAVk3qLTh}En`$ZxYu`hh`UUO?9bj>oUu{Xq6SwWfO%E13Gp2r5XU`Dnx(v599eEZ zI!vgJGGgA?x!BLC>7LA4aOG)#>!&b0k`>|dml5uO*Qi*EO2DX zlRDiy$sh>3bJ&~GMOPc7GcKQ=Ot{#^X)nkjL6;)V61dGzjqgu`5@2QezdM5ZpAM1w z_|(-W&ADbp=|CGTX(Q}$l{DO4Bih@YE|^pjX|V$V$^ZZ$pu#vgs`JBX8Muq`l%SU0 z!ApVRmuj4!jurud=99^h-|x@J{Z@>%$_yQ$W`l-WeErzbp@Fb5fE!l2bQx^4x}2b2 z>pCKvYD2q`XXi(EhF;3=imqFL%)gJC%#+_DhhAwa>ImcuoBk2G(bkJ5m?^y-3h>|* zVk)V7twVx*(NTE6S)Mp=Uq6&*3HJ>&t$jv2KYm7u@PFy2b($ur4fQpJqY3=hCF?*u z8W2XmdBxjDl)FLcwgZY)`{$DZGT5;K3#QL=t3gfNtBLj~KI$l4OKBO4rn^`F07z1e z{6%lH>dT|J0s%h(iDbUinpYm^Ru7aeo^H?I>-x_>2u^=a4uJ8$4()Z<4F8T7X;dy* z!2Y_+t|(p4(?2mPh4urxcE;W1ccq`(H?WR8K0co`48=4D=fa(&e2GMWz6AiMQ~=1I zw@}f<=nLhDRquBf-{2S+=z9BmfT~1vlk=vP*>>c~hgk11%xHgNE8_2IV?&RgkAMtDsqiOXVVIBnBs3F%4&Bve|=N zjAzc3jWh4V*MfXAh5ueY7U3eTC`i@-U_^m-0rp`_v+|{BSb~d2#B$OUzV*}FbnN5l zdjCohUXT5|p!JnSl1CT_6guIqmlaE2B!DVL)6Tr}^+{rTr!n=3=tRJcN@a}yxc&i&Nuc~w8^`##Iyygnr(-VQe){ckSRfe zSiTF>M#Y-?X{mWUs1kQeUJ7(3!DPF=Q54ZLDuhXdaNs|YxhmAK@T%XSolrJHdQ4ds z^OGm7nJK^E7aPLe{MC3ktcEka+lqbn^i8lVIW?wyiQ-XH#RW-x9T%F!qmYva;i#8e zbcvSI)qylTF@-=?_mg^uMVqB34pT>Df7T{~_*@w1iSbp~G=c&ON@o?*adZ+8@Dw(jdW zq;XX+CX=jZpymj5p-SB|jAAfDQQUo9F4mQhKvfhRh#N+29NX4AxY%F)WX^I53WdXK z`1F@6VkyaDyT}3R_iWOOY$JUXdi~q!IwawGoK|ia@g}fgen7B5$i*Z7IDLJ?G#En} zw}51(rrjjneB3!cs19h(g6cLr-BrL2TTi0Kc#+M8`F4mfu9U`v1e*RToyp5ZRTo3a zf-q1+FzZXL4nFXFaL6Eg!P*LA&T8Iu+_i^Pwlk5a7gcmMG5a7Q)^={&$nx)inSpau zTz%*8?#b);gp-Q@El5a@8Ic#dXALOY2M&dtS;;z=1R+P9wxg?l(VK^oi23#Wm3NeD50TR2ywu98K+C-ml*<^v?=_q{5N~|JxsCOC|22hs^HymD0 z^mNL6y$}6btWa(Ubw`eXTN71Y@Ds|XCUQi<0S5m7dL|xd`<~e;2%cw7+3&JZvVt~4 zs0`x`qXzpPaZ&DuS9PhF6@Vp#g(zafqX4#&Y)}3w`5AH(gt*9))q6QU{%8%EsY68c+_(g43rri_} zvr12w95XvmlnC)3)g{9}YH!vapAuY+Zfh>>ltUgd~BQMIBpRiN(CGY z7A7?_Tqe!16QISBkvcuOW1A^454^MZuO0jk<#>w%GOpAB7y6mhhUA6W3E7(6i= zNze%i1E9gO6B5#^#`8)la-5yYL{ZGnLHTfi{1GR#`dN7NFHY*+127xg&uB_dPE6X; 
zTtZKHn?+=WMw@D&#tUD{Us`1bRhzj~n^J-(qOwm~;Hm$DWC0ijt~PB=rLY5khk=Wr z@CH>*Fn^LvG-XH$RcfT91u-|5iUI)~rYi#p%t^%%2XG){oC48P*`w-ws^E^>l}Y0& zLc7gCty~F!#f8!Yi~noxTJSUl81^8+cNvOAOn)ofQIapavuS$s=MxDk*aP~qRFn={ zfNgSm2Gbc4WGl@pG&>^x%jA3-`+VwzrZQHrqQq8x(VJB-73rM1AQvwct|@Y6Q{7au zVodxVJ!-j$f*>;aU?heNdLze5iKzS(z3at~?MV~4=j?0`b*@yWJP&;QuL4p7nsEl+ zv%$}N*->6X&-$s*QGrRrx+HKcmFL=jq0%4Cv4Sd!+4Tmy25BURX%}Ds#R1*4YqQI_ zTygw{WV(?c@<#vyH~^lbj7uBmNfHav4dhbnChDTEu{YFF<%A3sRbX0SZxj#=Vn_*M z*U=oW8xfnTw(v9C0 zxP~tg`+V9CTAXM2wH}sJ2HwO(mmtm|2O(}=j*6v( z0k%Fe3d_az5+bTE55^yteZ=oOfM=>R8S}DR7=2o)3oG=J%Y~Wfno^}Ybdt*S5HZ{9 zXdw%5v;Y7MAo}2q3m^i7sD6onqR|pYE!Pm8$>m&j300Qz0Szu}?DnFxV0hs6CyaS4 zEY?S=$zcirblLB1h9;xiKc}KBa zF8%4qOi{_ExF+Yn8y;N%ZPhO7dpwce3VeiXB?2^|@hnQjZ#F1nMX#40-KNB;THYb!ptS&|P5Ssq=cBLsS9r9k12^qe z#VgK?XKUA$9!`A>L^I*oWK4Q3%zWd%Y6W79_b=_e7@zeWMo#?rn_%PJ`|LlvFhL{v z;mZr9wr|}$nYONf+`pbUl71H5B#P20qNyapXT9|pm^lPx*h#XD(?9-@U;nk`B^7*} zrSxMsY6+C-BmGd9XNGLA`B-sU#ZHn$-0q_@y12Iea^xfdb@TG6+~R_OYNSmSKQQ57 z40C=<#XF?i6r=NM=T@!6{R7y`OPmVP`Obz${~~gwUJhiamOo{&exdwOMOc$5AuoEl z#Qnk}reI1l_Jhr@pc7Btkvd9GAw6nR`S*I;p{=r!=sCSZEE_ye1g7xQtYsP}bL z(Yq=eX^>7`@w%?H>`*VsK3h98n*yOYrLs5Pdi`_6Q90Kkli>5{Inz{Y7xX)fYnJPX zJAaw2<5nKp(qAM>#_z?{$(Atlb^p8%vSg#8o@>ol-9hwQ%AY@>=Hu^w0H4wP3jF!jTel8R)gW@i@3+p=@fMM>E$1o@2dBZ z&6x~uI^Y-)%D~eP%=UMmKI_h2w6sy$Z|Q`g4KH*Gi0g>jT|)7L>x?edRb7WE}SxtrNW^F zNI}Y$wH`1w(+*>2&I9xXH#6w#Wg45I&f2jV0yqQ`by3Q9W_a}1hMSTFaCiKsJ>Ix) z&{=!;xw?({D6sW<&V^&4?Ol863j5od6Fz${lhb@jy=8HU(NYiVJi`+WMN?ed({_Y? 
z>kF<)J(7{s$AnhEN3jTRbD>aue@}I0d1PPR6BNNXrK--nc{Pc{MFwU$82l4ug|S8x zIg71QrEH2;*eI^Vx@cMXQEMr3l2qd*fnaw<%Qi4-$yB$wd*!Nb4NKOo<-~p^lDdLU zZ=kIn$MW1^_G4UkU{#~~bxPh3pMQ9Dd+Z5X9|uk(l{G1cH`Ob)Sa+r|>jYrYqzqBU zg?5&Gl&$W%>iXJ8-$7X4+$ov0qKkON4M6kCLLz~C*mjbSKBP)q@KA-ngWC`#M4nNQ z&qUE##Ge#_8eqa@;tWJ}voI8+a&?;{L9{BQjAuZu>LlF;sG$fQx2W0u8d0L(HJ0Zv zm7>Xo*6(BG5f}(=+-hvfm7uB98;b~*TWJxgO*}pUG*N0VGU<=aHn8nUd{?V7s~b)n z=)_RrJ7a~0!q5R0ReFdW#-jQ#5T~@}z zTO2RR*v2hptL3P452Db!4L!NaG+@$fvZm+_Ph-7HEh1gJ}alnwX(+H4&IoVd* z9!$N*&07)>;;1Y#qR<(duI2qy;(^oNf+#Xob$2AmyISUVL6P^Wixa)Xb7hH{@I=L) zA$ELzsjNS3TZ8SkhR5X*d62BKXLJmbCIs)NFXna${X8~OXXZVIe&On*w~Hw-tfy4& zWr!MXWl(N#j05MsSI)kn(b0j{W^PN$2+(da9f|Tqdy=jsiw+jWf8PxMwa^hWt83Om zYc~bLibazWJWP6ETSO7{?2F+rTbyZ2XFlZ_ zIJW+v-1(Fo@XC-wi-JQdxKZyjjM%9==em>dH$usdwBAL#+OQ#+hNHh*-aQS=o|FdC z)k{_oS=PceLGuQ*n+2B(pH$rtK0saEMsU#sRf^SVUB zh6{2UYbz+Xoz5;t{DM}~?b-Wa<_*vj$rdHb72c&{sA;8^_%>H=v)b@;DsXcqYrPjb zFLuMjBD_MWeT$C9@Lo>>V)QGo!MlZcW3oOzs$!@L{ZKE$ZRLp!5yCZL6F2KjLRo1D zVq+2e1!zGLSttKj8fE(^U9B%|PHUT#x`_(doLvsy#OFr;p z#k*;2<+e2Q-B{7_)UbOD-m(?A_c>oSZWUI{7+0cXU)M&ZJB9n%KBLr(=9(yXaQ@Rk zEQmH81Y1ObJ-XY^$dA^F0!^DYlmTwZVhGpRVT-N&?maw5W~ORl)j0+(AgU{aK*N9^ z3qH@%>`1CKt!49~~f_t=# z%{CuP5(Fc*w7g^cQRmhbUMKFXir!u?aQ9023_6D?sfszawt zDbYbJV(d^Y>n2jSanGm(FDDIvHhK%Fz1;hlS9ZT`TW0!}3uC2Jlvt?3sZN(pMa%(v zmFm9uD>*bz@wJF*xN}C7{O|rwO^DWFT1B+p`4?NzAlOexJdXlj(eUJs^^v2whg=rt z^qx@vfEq`oE7F$84api3Pg5V1aNO@o<9Fj~LwFlBP zdH?xewE8~HO)vlDNN&t&07 zgG{&;Q+fw;qEGI_=f!@pGKSU!kmRBN)^-wKdJ7m!`)AL+`4 zDq*w?70EkzcX^?%t3%W!gD6`6q#%>?+Fxja%=iIaHH=XG+YyE=QwNt19*m6vNIK}Gm6H%u|04rHw_^b&T)q_mGtK6lBtf@Z=0|n zEUUyYZMD*@w@g!FVs5@CDQi4;!4ZGQLa>I*_ezDM@gSKW$W8TH+46EXldpHgJ&JCNXaWgI4z1X=b@m{g4d>enBFL zgeD6*f{@4Hv2p%f&MVKN!>YuL7JR{Hg^AG7_qJpU(WIwth7+=h6JYd1o|Tl|VXqYb z10c=ssr(CkqV!$tW_2;FM~Ac7Aj=eFEQ(H6oiy$=15nily1?Z#LE#&%^+{%DXHqDl zE;$we2VJW0cO+*VQw0B-8{!4&1n!UN$k)=I7Lxzu;N@?;qzncK*b||=(w#whK?ML1 zOZ5zwLz8go*Jg6wR7j;-KZ+guoQ6~;XV+j>JgyQ@_F4%Wxy2l~mN-LT6RwCX3Q3FC 
zUI=TbEkOd31ZlGEElZ<*NZS*!{PLSaB4!?Ohmm7PQP~sYDRY>FHy6J-Fi{7FYw}78E?<820JKyusYX&<-Jn;Aotu0x#)?>=m=vbN5_on6LWyN6abYnKKH#4^(QTA@D+ zKg9E*grjEN2dl16zl_-rn7sB^`@F`8-MT#JVEkbxFtrO&fD51}$`RoHwox4ND}DTWG#M&VVo-o3T*hJ@UKigo_D4=my4EAShD9wn{^{~VOX`pma^x97wweBxlR}oemd)*8M_T(A-JvP@wLhf zwr3rL4=Lur{!%$~O;be(l%s-fVpj?Jb~G!Wuvk7#k`?L}OA+l{t+~F_=ECLWeISn0 zH!oklrp+&p8rvGC77ggg^q-#~`k8;xK(sk%qgaZ`=6z#7M(7w!oI-1GQY)!L9nk72 zK^rD8rB~&*m%x`lq44O|`uUU6&^{F=e414ELPdQnEaobQ{8ez9PfSLqP=%VICK^}%Ie|l zZtpz9&6BxOo&otXNiT&E2L>P8iX-YKWVqRs9LYFL;dilAE!DR*rfbh;?uq3KYG~Ok zm5h~pahQ>%4W5uP6HRY;3yp(QH#q@&E2c z#+hO7`dS;_Myph089X*=g#MeZFH6c6Vl*$m^_O>UMP_SRSMsqDRgYI`0;2=I@lzdi z>D2XBeb>V>NFwwmi%%lftHaTyK=0jXd7n;vkt;84Ei4vX98Fp{nUs7K12j6I`!}J} z^y`49EK`;PJSAtUScTYR{EEPkf{-fe;+CBnhoCYQG zW<*-1FWW>(XL)K2GagoG?B*A)rJs8K>gt9Y8-l(-xQ1+aoRHug)qx95#UpP>)fBr1 zT^GD}lUqOvqqInd8`epiYNg3QsXghsxl;JePzAA-eY%0cpv0*4Sd70(FP;nP#@{1* zL{@-is?%H4&6v-^^i&YI)}2dhq9A#UxiH8I8sTs{{&-}9YX zQ+L76VKcEjW~wX1jZM`W!lY#|4TT-YZFsq?{m`DPJX5>1G zDg2vn`J&_Ae%2b}3%L-d7%sVZy1Q7E?4Oo$`P~#7`qeJbhs4c@OOI+1q(%`Og5-RJ zi$z~XRE1l|Du@vs(EkTs?)-+?jDRK>5ML0C&n&|tZ%^;qq?f){`kVq=w@T@R|AlNX zJ<)rM%jij!Akk_Af?TXo3qRn(+oAWSqD~t&Hrb@uTc`oKIQMS8v}0bK4T8OFA@9&@ zlA~`%UT_(w(AXgojL>-NoOl+k51$;gB}(QZ#9URFO2p0YbN197D3Ay!7a?X>-~hOX z8>4Cv75K5B8wls;!bx54V-P4%ibnTB3as@9(PG>wFFir=@$kzc5@OuHArzHtqavZgq4L7~`KL=m<5+EytX2Z1 z?T0sgQ_O)(36qPnann->>L+g)>VyQ2+qaKP*^(r_<7x|C6Tax4*x`aAqF>d8XSLQQ z!py<^>8v!zu-aCW>0JV{c~)~=c?Nm>a@A5JU7lTXE1I)lNj*X+CrDXrgqJ#}$Y$u? 
z1Kdj%0-hf7V(c21rIvLbXcGj_&wv>{G=*=h#Ibo@>|ElszNY2=j0@UTdzS21$XneH zLtrA&STyzeRL_OPMll!_k3CdhT5JFujEXwI#$wVgYm+m6N9F&OBq>=&d5@+<69QY} z){@dIp?IPGiXUKOkIUi?PZZD@x=6~cs0$8@Op<@|uEJJCq7-b|>C^H1^M!G+2LtfQ z?Cy{UH-kGiMLhM`?(HwXhmDmknx8eZ_}mbh2RBxeSm<*4nC5Q%7G}C7l$MQM5mLuv z^A4=0IM`fMAmfjOJ;^jY8KmHou_#U3V|}4(i}Q$#C9>lNHtOJ|s?*P@_r>21Tn^2t z5n}hXRy=eyG0*}^!MGklc&;}HGJyZ*3HGQs88Z$*{FY<&r1}{yNXcB{P4VS`vfDd=+GPYtf4Y3iH zoa3fz`7@fXak~;$mS;`Cy$bUo2Z6`CeTi(lSc{}voAoew2_`O|H=$e$t`qocPvx|DnDRAMiFpPHio1Jx%X2$RY-;14a-^Cl)*DE9U%1S+ z9U_=I7jN%g!6|8N5U1_d#w8+MR3yPtDQ-qvBseCB5nPoJ>b8L3GF*(E-YwUomsiW& zmx$;82QYZlC-K%T8-|!%$EOycji=YiDQxmStG+1KYO#<_&F?o%R>QPW5#$gEZ@IL& zDknZN_%f*3$5U7rR4I6hTE{G_Z|eVwV9BbZ;^kpiHY9G*v2rByxcH5@_tDR&P`DeF$lO$&Hl$IT_I73FQ{hP2|bA7bxx3OqE3 zyF83mLt~PFxHXkRV062wZ|J}|X&D5P`C!}5mcLvc?&vd^6__AlIYxcfFS&yrjAou8 zBzIyRsQU|JO?oS8S(h3f4$`=DL3XMU2`jbNMvptFw}>4BAXCV%x}gc2^CU&=B*he8 z$|bDU)oWjkAWzEi;&G%_h^t>!UBkv$F$Dg|U>wI1&W<*aZd)@4{ia~to^A0$gs2G+ z;)y4QsQF-dLG6z16DC<~&t>se@zFl^<+m*;=o=h7(J!L49Pv-WzCQI9BrHhQBqeRS zy~cBkp6IMNX&E0lQwphX3}U|5j`r;Rp0GtXWVjie$urPNi~G6HKW-Hw+@?n@aZ+hd zpNY=!M4Al$jPPa>hGf6`5YMp^DO_2uXv!Rf?H&>t-#A%0>5S~tRA#}b3%L z@9pmI+u1ip+Z*kUt5W{~G_YP0`95(Fcc8dsA)K%(c-(22m%%&9#^8LQU$I~s;|W<2 zLkBNyo&BGHzf+~or1FXSQ_UMe8|uavcF%%;%`RvFO<~~s+3Man%FMR*P>v&EVKwmM zbBP04VDQD`5ab7e6kjdXVm&i%l0zI05RG`zp~>L$V^>X0G&X&J|c z*y6nI0-0&5IIo~}R@{U?V#Wh=l|&7u(Xj--Y@i#j zgvrRZe`a|^5VCp<)uX`4Xk;CECKDk~j%h$5d#E62h0Bo)0*4GIyp^0h$45{4@8tyH zc1)1{XZ{QaEa-{RyIzN*kFXxJWg}hcZS+S<@1l$yAF7q?J;rFz0Sh*Q#Y3#@Lge-=K3=ySJ););i1Wci#J0CCS_i) zU;j>m*pk3+I)vo9XSdWVLEETJ{(QQYdngIos}YWn(EK3zb4Tj>%(CQaeI!2RS70Ov z7Zdy;!z1ivkNSW<@57DU4>rv()qZQ$-0+>CvQM5mH9%YFlf1DR=v_y|ZQof`Y~%D# zbu8k?rqL6I7D5?-Q2zAULGTo!P!|TMr~p+Im;nIjO1U}l0pm_-hS$;B#T4hUy(xq) zOU$eg9_C<)kzULar6Zt!$Mmb=qmM~@{~^yiqvU69P8<~lxa|UgBn|Zq3iP7CF4SCB zO#8l-l$}Q4yo_zH?SiIyN3?v9U5L)7YE@b++j&<^aMGF|+_=0vH(jY3Hv?6Km#S1G zuRcnYTf^0I+e&Cy`jcyLYx=8v=6{C&^Bn$vum~D{8Mmd#oyJL=f2UC?(|uAzP6A9b 
z=l7(4NlqWp&?%FSXBDs3{oesz>{bM@%=*Y3@?7};pGOIF&eXUdkngwe7o|q4Mx#6l&)_AIAEM#GfZu@zl|(SVowT9e-=fzxN&(g%DIQO6uchx zJsb{HYA;t@3sX1SwnBbEmw(Y_9UWs_dTukOI(VohC_uNYY7-Ql`a7!R{CjUwmor)i zvGD+xkos|N1oMIGbMO`A1LuLlv&lnOoqqrXS5$`chg(!Ga1AQH z$onO7#k3|x)S8voSX!VzCisY9M@@QXF^ummJ9~oz3LV^bS&GY%yRyWpX$pcUsXnHY zR~|ZPd6(uI`<&gmyPrd}h5>x`U=AXONhHbxM%o(lPWSuQ=X0u5F-*8|y_+s>@q|JG zrLYx=rqSLDRWaUgyXn6=TVv2VT$?0>A87(CgDSZJbWWETUnITuuZjm=kNN){6uwII z&{TT*z`xfJP(+(~)fu0}c`_iIK<`QYAHR%vY~)WiL45-K0dHpbE49c062j|4^1h$P zU$$e%*)FoN>tzMrcJ)ujud-d7Ht8IU(+~IPDYg%=B=UTYpgY%a!mNsLAeu^Qh}rTKzE79 zJ~c4Wm8RkSQ`mERJLkhbHUmdpgoOZdvEQ^@{;VlA;ACa`t%vmDny!r7cmTCGD_ES) z{E&g+6PC`eF6XQS-^&-SIt59zN*CT}FCuo3fJey{9;H_rq|sSYj_|s?#z&*$#|q~2 z@NAN#!GJ6vtia86u1%E}0y{87?XQk-&Zp4WS7w& zv=aVaBHY=;Kl{Fk-~Hu0^4J0Vn=D@y!I7c$u)t!&-odic{_x>PtA9YpR+@K-)mpB+ zdxRwpT7yXNO5EU@IB4x>z?}(pjFI0PvXyu@Q~-HG9rRTGeO7)l>;Cjz5|d5YM&zx? zOOS%;{4|-mj103PF1@d~EE_d}WB1GbUk+NOtu(B=^vCFyR|a|r+JQ6O5(o?s+8>B` zU6>-c&TCZo_Sy3|dE}ZER*`Fl-g8~P;2pzce+~jKqf#|5Xp|TkQ-JQyUF}u->-=GA z7ytH`>C)HLQ#Y1J(SL_G{~A^-p@)B}Cj(Bdmfq$MGriYCe?8ZXL zZ(8!0U|B@M%%;sdtORAaxCJ`drccfg7nu2!mFeMlLgxg$7}X~p=DwZ@oTuc z`3A6uVQ?cjS1d@7{{ZUWe?r+&aCwnLX&9}(bFe}b#^?v(J>+)0Y0X8>l)in8HEqla~Q9C+wC8;0YurDUv9g;ry zCH>j@A3(Fh3&$AKJ%@N15r+liEW5`Y=8C5cI=F4Ai~G2oN4!hAj>d~Dx~1Ag6Fbvd zgS(12hRSA&iMH3yKkV-SMdu$>T^O;`W&;m1FK zGfU4~!|D4sH>J7NzjX7fc01~1xxM^O5%1uqzX#!^l9l3*JKwfyZu}bg2xs%jpM9vF zx!_f?epYE>J2I=`VW=jX$8=wa*2qF@O=Z(n&L0-<{z4t_9)7L5x7GhKX#|%*mFM%> z+x)#(u~JLzm&;GHm#a;|Fn^oLT*4p)b_J(AuaqUuwOoFmd6|dQzWN`ba?C!9-V;(}AkP8=t1j0*QKJ#ZCYC z<|(`e^X0*3n>&da5}h6zO)MrZf@`rN`Z>5xSVi58H(_)^ zx(&N3C6O8h5YU@T-M?S_E$aDqdnY~>A@=5?;93sDJt4@RB;^$qd#==svpW8q(YK?; zvndDQ$!CRz1BDSaZ|zFY1tDea(ors55@}`$jJOk*{VkWzQn}p|-}yI3L;=jy(GhZF zEc77)2*Mh!AuHL=;1|G<2}-Gu%hR^c*vXRzWAh@sw5X3`yJcE8`&0?Wtongd67=4R zQsrHWH>q zEikoDU4L5t0@^HVo$H(?R?Di;1n>6V;1O%ua!Z!GRI1hokIMXL`Ul{P7=ZN&e=3eU z{^|QXq5dk*(Pu%w#mp;OcpmXuZx|??EJNM*{`b4B(tzj$yt~*468wkF!3ucL$G8dFsrtKQ45Pj_gSj>C2poJaPVj)x=HsT2uI1T3Ol-iDW|hH_N|qfq^-6-8F) 
zx+#XBE1B8?laoDR_F6$Mmm;Lmp(lqfm@i__qwc0-#RT=%O2&R|Lf@ zWX5iHvIrss=wmzLXD8ROBp=wXpCbcs^J==A!sl$`AAeQCj$(srKoY=(yY zNJRgK;L;k}XfmhR$MOo#8=V#1omXC~LaFsApL-=JWuo=u^WmlS) zQnPuTPu%1ttjmCwZm>I@REfqlByO^pR7?}H#Zj-L%u8SU*?Y$T_ri#7)%T`CRaQv@ zA#+<2x-L%#m!GP>0DPdE^PaZ*MH10)bQ}Qw2cWo=e+qkvwg0WKdZbDz6X zCXdxXKl%0E*FlUd=lCa23zY6d_NT&r(app67}&CAM-WwK)YV3(4xSAaglQOhl;6>W z6zccL;@d2q;Z|^S*Dj`B=_=mxq)?g6`t9NccASLM$HAPDKr*xt=&jWAs^8pXm3o;E z=52}r6`9Fy=ru@mb#;hFAPGQlyP)Rh0IQ|x>>f9MY7Hav=kJ37lQdZdDZ__P6E71m zHdlTfy_asf30o1G#_5wO_+Y1nxjTei6H}Rc*wwU2@3b?WF01zJh)X+<*c8$euu--O zOhW31ygL$hYG4d%Dr>50VF~2!4=(%wEZJRIY%To3t65p?5Mog2s_ugqXH=-6fUYSL z({_y3m>$4{TnJ&CGr@9Mx{L;FeKH2q+A(}a@l@F&nzr*pej}Se>xZ~_qqZe&zZ1jB4}UzD0veixwze+@ZL;6C4sG6!#Q&cPLQYp-7QH&=$8s zfnvqookFodp;(LbqZh+bs@A^On{rqNa#WJ#s|%Lm51&pi4UJo^=2mKpUhY5Hk7a} zXf>#zfHZSs!5Fcay&eXo->2)qq(_z5Ru>G@-8urOpWm#art*EJgim zU%wfNqM)`PY#_Hw7i{vk0W)gd&yJuBk{=ZeG*mDo1hkLu_79&-HQ{A&XuvbUCTavqeG;*)HF<*Ayz1{L)_J4D`9xo{(m)48v4A6N zALv5xDI+e~R_0)}@e%k2^~~Jy)m+<$921CM>Ib*ca@kyF!JK1`)`&&`rjMe%xjj^8 z&8IBCQa;b1Hd`m)2KT4y1WYS=fP8GO7NK>i7dpMMSSFU0{+{{^o(_l3b%lTZi^G?k z2DF-nZvOoYfxtPDy^oCi3*{M{wo93ad4TIgHL#Uj+_JT`QHZi2fWlyZp(Z-e*TOaj zRbkejwytX2Op&^-?k(-;Z>qCw=iZZw2S(q}5#4)?%@&E1eG+AeU|u zTE{31>hcHhqt%DLtSsC=h&+2fub^32J`y4XDN9if;IN}>bVW}1$WOI4mthfdDu_L> zvL*?9Xj-&6ztRw=%%zFd|5S%)t>uI9I6Ja4zABtE`LtJ@(;}MUr6gnTo5vD3vjGc7f z1JQk_?~0U2_11oBU(odZ7r&5;{c@%P^O(Tn?>L(1U-^*U^7QH4jfY^tB`V4wabGXM z@##31+Ku2O1r1#imS%#AeMIYR!O4Jt!8^FfB}g-+B>MLV+pGqh#tqd$uF9@f2gbeI zS=EetNiHQG3+xcjt>%0xhxy#i^%_ET5wXJy>Qo}1FB8NKvo=VXe*6AwuH)drwN|>d zCj>SwE4WwEWh!MNLfTWN=9^q5G87)SC2;-N)UG!%}2Ae~25 zU)R*Kz8WFnYj~#N1>$y=a-_}MDPTOYHo!0=e8!-|TL|_GKF3Zhs8w^B-uwCCz4D}5-9{p#Bp#Kci5{Z`)GPCs zQ2|rmS;wfjN!~(p|3p#?oNO*T3tR;y z-S#=DG8pnkul6F@HaM-q(wXVaihiz93I=9+;*^ZA-N|*II-IJTWD=;oW}I)gu@v%@ z1L8t^#+888dPW(52z8@7uog_s;zQ?3-els!MI~A?{kQnQ)|C}D$5(cOJP!4N0o%4Y zjHCvJ#z;&CgJ=#v2RPmo(y3pq7Fk-!F+3t?U_i_%fUoIbAsyaFjRR&BKS}VM1Idei0GYes}g=MtRxux$|m8hN^yo 
z4$0#%Rc5|cuCcEy++7*hW@~F3>n$KzSpdMeU}-C3TF%}&OviheH24M~^dz9)Pfd9O z)`#{H6!_NaOLDcDD)-6tL&GOV*$=?xN=XIQg{W(d@TNpQS4%{5gN}&hO5|*tSCQr_qk~je#wc}M8)^H&pSb=V(uRuHA>gO_nqa&_imUd>3SZ27+ zp)RF@ka+CKfentqGr$<$Y&UWZIh!>=)@$ycx3qaz=<_blr!}WP*fP&c6Z|*kPVM}> z$9C6>^vxNE$cR?Hswyta%H}_PXiAByVybkX=POZ=EwgW!WI>I; zsaAkZ!G|C^Vl>o5#?-ZHAeF~iM|sQnnnu>?iQScRfm$>C+m4x<-rHwyz3G>-Jp7PB zXY^`Ar3uX(aqX0Tv`Gh?MnN4DLSo9F2<8BfvAks~MPS~wm$q}efTWmfk`4A!qvppk z*x0Mlgh)AR)vR<-ak#A!VDU`mi0(7!YiSXakurSAH)-sro39;;VeUCo?Ii<<5beB_O>q1mn=IUgD!?s- zl@da6=K6|(jwyWr%k8hn4i-+A=4bFbh^nub%R$M7#fhKasOxW~1U%J{6VFFmlS=)| zkvhZB+8WZQTM+kd9rFH8x}{c026q)!?_C?OYRM zc4K39e{uaRHn};)h?~|q8SiLy3Gv9Gh1Tb|!TXWGR9ag6T^4W%{>KAZZLuC-i~1?P z7(C}i6v?S+EsJ2bcwaexM%jNpx*CGzn&q7-DO506i42tdeaQhvsrE%vF*e`)2 z)0{wkQkzpWb#uMTB+=nSy?Axk?@!>yj^ljUjXeM3hI9y!!(HnI?>Jc3TW{q~x*SbS z>74g;@de?$d5|$H+P3rsa$q=!KH56#`TeWwl7tU0brLr<2L+w;)#_}#Ky9>mjNxQI zEmS`+Z(ql3ue^3Ob>C|aCHhV`mWawIuhLQ&6YS%uKKRa( z)FLWj9I7S2j6=90m@)zmaL32{buvGL^?RVeTFc7+^{j%y`VggoqKo6}t?o70XW!7y zqwN+L?wl!skbqH0Hs<~2l?mUc?v5(a%xWcfi*nlo2w2D)6Pp$nRUyn<-q^tSxiB1PuJw5L6Onp|p&w-~IzTh@0fRyv$K83-CF za@s=Tzk~*8D;$AYDQyG`7~`NR0s+%hO<;4Y%idB zB{9$5e;z)MoZ7aj+92a-g0hKIhbN9_H@bT2A3G%*zy^S>x`{#@5nhLSI@P(c6khd% zta88JbxTY+8XCLPAPEG*5v1Iss;{t)QNLF?VCDaOVy!bpEdIQDSTGuE&EENCvT)rW zfFv)ac`x6CwDxIhVFRCdKF^xe~3SGzSMC<;#ZmN8n=Jy8>uux%d z9qAS*e4JKG1#|`b7=Dn5-_2__q@G3)JE<#QyhNB^`-!Z7=-HtE)CpC%7Nsmbv_c9R zlCSmOt@!qPJScDZ#BE>KNGI=6_5XYeEc21w+VmQV_jO~QK@~z!gFk;XR z+Rw2_KXz=V@caaONj@QSm)1JA5_Ni0 zbgj;H&lWTp7RQ>AZ|(W)%yE-w^olBch4szK$KNs;uFod8IZ~I=B70$ z8z1G7EmKmEMmnA={QL;x}B}1@TX3e!Mf52|s!_5F~cx`@XWD zoc&CT!>@BUO0Oc$VK~pb068kEzjbZD+_^#n3>2|(Ci*@0;Pt!t*yEelhZ~T?>}X!uh240PZd+HT z?WES}MM{<6M6~d+i#Qvz!gw-P)7OSskyL|skCRwUEt{x2)1NxLhfdsF@uqduYI$By zGg`vHDM?oKCy4Q06pr~?1sZ41*JjatBT$g6aK318d1svshcT#?+S$usSw#X&O*`Z2 z?oA$r45`CXYEO1_PSL1sFxYSXpxiFmmn*qOJA3+^4kV-3CRYRhJs|!jRjo`$ z{muVYj4gGU{KY|p;I8H4&njsZYK9u3|HM`N>E9AEtE?{XyV$cC4b!;7C@DCbKK$JH-3z@o!@6*(}$w=Zpi3t$WD zn;?>@zc5$ia=Tp&|**S 
zG#K6tXEO0KtQ~g?`A8sIMIad{(|xK6F?csJ(wP39St8Hr=L4aiZ{b6mnBy|!m7$e4 zF35E9>(+B13)cxxReB>IOrg~XA8)&Z14jrp9>C-zSk*u@W*~)}h^J$QG{dcH>zrRt zm5XTtV=-sn5Hu}%k439DIA5p+_Z+m!%=Q(C+kPkh z-RWcf#O@%uK}0TrYw0KX-4ieJzTV$5Oopw^_3#8`CbSZ=!FciU`dKN_fo3dO&W4C_ zE^YRES_5gy!`Ry`0wV0ZXORoZjay&>m(s&&4 zc~A^;Qwf{1Hwo@l_@e5Bw)~rX14J=66lm;qs5Edvzb0l|#G9n49CoMzH`9MJu2dmg zMhhEs9>^{hO*rj%`N);Ya5HjGXzcZF_l^JF`U$2N4ZZ~(6_+1R)F>J>Ub~DG-ZVZ9 zsE+qFXj0cNid@iduT`2UgB5_pF$)bX8A9)nCAY=X$;TFsz|co}W{JQmP+?+i6$m(y z2(qn&z<^4XIQrw7q%`{+Vpi=S!9cxtBF?M+BuF`U`^c41>kq$P0FXU|RE2Cy1>+CpHs@L&W`OLs_seJ`GN@pL6P zSh7}mKisLLt4khsxSu-Go5`W?NXWsk|Fk`$c3^LOy+EE1W-wDKDn1CMaTXYPLCY3s z{$tZ;zN>>S`hy<^m6v#qo(gmx_QPluF=orKinLzNqwH2pOHT^@va-qQQZ<{5lpqpa zfo;L1H0x#U-@+$<0BOa~1K;{RUASq2PE`rd)+A~UiPs}lb!={CzO}Dk6~5h)b$Zk5 zU6=uHhtl5+k9gSC_y%^U%m`DaudyPtHZP<%=FzG51w?BodkAu zK}V>JAT5=~#`eYe`b<{3T0}W_D%yD~mxbX@8d^y=)$RjYBR@jPqWPBN&HC?nvx|&t zwQ3bBA&KH%s%7m<+I7&}_z2k}Sa>cq?Wij+t2XxogEwYDDO`r!E_*;ilq(o{qlC_N z26c$?Cbl(`5->1>eH$r=piBF1R-@^jNY{wYu~N+H`c3@@<09X9O^^X>p29!~HRH!S z!9-!a5`EL|ouPZy_1RKqZZ5Buvz!c_n`G0}8|dar!_< zM_R8&$>Q?axk0Tu)gAD)`f-6NaE=EI0Kp&hx**=MXtf0bnt9%6aHrVUtxYWhviT8) zb{AxER(>#Pw%W%Re*iHh)vKl{JZBt?dZg@}m`4u!{@_ua@P#g=at|~OMl3GIz|h$2 zVUT$P1d_&_mx&7LOoU&>>tk(CxmQQ}nN<-vb|){wn?7=b{s2DJT-uq0>&72qUw%E0 zI&jro*O_T+foD33-+tA5hoxdBWP!a)q@3N_Y~pbyq|{1L|Y-%R>%7A?8*FGN4x zOsx`yk!nO~7v|&2I9ahk4w?qK9GxIRZ+z1PV`IOyxZ{ElGH@XOs$&g5ez9ufXZ5-= zCVAMegY_BZ(ha@r!k4Dt{-koLTP$n*>A3JrX`)l-q`H(n@kuT_C!+(ZP?jYjC{mPy z2!SzC*ywLAlIab)v37e_jPaA!dD92WCfOIEMDV7=H}<=*H*9(9I&E=#71Ek&0DtI4 zJ9)g7QW{5vLQFYg0t3oab2aQ?I)G+J4J;H@j)SD;=OL#fnFzxC)~u0vgt0fDxA9C- zZ(Y;oPu2b(+%bo(mENX5AYPTtzyu!-Iy=cewm_vvjw$EWj>AlMyFo)kJpmkDxq@Z} zx61rl50BqTz6nG4Ly;>3NV8RIcJ3Z9+>>~=lHj_)JbCJi|%IUFk^`b$$b~QT;vQbN{``RX8 z!xO)jT0_f9pU8YU!@D$CX<(qh;9PH;G=nn*=jf|7Iwwt8z&5P+v4fs(1FU?7{KHhn7M`arq) z=<63)t*xoP_W@(F{&9AeTmDn7sSSs2RpZgIo)tbMR&55`mMaNYg1uVDSo&(cN=ZDmpuqx$XT@VHsObpgxLJb1|EC4BgWp{zfDy{t%>_5!y 
zHZ(uwStFT7DcGNucxJpDJbmH*TcsTZn$SZrjEKW#D|~h`Cl9s5g#!dJ&kr2plTVTK;)E)9*Ic7s1$Dn9hsRAlypxO0@XWeZsJfC zj)j{zhn;Ak(u6#|JX`B6-k9`oqm>zp5Fv43p!CU1RdHriEFGb8A3nNV^{3#1_3Wo! zd0Ax5ID6cxIe4xogNNnH37s9|fgnZiz>=B7s)I~QHC1(+RKvAk^KrxN$0;~8M3 z%v)XKK%zxRpjgMDbpF>)1ZjssERu4iX(3anhl34_W}yS1{_Z6wiE7?@IDZmUWv@beDL)D&u4yA->csKU|(S_qk-G>ow4Ql zt}j8`1LIm!OWE*r%o3+|j~Ta=)Y;(p_+6wZ9ar$~|VL3$x>qMEoSPAOqdSf=|FYJS}9v=3zv3iEMe$fSLBfO4H=YB-A zh}kE#VMoY1yqFjL?`6@eSWxoY894xeIo_?WUR9`%e;lIUvW2 z^wn_r-9d5u4aT8WLY_lr%!}Bc!F!YD?WWYh%sOM&vsR}y6|~vXW(*D- zBKRX|9gh(MTP`cM^l%_Kfx!^x9F@>6>i)-Lmo?4xdV$9Zjx&iOvb6M3bQ66!RT}gi z#bD1Gi1%}(_U2HLHWJYI42flox`+xcDMG>=N$!hZI9B|FbwnPD_PU7j|8__e4VK(N zqt6-hlD~J^zIjm=n6l@UFYa~C+DGBWNc6;T8xWv^bp}zfOP?mDe?$Ey#J%t>=8ILW z;nd#0z)v0B{@9*JyffD$T!#pobT)~|<)FC}(AOiJ;Y7c_*%sOP;U8-WYKzHN67CLl zcms0GI_|7#PYejO>`@r;JPjUUZNUJ`_2h1j%SWRxLH~j*j{n6X1}*$WFV4H9Z^k8F znN(a1Z3=a0hwyD&X^Wi=N-R`jV?I0SGjh2Egl`Mhvu6J; z2xum!?5B>aDKJubm`2$A4ztUcNaCW`btP&}koPVq`cwBQNP!cU|SA9X*{4gc7& zB?t`K<;CKdB=(l=-dKja%n3Y6j>1gJ(cJ8Gj8Y=*vGNanz9~I?r{Exxv|YYXp7$Q& zYjCVLu~M4%GQvP#w~DXAK(Rh;p}dqUROFiHfhPrJLI3>qH0#Df$xMKqCm=QT8&H5p zCQJyLh0aG##I8zXfk>A5eYg>~3NBLcfzg=i4tvw8TDu8`GqR7mY7tHUK? z#@!U?DBAv+j^g07q$0y=f^s`h9wPIUof4G*>=-S5I{*i#4qBC(J zx%}!Xo1!iq*7lqKu~xbu%N6OJq5&f9)Xu^!kHR&94+(D66`xrA3>uDR2ZdsD7N1yl zb6kNr^&wTlm3C^sRHv_tpYD94K((rN2~3NPiUN!K$hBYhRX5}E+vp0P{oj=1ePdHeAZ=yrcuz z%#^lXO#GA`6>3Gf%C#+(_Xj|M7>D)I)@;s{_X?n8Jh&`9z04unmLeR@8J;eCp9AY^ z6&jG}gdFD;KzN$jI|`r&z>o9cvoS?pNNm)7<`yY?E)kBKvNX;`3}fR`45Ta$XPf7M z_i|Cb9xZ1?d{9i^rEyrIKE3WoH~%$EbOPUIwK2oj@)wp1hhe*0ym9RG`}CSE30}=+ z<3gP;^=yJwI7AQSq5=PhA6i$Ju>=B~d8L~T6wjj(69Oxs@qXK6(J%EF%+wULcKU^+#!Vn)OEYM{Y{4NH~>Jc6?RMof! 
ztXKp?YvOjFtmfM{RlbWIrFDN}h0BXXg;MSc2ic&*_(~0ZP(D?W6v&1ziw7{)GzG0G z8(k1Zss2FM<@drq<~g$6A^wuTy%c!`X=F?gf$IMC2ob`uq-@>xoUu9l`EflshkX=B z^Bny<0z5Ra^zP5LAaij%Gd-!?qDO^jN&IUD=}vh`y}1e3-$j={&~93xnSq{2>7z9S zP8ZIUyBHJt_mf&O@bYC9hL;tfBEO!x2R@+~GKMY>jtBKy1X9yj=f|}ge>)pgs@)E< zl}hB(H7E2KMx1x@u2r8hp-*;SUZ3=6Y?rfs{P_of)Lx&5eiX`4BXndnK!W2u?||6FVkI<@%7jg9Y~a5J zGW35oke7orrKit!lBVLHckahl>J@Oshl)rDb{Jg_Ei01zhWD%ASz{i1(m|ijrp_m* zg^`j2Y){2I^M(^YrZ6G||JjK&|Dx_4f5|gzb%-!x=MnnL5K0`cN^JuNlf)Eq=c<&M z9cGN@sy5$g8imKD-8Q8CXV!0S5juJ6B+@KgG$=xPCwXm z!4)flU9a9Co3HP+I`*4w<&U?r!0_UWES*{0F+S3AgPeY+!hU zk&gyr@dE20ek#ePF}j!f=bolVBDAs*;)U!8>_HPufbT5^nH30nhS({Lef^$KK#Ck? z789nx@7R_ z{~q;AkF+pjg%%v*(fh66Lo1j++^HN0U`@Dr1ArijcVDa;?}`qXPd~t2^jH5m4IUCs zYF`Qvx0r+5X|?H9pvp2jNFj8ZRX$?|m6-O?R|QcIz1PA6#@O`2WH7?NB)pCfbH*w| zTi=3KY@qPyOK0#iLIx^=Ca5`Asd4v_J1fuF3lG zcAeO9TQhsSlxvj>Y{@QB7)sHrx@oL4L^G$(>0OnRASal3D@ehv7|>h zXl1XdSdfwV`5WXAZ1wqSi>1Gc3v#IcV}21838g4qHNb=}98AmZ4PksA;|`H?j0wS=YFHTr!@QFeFrb}DlH-iS_QYkUem?~}?4KBHrRfZhrOEaaC9{Wn%i0kS z8ePze4u_?Gd}?V{Q??d>KY7m^UuqtCXL<{@p%~kq{AVBezqkv~w~$4^H$MH#5Y;Wg zcYxgnL7y=JI5T+~_Pmse4SHGQ?wptgb6##bPxy!Y8_RBbL=9+foBHj=5QdiS%2vg1 zbEe*JM004{Y-sro`;7QGU%|ja(dVQnxE-uDH!(}c`h-y^Lm88;qU!HLaTS!O0!!_= zYygus^#>92y!P`S@u+WejxjSGFDd$78B2JuGgw^EFxuEclkmsJHGa1HH0y5{ukc7>#S&+qBz968LqFEWtl z*AsI@g{sbv8R3Rw%iUs@rVD-*$ZrZ0rpZT{8ctBe2=K5hu9%H#qDhYlYH!4G%y#<% z&S)gIZLo9auzURxY2Eu#k0KtXOAp&Alz{19mPI^HeGuC<-g#TD{#>Wre9R?#oGsf? 
zDbM`9eqpzf+`JQU87BW?nZ&hh9~nOz%rLmXT*WqNzvLe^UEf98x4y zQ&ubR>7{xnN()7wABD%{tM6j&cqttE{EV-FRhYY&re7!=*|els+aw5)vaAV6ZGbn1 z^z$ppm^np*r>Z`?Qr1$DI^V(#Qinb&f1!1-Tb2ISS-+54=RuAq5cPw9udsc>V~2_k z^w(`>f}Z*%bjIM$qM&@2l1XE+to6|5wG_Lj=O&&X?cuZ?4=2uV`biGG7|k=%3II^L zE%)+KKml8xv_)Tu%fhv0?AMUoD*`NduiDF**+-2e@Q0yDrhGBS?f*VU&uF@^wG{s; zEyf3~W&5bTGNKBakFRY_4YD7nL;r?eQHqAqlG|UI8wo$S&!0}@3K98sI;fmh*So&D5^lodgq?wG zuzfMidNtGMIR%)F8OGicr5-BtE$uBw*>$|ZauHY%$O?yhgeuWIbQp?%ibSbmKMO|W z<@B5n<%i^)ZzAbMe(8Sa@YiXsYV6h*d&c)fJ|Y@S3s1#-V|!%OLS7rBY*(PJmHyqRufe z3H|~{EoVo3n{Bc$|5urc?;7g>O)<`SV8t3A*T46I^ipivr8|56^l`1+X>KF=srSWT z#jqE1r5;IZy5H`lfdQE5@KEQ}?Wf-z&m0i_9nIMTGzaJW1=Vl9ohZNd%y$x{JwH(f zhD8WbOZEJ#`VfjI#Lp*?-1g7D^MuUehKDJZkEwuMGW>26On`{<~mtn-`?JLgeUrGM?LBcKJjC;1*IJ)dAqU(LLY= zxiql$Tih8UrWlkR)tYeJ|L1ud|NXrD>1T3M6DP#p3Seh0`wi+fu}VJufrt8pR`uTs z9TMwAh3S~ort7I`RWa0|tN0G%&``x2g3xmDj#75?t-tk~N0uTIpD20@g2)>8oO2af zB;*Y{EiObT@MJz3!SU{kFUfP^?Kh_CHRYMQNcU`G1CIg$idlJ_dc*ETp~o3CGCS+D#VdyZa^?Mm^qW4}?2%Yk%FUwJnrFcSTn&Sz{wf((g ztJB=1f$PBxr!F?Xj7hT&i(H}o`{F3rm5+b}toxy$6Bg)bq{%Vu?d4no)yQ$M<+l}h zih>@3@`2OxU6aK3_9q^BFVIE=y^t!bnYS!T*@wqvU++L&f|} zR9W`Ivg2AD4jbG?uOpG-HRrGfy}Uii%}rzAPNwOquRcenH;#KYb+$xzbru&{=qK0$ zuEnj;VIvAv=jyvVo2FXdMb6psffOO!D>gFuFT93=KckuBF-J`Ia_%xqx^J(A3N3N$ zwE^gpbUg%!0nGJJHa(tx745Uc;)-e?m1ftd&*xTWb6-9Ecru)3U0IV$eIOa6f*-+T zRm6@^fJkS4c?U$wQ4;>Gnfz3NtURL)Hhpv$*QbQq#`Ko`%+2}w8ffwQJdmI1=a4QW zKW8qem4D3#q$^1Tm}iXGni#}ONFOd=Bv^-9x#mKjD_p3xtG zODwLTl)_i(S6=v9V>WFt=6#p`s3+B~gol(-mqY_-DZOn~0&||c!m8>i9)}5;hid-s z>giFBRtLnDYrKdCNKP8IlLU5H72i+eyaU*aIqt{QC^k}zuzP&?%BP(r-Rvg7N38pG zpwukA6od<|+|!RK5$k{BLwQtA(o<-v|G0v=5j&hw_UXM3GtXGC$C7HeBzgrmU1G^U zksU5QQaEM;qH!$?(@J4_^tD%{3g&tUmy$YQInlI@`Fft{o|Q-~CVvg!oq1I!E;pZ4 zPhT4VWjbH=^$U&`c^#XaNWnn$qNao%fS0 zZD#keSX++co#QQ>QZ8+P1oD0=ZAINbBg{c*D&!6jbXcXpV1G$OQ>otg!n7Iin!ktW zD?ACwV{cgt8pa+-R2=xw0H*r3^3qdh71|YWqxUf^qF2j#@6MBep=`_vduW z{k?lfX*LA5_SGhfVrV^H`*W$BsA2nm&(cqRi4Pl^f6P(=K3P4}ad#0GL z>9aU#yqR~#9{ZNYFVsW6<-9~_5$OIXYQDBSQ_I2*Zal;NT1KamXRQA$t-8|YT&bfH 
zk8I=X9k;^Kf^c}W;Mj!S$;8RnUT*Gs?oxVLuBs}5$0GNz1aiIO8RtqeHXY1Kn#v0_ zvgQdbH;)5!hafb$+2y3qYaWGK_&*9tew^mgajMA7PWSb_|eIG){U577y zSvY-4&e3NvWTH-Kf901<_GJtSL!|g;v)=8T8QFlDgxa4#mf6-!1>OSbYY>GWn@VLi zLC{2<&!A$@TOnvUv3l3C!tv@90q*Z_oM7|>C5&_Ksj)bH8cY2w7$mKzlVnz%0^d&^ z!lHOgPJzdoe`7r%f+TGCPBkKRJI3ezOF>%|5zDFHA1tY&dp2b1(5IP@lw`&FhNSdZ zBuSE5mXdWcP6E9ocSZYkIL0d=ku>-$QL=uXC*IGqKxnc^0wy89|k^R~S3FQYmystL|vrvd}$N4Q9hB ze95h(6Xdj!75aW4UEC1p8#PMbf4tlC9sE@{y*=|ZKaPvmuY^d4LGoGs+QM{h&~tPE zUTB&E<e)<-1tR`KSd>Abl8mt(@S z7vs%26u>I`0`h3km1Iq}fc{ON;VNHTGc5Gr9&v-SM8gp~l7aZ$rp!hfGz{V+ezQ_ZjB7Fp75|Aoe?R9Z z#$Mp%%PS?5v?<(tS}DOMfkRVAuwHK_uL5P&eM`%)?E--Bg-!EoyM|q983f|cz;pWx zarAxuegQ*%h<8yyI%iz>ElpYaYMh>r@d=Yp8ybBoH3x_X+$)RW@+qO z`cN|ZREl3h3796wJuNUuOi4LwZ0Iw^VZDA6@l!HSe(!v3d>bZOE=JByqCvIOANUIR zpy68?CS?O3Moa6>?UBD{A`%whC0ftHQ%n=5<-=ar($ zi%iI7tod_k*)mn^sBH?@w9-in`&6J$_;STHG?dNiVU2ue{bL%k$3{&}8tFWRXqu)G z1+ux;6?iYck+-%GHI@!V@lYY3 zN90`Hg1A*&iFOeehip3*heQQ@(t`-$bCip4}dfSV+jx@%!WvRFdUODesvp=+;`vAZS?;<8ApI!GkAX6i(;&WF<7cA7#Pzk6( zj#gHkK>4obp2LN7Zfk^K3-8r08(Vg59SP;7pP#G$0QTc%-PTG>Zv~Iyh)yT^RrMU&kg8A#n($g;XvGY-!@WlIDIopLjFsPwfsyaq2C{tiq%lY%0rd zzkaHNVC?4wALqrVYerv6esX{aWLdKxCY9&r0Em(N%SVd|Y~1ezuYh{l)VMhu1@`82 ze*k^F)yV_*UBjaw$0_B=C(0gUwo<(8cH?|vBaq=09|h^4cE1WqF-0u6JX-6s>0ee} zIWi<=fi^xC)fOr!W#;r5gU^a=6m*?AY&Rr#$swF)$;z{1^g&#`6$!A|?~+eE&fWsm zx!Vp4CwSOi*IbN{kC>xtYg}C@5dY#I1$~!x$Aw+k3QS8MMC<(8XDu;MkSz%{62$EEW`-XrilmikZs8oIw_@z$EYa24@3@Q>3dbVP zu$l2JdC!ci@FwS&Qn5Y$!q{V8usF`*u*{ESR5)w;MGje}5j%q?c_MXbBirxtASPu; zXQqQ9AMt5@VLG7R?1TkUm|R1vI$Babc$%V;5P9X~QDR;faQ(9%wqm160lyf93~&b6 z3zukegh<}#mKza`|FW*JBBXgXhh63)6$S59VJ# zUY630lcW}ekkjNSvx9f9xWhAnJwNC35z4gnUipF z`q-zCv}YoCa-n)Pg%|P7q+NDB+!7e_rUeNDb`YWUa>{pOWb3 z&fusSA{WM;ai#T!o<7ar7#yy9YK}CP`u(D>QU`!axtdg--u^!0@_ezdX=$@>LL+td zj-yFND6Y!I{jBGmSa$pNFg6J9TE!ry01V)Y2;mPY0`B!ysh`&>ct3YWH zmu%Tym-uT!YJRF}*{pv68JB%}b6S#-&z8cKyy#AfzOq#gKh)tOBk&$5&6Aub=w+CY*#t}t&&A+}vdmi<*NOpcAFvYA| z`>oD5LU@0MH3NR-F%>gv)pox1h~RZU<(rLtDe~Tc8f^y;g(fw&bL9&PZQ7x@M>;zBHI(#k$-88NzX>&OB$<(Zy 
zXsB<=k)5V+B>{eTM${euX-}mZHBo;cpH?NYK(;E2Y3#qt<BdQ_ z)Hh&=$F3XC^4f^rwrxfAh(w89UdzP?#jR}pA5QY&5KXT*g;FDQm3C}Acf!!U+J%>6 z@yF>4GV7#5B3BRhx%DrbhsbO%60)W#lpOg>!aU{}x~&n?O1QJ)|Aq|QgZ&%Ec%JO} zdAjxduE;7lw_arRXpPlqzC0^yzm};gJ(ZM)im{YLK@-zeDu*98WG>Lr-(b}{U9i1f zF9VF9YH8Ds7QrNdR5IJ!UbH8syAyl8Xr7s=e)J3a>eEmZX&G6fz++PUd%CB;mh7Yt zC%+>NVy8mx@luFYj26=3{H&0Hg41etwl%pBk5XAHJ)2}-?l2;gRxt#6Q_3vN<(C;a z#R+z77*Uo5DhB|va()(7bsf+I{xA04GANGd`xYHsf-|@cgAFc$pg{)>u0exa2(Ezy zO&A7ucNm=D?(XiAKyXb40tpbvo8PT>t?sLO_1@=ye>pAHQ{87yckjK{K5N-eid$mm zN&wa=l!qV7-F?V6;X!r1B)*%WCR=z-2>-E6R3?D@2S8CpCap2@vlDdS|M?K~Yo-0S zKsoEZNxm29fGP{bEME?P_r zz1;q0-|`!RtAy|5iAOmU8>6jwno9rzWLng#2LBzduJOOiKja-2GqlpVH94>UCS&GW z%Ikf=j2f70MT3@!dqxqO!%Up7GZ}9JZcsvyj~Qo9WCgn~m7}fViMhRAHGd5aNawcn zM<%4BoNi)SI8-O$UZ7w7YVlwhk-+(+Y)UD$?&DdrsZd^k)fDy`La89PKulr-S}vxg z^L@=`aAIrW!mq%>Zeop5%9d@kDOj#~co0jGoON)sS$7x@Ge(ItR_Pce(oK3qSA-Jj zR?c~eEm;2Fw%Argp;p?zsDC;$qNM5=VdjJ9@cJ_Whr~W=DvD8K&+jgyaf%VZePYGpp0pkg0WJeG6Nqka}$|ENF24Fe{k4&vRLK+h5Zv1 z!M-dh{5x}xJZ_c{>3jS3BjRf;7Ppdaz zEsF!7N_`Awh(D?Af#(r56O`e*S6yNohyxp{tx&(hX2?{--_V_WS%5Yt{Hxz@C`XI< zbz68S9n=RA^UWo_^$uhNR-^T#9&@2ZF)6rcvLmLdOPXJatF~&r=cgN#(OF)06F<}M zBRkIr{(g?s)(%WSWim0uR@&!Jv62zxjeUSQeSXNZoqs&?Eg5_#p8@_ z@VTZ~DM@TkLLEW|xb+=aGUt*z4p)L%qHM65yvS?Kh@JZz1*{@UVF|0!qkux+%sv!r z6er+=>??EA0%3dg`QilYzpTQZ>i0-{*%vBlj8(0Z$2F7*&|;FcRV>J~gwverWP5U+=LC|9>NfQWp+jf&kN7WI zXd4LdGV*xgZQ#5~sGQNJhcO76mq1As#kyG`^_g#Y%|@CFMH* z7~&e;OnSLa&$!x!Jd5Ydd{&Y6&+0S9lK;2+!4T0_AF50DhP{t2%t)MZz|(2sMh}SD zHAw)=zn*N_Mpr(|kaRb1Bp`}h0q&hUZd{*nTJYE+bO5(V=FeyZ^j~%BvYo7t+DzKxI zES5W0vMY=q*4)}*Z75j&CNrnu<=L+T9T#|+a;x+KdZ1f$HMR#lag#vr_GCZzPVydI zOOqa|zh#TBtN!rG=A4_!>UI7SnLxeDPnR@So)yA(+pguKZ#sch{;KN?$l>`0EHwuK ziJ^}_ZJL~(w(2FMsgGYWT*4-H!t0^}tIP2uu?4eQLnO46YCpmr=SH1|P~Ux3IX@u` z<8ZZAw3_BTjhv1EnM$RO4og^=Z4M|<@VwTo%05*sTKts-ioq=q$)@ru$$K;Gr6aT< z!l;Ax_7CCO$@1(XP0AF5wa}9GV2q}edRIjJ_*y*Hnm6N43>Oo})rbhO@E%w@^@}TReW{+z|uHNSYVItQjnPnDp^E#-G5b6WkGyU0AcHw;%Z1tI& 
zOTiM$K!CN@9(l=dfaC_)0PwN@+-fbJklIt_cQGYP3geF85suss2SdMctT~LMeLIASPv4NSj4*(utQJs>;2u#1GE&`tutYgV0_wk4q8#_11|j9K zz%pu)h+Vv-4J0pdj;ycKcG7jC%$pX-PWC3Mgx?K_#JZ=oR6d&=cxQX|R7+cA1R`${ zXm`yR*#9*`^=`rDS<6WbKOI7#+clZV)is&=O{;2*X!x&;L4O(4N;GOEKqkei0yWzv z7?-qUSj_-qMcL`Iae}>d>C;u&a#olugUpK5Da2JrY@$d3m3wLZH}?uOG;A5bKfnQ- zjf+!GeJ%^{-U);KE!xdBG`wr&_KXJ`olAelqKg(QT< z!689@s6wJWmJwwUxtJiI+nwQX_6kzUsdeGHHrM4UZ#@7fx%wwgT;0#8Qg_BZJ8>N5 z^I{jjx6_mF)+&cxT8)1$Q`!&OdGl(1jsP1|&hFVpKGWAg`upN%KcxiQmcWT|1>4g5*_03p z>ie6-BHGSqIo82cVY3p8Ut77M<$UrXq~OT5#K^W)t7@d`+z}H8gE-@cY9=EKFJlrP ze0yG9Tf0>IfU{`{Fo1$~$nZH}g7ZL)=93I#NnXMqiHm_)fwHc&Q*PONmx^85J> z-N#ek8>6O`G8$&KXI8aZ2V=0zF>%e+)0T=~+Isu*eMy{J-HqIih?RJSK?@j5YJpW-FIG;gH4;62d{L z7+qT-_$swsm{E@&s9(UKLj)=x>ryyedGlQ+yeBU z!UDgBc=T2CQ0@ID=JHRhR&TtP;?cyb@fX9|MZUsNUF zNiOf8xCcc=nQ@7~h`eV2II6Pe;+P1&0+2L~R&h2Ca>;=#cNuV9h|gQyVLQQ{6ZyIA$EQHn9OwH8AQGJEm~6o?KPP7k-6E`PB{2Gt0wDiw zPe>>7O8Z&(;(#Qqu2ofs7y|1U`2sW-V5+tOXT<+vzDu^wFRI@A9k$oc zA}_t zvUns%WpKpS#Xo=q>h<(LcEevV4W}+~z22iU1amgQ2OW}h33uLVYv7Nt^HMExi~CmS zsC)uJc71>hB7##dy8>Uz)CCqi^%nRWO}?P~ptemhuk(ey<9#uQ2e98Zhbp^nI97f5 z&DVLa0ualX`*+tVc*XlyUCr!#?UxVfJkh^%YQ!K&aKmy@-lfg&4sl zRzD{`(67nF6^kW5jdne?h=yDD#!0XJ0}v+U5uOK6RNMT)tqRQj`UpVHYs4)_cE=Ah zdCG*g=X*81`TN_w_ek7x&*@u?_w3Gu@~LiJ(~J7cZwksuI!IPl?O631pr_#{A!q(I zt0Ms%NZ@u?kcROMLMA?{O1fzji4<{I$#{}xufAVo8Xa2BH?jHiC#&&HJ(!8y@PkwQ z-goEO8o|H+b;^JB*nbU}|F6$Wls_IAp-L^TitD$pDqoq%I3g*@p{SvIOI=V%0I)nK zx%m4D{t@07|Mgis)?qu1Na!;=Ak<*+{i6lvIkdavpopK%|N9ExfPUDL@f1^WN_ zfzm|UaU#BqXJSCTH~$hcKhE#%b)#etLQ7;iq~7VhL}7d@y$9!1aaUOVuT_4+VY-8M zUs%}K{nzFsLc+im(_`u4?Fx-Ld+R5U;oCSjj~M+HB`cf1GqdTFNT=~Zu1O#d^7i-p z!)Rnh+u&TE*|MZe0_oor6Kny}uQ&d_fA{{7S*0m_5%z@za{O$4*|R;xSOZ6v+|e90 zJNG0)p{HBuGhfxsWg==m`Fn#9jxxPPZ4&neJ5`nxZz0?g%QQ(1jxy%i#e!iM@p8sfVBiLPzhm~{PIl1-Oy_}(3Uw_y8zTkjfj|;S8I=z!#&G<*fp*pk@9XAw$SKFsjt~Ie1eOqyxPq*)2tI8HAf7B=`33P_ z`3&nnK>ZJyn%JGSFMIJVGK01p;ekw7Rn$-DTwjm;dSxkO2Dqf$Y>>)Z@jI>#TNhOS z057-r?`F)}HvIM&p8pX_yickcP*+pn>99pA%U`yQZh-!(9D@2KSIID^<38r!LJ7#J9cMWIq2sJayc 
zG*J19lJlNxM#i5{ibn5*5&3)OY|DilX)qWZFx__hKG^cN++(opPsyoU_Mk?}2#VM; z){gg(ktRj&KtA34rm~<4ak3X7FEQ`nU}cQKjJC)R&vO6Np8b?hyfYge#g{d{8*nN} z(NUByikbjfS2s8i%6|S$>_Kqd6VRcnlu(^(b6Rzp{i&}?2$ZcOSiA+nB2=!YNMcq< zWuAWpq?2y`##C}?!Bbu~b!bAOg0I;hLNy^i6oJDVT>z`hR}W#ZAr8|!KJ5SNLo<|r z`|{|fzKf3HugC4^AD4)qt{6UI4^r>0N;M~wMerad7vpAeraaCDTONe!-Z_OE z31nquzH0vxz;Fzz=!G4qZ*KRTVq#0L+ zd}CFIYiCO-=xtNCgQaAtcvG*8Sr1=w#>F$*eKsdmD8eDc{b<$A16_&hQtSAJ2wV8zRys*GxIB|Fmbau9`@Il~AJc zAN1|mA1MpCaw$(cxhhiy4#`qvyE{CdMB6ZyrJ{dI1;<_jZnVIEiJA5SLtg5Q^fU!C9B-Vgj7Tf+`+gmui!lkD?8bfb`f z?FuTkzHDxUM`>XrVZpK1Odi71vzLNaIC;5^cy z(D1~Vw&k9M3c7MwbK29oLh&iqp!k_>6AKHgq9(T4=tn(ZRusO+mH*R~Bx-66b{qNA z!k-S~y+zOHXACW)Qo~dcs-jWP(=$xu8eEfQPf03)a3ByJVjbEH5Dae$lC<7zejs&y zvN-!xSA#=uV#oQ5p6{vp$+HR&qaR+5(`bzTa-)W*$QqJ62n3QHv#W%!(Y+3Ct<-cy z6o>CMvPfEaiMU2t7|NM_KCElvtm;>3SPRuHA*t=0YX@Q$d3RWr3EnXidtSF!R*Z}2 z_T}L7nxQhBtOCDx#d_~My+aMu&@(lC;FzoV69)&BI*CnP1rrkD#r6a=Eg@wk+r{jT z;l4Eq>Wx{zsDx5xHAWDCh>edyG-iiH6#~!E`ki_!PiV4_dL zH$+|^3i$AdGaLGMhR2Y=hDn@MFUMD?X7^}qRKO2-ceHJr`9g_zv8l6;hh6dm5R&j2 z^G+Blx%t!gUM{;SMMBp#i(?VJRg79r=S7h@J_rm#Hu$)d zr>a!40-F6MChTeddeIXq`E7;L{LI*=6p;L*w9OSOFdAz`nk zQGF*%wasze2;SmYNQ>Cr;LMbRKpN~w#e=)z{`>><*J7glTzX{we{8(ykxEvwZM}Gu7pmK?bAmmgV@M9#_$0ds`6W3$t{F$C#>Vw z#*NC7J-;{lmd}V#QmXl+HY$>7Pg{T_QlTkaFg~bRk##lP9$W2@p~_V$-9`xaZuqnC zhZrFjux6JgT1s<~MN#`AG73ux%H;2jL_Y5OjXn{&b3B&*zpnl~Fk#&EduyA)O;xK?wY{WtKUK9ihZTRm9?0T5&b|%?17!Xt>A!7~h|RmO^rUSKEALZVXE!3U zWG^34U~M?OAR%TA-vH5rPn+wuXAtXsB*)@S->Z1BI!?-I5e?jL$YMDc1$+N>2It&f0~TWIrRQOw0wfzoJxJjlb)gUb6WE~mMpdu-|DKder0fwZJR`> zZ$8-}LB@&5RA|x{59#nd8-(Aie6cQ@ZCdP>+puD-x+ZKUh4{h7Z4=G1V7LKLVv;9E zZNG+P+KX=(uM#^~)`OonCPwQHlhuWGUP_LZ1KAGMyoV@UF65ubzU~NR4G7l+9|}$T zPItOQ%#oT8tym-D|Xjt#drmPR}UhInBqt;me04!}_cuqL0A`@Ll&UejSRjW|qWCcVmh9#_u>XDWi>vUp>Oz+NJ{rSNVeq+2&`Rr2NRR81;bA6@`lGM7yfL5oFPknsin43NUt6ERDN- zeRl+Y!R&rlen1kdookty^Rq7R+OXpEFZ4XtqW0sJvJS*=x(wK-6PZ`sE1gJ2@n@X2 ztGH(NM5QxnCL2C|oMNkMVMqp{9~T>jCadE~a;-gSMYloYDi6Kq1#-y0hdQK)wM 
zo-o*@z1~*^lB$IHP#7q{fTf(W&=FDqfKWLK{!VC_qhqU)(vOg`d}oxj0oaxa72)@3 zJXjsQmaxM}8RsGS>g^5>VKuJD8G6jasEP<%1kkp_mlXn;oOj*1wG9uZKlMU zLPGU5^q2U;&@sxX5omvRiFnorZRc-ddtJp;uI!B%MHI!%I5L>xs@fD)Mn0VwvYQcK zc#@uAV5L6MF2VV#!wk=5GRtr2J(3|wlkz4fE5007ykw6cEW9bXrD#xtJ@~@BRea7p zYSA1Es3=eWEpr(M!-_T{_K0J@;_&9a`@L8JdetX2yX~>DPU$FN%kg%K0MMNm1?6;- zflr+NqF^Xwt;G6EV0_n51O0k-iZ$rUc#irDp=xpTK-l~Zf*XJi0gxJ1l6+yr(JP8} z;@NKJZZK#A)A&-TbvcnH_1H#p(%sBeYQL2)a(Mr9cY+Jzg;=l0sYMQYl4i!AS6}rYZRkqMNtH>XbIgS?5-vAhcI^fYYo`_NZ@F0ozHT#MPsGeu zRI3+wJ5gk9o9^0SuQ4e4ZpNFwpvaX(2dd9JeR&sxAi2LM$}hXie7@D=P@TxTdI@eA zQM6yE5oi5*$dZa#diDp2nS-O#l#@Wwl^d_kJXZd~(i1qPFR{undbmy=P)Sk>E7Qvm zljDA&-aFQ6pmsr2owO0s?#l@&I*TI6Hmu;guskmK$Z@?ARL`*3isM12STs_*-_&LN z3Wwc7B3nJ?^#C3?1D9s8IHVT&(T_;eM-$AX6Fs$NZJ5%0J zlO+t6;~Yw0L2zNW036470BC$HJMs!8To7@)w zH0@({8&6+niIdUcnSTHenT@^gN5O6R@zJ?Y-#<$820TRl`BQ&%iSu9<^!oZY#$&BC zPvAr7(<&21&r_M+g$Gja$ElC>DnlZFSY^y4P?;ZUv|=~ zOr+z%x)rx=jMX+vzmV6h+P%|6;Mz>a?js)JW?QR<6*(u+ngP7YZ1`?G%8?(pP+gHc zfOXl)D0YQ8$vovPV!~;a?~5H9V^eHwtmDDonCRcQjn@6q!s@%5tF>ErdaA6+oN+~w z0>DxLS-GWe2mo%xVq(uUga9H_GGHi|_}G$tQ~dIGMjk>x zn+clcuM@8|(8n6Sx|JO&m1xVB2Q(YVHLZ^^FURQU<)A)?nJUw#hfJY6lzG4I0y0z_Qe2(C>JW#h0cGY;VQ@aPZ zB9DrUkF|KZ3~n7A`4a3Wv$?lt)Z3YVfkak)+n&As^Jn?T<e0sTb@Jr3?2A_5?b>|GzIF)#5p zf__&>*kKc(E-ttwK764I#2gU_0RluwNzVZQB0`szDAPJui%MKBfJJ#^3TamD!jx4?hsv9Z_vPYI}@1Nx3q zAtAn%3JQy6`rYtYi}4Sj4IX_w?|mEpBbeeL<3p`X_rB~uKsU|lSK`I#W!<% z6x*x?q!GQY{u*uEmVTay{W{;3@xtih`vHK5Y;%)Mekuz9{cV?5fwLF&gq4JT|DI4D z(vHo;%fO`VSVhU2+SjpXDye3uxIDb&viJ-Re6^8MS;h(X9^4|*_K6TO0fMZKY}P3% zCK9vUzvr8=r>f|kk)~^1cK;klm&#(wPx1M=E`)`ksNQ0vuWCt)k$dozu>Ev7%CdDv zZ0K4?^E|WDej$$T8VHR>Bn{1-q$r3g+Zqy>_bKkZjm_k6BUM!_KyarX3DsRAopZ#K zy_dbmyrJP0ow_7~KtaOMnZPaPWIFB8kXMXj&6=-c79xQiyTTpv`Nl8g^WYeRCAe3( zsH>3uFf85al_89Hv$1#k2mGtE`F_DmB*VXH`+KxkkCo8pAKGC#)-#miDehMy-$xj< zhO0OS6)~n_o1zXg^ROmx(c_m3#eip-zVwtNj7p!PjX8gtIF%2vZLSwQH1K53GpR1pg87ytH0`ePVJboO+Vhm-HJzQ zfO&>Tyr>G)^gz$q>M9Y4oT%1&>}vf7aBsUafCsmx{R3Q55uW`nzddGn@al|8iS_zc 
zcu2?UuIfWANtUpIumb3?>477bimcHzUt+D4J!CNT&WYv_s*aOjf2N;CP`IPdgC-*(WaaEgdH^0(L<{wYKiUVDL zpFC_9PJRh5DqMT{4{CnNh}~wQHkNY-G}nT zMJ-U;zRI!z-hb~DFfy}1LJ9`0- z2{lR8Bu(Zd^@&X37`&9zgrawV5r{MMQ&?Xq8aeJUfT0L1g;l#;L_kMXo5XeZQV!xp zRksyT$rD)3tpi+C z(%+=0UNKDn$7&RL!6@@q=3F^`WA=e`@0k^@yD?|bXqwn0XgL|DhkG};_qMxYnCl>HQI17&GQ6F zc-|^6! zN&7bNSCiW8(uE%9LZ-q{LR|N8aopVV*S%Bu-LI-Nm!kFOo|mfoVPmb9^WnDYjidZI z;v^cvdg=srngyY1+N1&@0A#Ag-cXWK$Th14gU-C1G#pQF%XpjHL<19wk?_eD5~^Be z>B#syOzr8US5uky>mi$I8`%|u$+DJOi7q?Z1qt^)n?B@E1dtHp8=Ww@0|TB}m0hjP zZBXRAyR{#C<1Bp%F|C6GZIz-ra{>Ae<8!JQPL5=`^a36sAHn7F1zT{UDzsQ4YSn`; ztAOpfa0HsNtpF|!Sq>&9jvP2DmrBDNtcNHhP~2>DZ5M}m(s!iniz{oBEMelDc}TWt zboW#LO3oZ=Q5qHG9U4Jo3~;|}HZTU$#>mE1zO>Ewcy(a_x(ph2+G*V3n4L;>@b>>a zNb*&*Vo`MjIs~gptX#kc+)`_QQd`*iw5UTR5ps)5i%M`^3DF*3vFX0kfA)$L9VS(4 zR*+sh-9#dWORUqFs=7hzdy7o)_>dBk8&9`=;m!GLLXQMbR4dQJA{9ikT>$9x!H~Nk zE+^HW@hLVGRJ$mQ0V|LXfw358sTjSJ<8(x4)b!do@#RAx%m=wAC5<8N|;(LJ6T z<99S=Ry_XVjo#15grbTyjQc~YMEcEfQlR(}M7cRk)3`ZdV5zJS9Q*DNDPk4OXE(jz zeX1S#usAndVWF%5B^C(f8A+JT>!O_jraWoN+Y}v)?!tAO5~4b)OnHSF;*9*FH$i|I z3fsbnm-{!2Gjl%;PI&@453>46(YJ<)T(gqCEZRQ9{b22NouOO@z(nCKe=>fltw)%$ z*H4)y2!jdML59RO6BN?X97o@7fNsRL4x&a_C*6*I`uP;P8 z;>_B_Rdm^P1R@1a%1;J7*BP*955A>TlEQO7>xUK)g=BUr{AznB6+xTxUM$Gp*pXO}Q1tAt%nL`bdYb|)&w84P4~QFHaSVxn8> zk;n_PmmeR`BC>>R$BX1U%Urkl*uYNg@)@TPFErF6Op1a}1@g#Ke3(A%ZPPR-jjc^I z1*YUR8Qow$sxF4BA7Fq&LkN!hp78)=HH3af`<_5LBk>V%+!6&yeOaI9mOxWnaq(Kg-sf)|Bvo)jt!ZpRP#7ldSGg zWg5HgUVyT^E;#Oev?jE^FF8MFsR=e;AL7H9O;+QNN!Ov?V7OAX;cAgG)icRThEna4a&^%4#kWEHeYm}&+#qXrX8 z;zpl0sxQhy zg3snL*XKSy#vd7fg4|PyU9~#2 zRq)q=;x$5<$17L4Q9u2pQipL`jh-v)B$q8w`-CUy6B#6{B15If0dgDipXK2R7gf=B zDp)N4YJG*AO8?XD072aF)&rT$=5YW~A-i{)^+U4N zwuCqeX#l=3|pR(X%Kic<)`|Fr{)xS9kT$%xC`Hz z>Q!4O|IvN4&NYd1oqLO?0*=(qcb=$n)*qfnfBPqm$8#iKk;!rt-fveYV-A zKKjMF-`E8&a;`JGlNKVmKVLvUpG(@CB>I7*RcAO^LDret{P>m|;TMd(ic~as*9OAR zU*D(^(HOuBakM4FqKy2}%COwpg->C!rD*>dod8-G40*zvgCCB4a9WhqftEr- zWO=0=8XHY63VZikcoB3J7Gg0C#Wa>qcQ&aTml&~k-P@YUGE`izXu!hSbGzjs*G`*d4&m>y+{)A~ 
zc9OH@Q+oaMT)ayee5oFrv^MvcsBfS2Xf%X>_*JpQ&mOo;BYN+6#nOCgb2kJZVuMV) zO>#AS9*HPw_*z*c51ci5qB6EBvP|gF1Hurb=ZBcuzKMV!YeeV;+XAj_Ox%R#?E zDfHDuxIz?fTDV0QPjqJuEMP^=yn)zMy~7%)EOI*ny8k-K z)}l$gK;lr*CCABW)|+HzqtknHohjEzex43LgI5BhQeGkjaLA)OUZ1t6jNV$gH-t|W zomENF-q~ro1*u!(MFdd}doT7VEajXbvaVK5n^w=CmdnCkw3|i;h=}v;$FdYtgY9x9 z%pSW+K)z@wnMUy`(0>wb5(sX%lT`m(UuAd#JnxI9q%}nW3@sGrma3u}KSSg|KQ=q1 z03;QigGtzrtM0m?s2k!Xo}Sa7QeGb7H06zQX(OHiD-zfDA%e7w(P2)Na-h&(5BNpx z8ei5|Ehu>Og>OXW`v#N^9)RIVsTxwrG^H`K3kRzs^1$5O5H{q`!+FZ{^!F@Y&Zkz< z3Qeiu>LUQ=BkO>a17#ib-zEAI?{CX-uYovy2Y)1Jx5=32O<%32y7EzYp5q$Cl436n zXd90*!|gO}&>d5T1@B_4*}ZjjlPDuAQc4YV*IDm%1`{DTDaF9JXtL9nyMRr_0rZ`^UU@{;w@wCnQ20}Cw4J#Gz|1ekes!pME^aw*wLje z-)66`f244sbRyADB#B)S2(zqML=&n7qGuPZ%RugNVTnSy@&$nk0YRVmbW~)?LfiG20z|pno?wB(Tq;)Q?uBXq0Kc<9gBme^bfz)TK$+)Q4`Szdvt_8m178dRp0@ zV6v2z6w{>KSmzMD0yNZ7s$3TC9?^j%2N3;#mCh}R;9yRt6UOJ^7`TZwAW-2^ezt@B-ldL?nbH6=l z=|>JL#uTqHu&*)n9D^8aHY`-i$ZC&ZY)ec_dw-~;`cMpyvyk?$Yx6$vijNr;j~Kjr z!k&8F;(l{BJnuZ6t*`HXvRHoDl)UgA7gAJ_Mkc=%s#+;5dvYsV-My&{ zWv{uf>la~=iGMEp?k(It18>Qlmo6$PD9Mh<4(K*kfa9i+fDrR%d7IkSnyi;}sj?s; z7{+eASUS6jf=(@g1?XNm16a(m>Q?9>uouZwl&!Y@6}T0rzxG;~O>je~NrJ83XZ2Nt zTNtRUx2vIJjJ5a4ZjnTcT$C6(1e#c{!`*He#t=0Ud1s_Sd$ZVc`O#1#jThtWl`VyxJQ7Lg{paw=N1-o)`kbIwzEUc`3qFS;$1cbxB|AnGgZO zjYBYqFo^w18-s{9)@~8a7hL6{;6f1yAaKLIDJJPiNZ!lOTRakxC~_tH?C$H4UDC`7 zX4d4UCn1BZKnR5c4G!V!6G6x9H?_zID@d>u+RnBiulL&2@@RGp%H2>_K|U4#QaLHw zxk%xDeuiH=P!#8R;4B-w-TD-r}E(lH`-cs1%xMVD4cmPCps=$ zMa$Cogvjuca5<1Aj@6>7h+>m$uVQBEh=kQUYINei+tq1_;vN*9J8Tz z&XDATS!CW}iYv<+zNoxl1%b2xOD;y;;E4J7qjk|~u6(^3l=?fsfCt<+&8-okJiL-% z&|vZpP>Cdt4lgg9DuPpwiqBXH&Ug_E#(B{-L6w`JK?FjV=9ae{*wF4%rICsiJLZ&q z8s3RwY<#LzXyJl(k@74IWT%epvexHrY>)goH-BDa4K@~ujgF3Wj)wd>zEw$We8FSf z!l|bUOy*R{vV{Ok0jTA>@>}X84RQa&YeIv`w2@~NuSc~&c@%qGJPHv*YHgzaS+1<< z97zU=8tBvmMgV#jlceqXJk|#vYE2V^c3G&FS;ZR~(;DpaCts29V%VQ>4durEp3K8Z z^pm=2&ErFq_#A$ZV6F0=gUTu6z4K76AhEx~RaOfk%dh>+CY~xj?VVEAKfr#|_tP_T z$!4{Yp6>2kYM+$V#_P7yZBp_x<3O?lbpUBGzM)CgBZ8nwUAP7 
zK(#h(JcA&syi!(&4Hs3;D+v7$DiC+|%)2YfKZ>>_DT414H_XVwJ4UF8@p)zTC>9#> zs<;e>(I5L(jcv|9)v2T;%i0{s+8i;GpRB5bgdrM}OUoaI0Z4#*H9EJaRQFnurFO0_~Vg|he&Wem3s+t84C#>m^W0dvdQzIG$cLF;& z-Jk@H)I=2?Wp~LAWSygA?IP z%pTdnw;?HKIFr?`65Z;#?mf)5Sql$g6DQIk%Rr!^p^=a*Mq^ovKQA*&1XG%n)2myt z($~+9&1P)C!j16S9QANv85L+T5*hwhzU2sY_@(5{I zq`;mb@*EAM0{<4U+=K*~sj6zT<*HE97{b&=0;!m(PsJUE zBx3dS84&tQN0>8GV~|1x&3UxN{>+c54IJMBXGmSPS!#C&Dhk3lftp3AR9S;HnK=-O z;z|HGG5$3R>qRgX(7foaLj@Xi!PjA&Le zE#q6LUKm2)u84gxuZ}sv)9@LrQX)%Ef5@rO1ThBuXgJ#hSjE%;LET) zXFd~1(FZErTsHOfO%9l|R5Ylb2ws9lf&oO4hzf)YXoJ?goJ_#WQ4=#2Y~#|;gcV=w zkD27ieurQ*>C4Kz)qi@*BltfMnoh+=TLbq4R9|d&;N=>mo7;prf#Iql86$!b__`^W zP-WSj=&2Vfo>ggVz{ty89SjEQH0J3PC{@*$%2m2u}ldiSzIlb^?#IC zwozHkV;@E``yhRNf|VoH-@9QQcdhsB-f zEL@Qo_-`9E?cDXx7@h-C4&~|>SL=YJXaG)IfwS8hXdjMo?r!H17DtbDiv?#pH0VQE z-N#@eJ14E2)Gen(P3K-7_&W~fnKs-42aN@Zs9ZL24OD@JW@ZFtSD$A_!EW}*ai`-2 zmZ^mE0hn)l(ryumss-QzprBW%nso*yCZ?={CJzN{e&K*7f`|8rxl^?0V%io31B(TmfMQ{UX;ck!Q& z-C9R~ehT(RF`j?azI8BNApf$tZ4=-AA-?s}zs;>2gU zFI?1F3|l)7i>_jckci!ghvc$kbfk394VMwI+EU6uC#X$Z4;i7!hgWJ?$iSMOBSg2A z5rKV+l{~eR=VsVf zHRy}zA_>85tTz72BY`=F9pQMs_wZ0Jg41)Axz%kwOE&PnTZZwJ-I-RWL5rni&!Pw- zX^(6`eI1D8NJvodrB+OpewoyDtbAh?#pDkyCRQA}bIA7#yXQrbf?=e;uKG`IR= zNw?_;%V#3D$w3Gtt{5vM< zlXE|AsnY`L1w&*H_$i}@3$Hv19-*m%FvP`X2T%P0m>~F{?JsS^_xfwz?ch_l=qpoq1@OxHH z{|VRT#ZH^(f%`|-Si6O++mC+X^lFt~41T$AI$xQM9O^SHFALLXwmlpf0Cn71_81Fr zZ{sPAR7#gWs0UMd*aOVf@#9$hWvN&Excf5czdVTh}b;EeEIG$3o7Vs*PI(889k0cWtk zR(t0vE(@4b-|dozdRP2<^jJO;kNif;C}dvr(yKYq!OS4St7(eOpd{jrJ&BmCd6lb!J!z7H(>LN?SsIIqqtbZctWa4}dqs`=|B2^)u=kyHXxX zP9WS0!l0rwUHtv7Pez}qHU!@7-(Ab_$5n6%Go#FCmhV{>r)s%&ITLX7dt5$4n3yJ_+=|_tERu$ z=X9KX2}dc5+MxsnRy-LACcqN$Gqv*k-QRhe*VP#EEnjW~^n4x70@F;C&l!tr>8eb> z5Gy$yv!)joQjbAT)|vBh#}>m=-|E`p{KQ4vUa(`1s5v>alRn~-cGU?j((eG(^HpOO2LS>D?i{IN13Ir z5h(RxN)rUW zwQrxheurRQ~0feCumU*H*liKz;^V_moMErUuMxQ`Kr>(e_|<*sNBI?H5d>~ z^E`&dkNn|GhKcIrJZvvo=?egk?CxKg!4mrSAKOl^a|VCF;%WHi8QV9;8f&=*#5e58 zDi`EGEEpW}%?tR)CDpxNK`?(GgmgpC|$D){;CSe=yQ^{C3qNS;!YKHLfQ_U^#uH{%qc 
zXXv%_iNi73b!F1tUc@>Iy;W7%NV^$E(@#`HHW{A?gS4K+@UxWcPYi)gIjTT_ZBE_#apyE#S0WDw9u2^f6o8R#mt;% zF6L_XZLYGP)^69)IWC!m3xxhwgIs&-FiB?v^j=M&l(>1dMOed2)Di z2BO`$1CMY&v(bxrV8sYsC2|JXGkrrHw>GXtRDlWGrA;a65P51q(?!1hLOrif*$w30C-ugWaOFPXtU{IAYjb)r zS3Vr7WgQszRw<|`4Ay8*2vnD^KW+`X+7#uMN4+^^ajz~7gTB$s&IDqxTAneBw$Ut0 zre`_C*N*QOj>)tr%?-<(k{};K1Gk9h4P{W{(u}*H$Aq_yVF2}!WSY$^?l!B$2x7o> zA2nLKJMORP7k zW5lpxfnVVYvyEdYa`aqm@1fCs8K)*5NV~_+wz-=^k|`X*PM1@u?VDQ0NRvp}Eb*Q^ z(vuRsTwl`Om$>65o^UVs7cXkXv|x9ST-$_6ky+t(-8y4)TJG3jW% zP`ag0LWz$1T{ZdpAKLjh`6R5B1@o=A(zWM!G;g0Ip00B;uDz_qYw1?>j$eQJ`Imx< zR>WW`8J^TN#$@eeT?YC-OGF(i1+4U~ACVueEtTS>v9HrsI8JH)tSbAFE7aA_b0&Fc z!ZRghLTl6Hpd$ZACtqYvvaUD5<*FzcsVO5bB|-mWBKzzgplWB5w3YL#!_)k8xmB^N zr8X_G93lDPLqu+!cT%yh@1MU`chVpHY~`P7vzfmB1Kp-0kKhLp>0UM1gFF2W7fjdrV`G)SCm6X6zw01%}2K1`~1J zWx8*%zd_izg>n|Sa?u|QfSC`9QW19@V8oQ3H|ch^_oh9}1B! zQn8N)N1T(RirC`4qMc)75p0F-?_1{sXA@Yev(s2EOz6=VFtyH+GXNs3SRcyO)O-2l z+H_LbqOKCe;m#b}s+FmDJWQh`h|+BQ!{ZYc#+ecs|LIR_P9b*%F%f*v>^7PoA3mH! z+2SG^)^H0dhTi~x5Hvfo0lZ;agUnZ4Ne}3w_;erzSx_4P2?Y}RDCReRJ5FnBav^Tu zYYKh{zH7LpSaK9t2H~-zBae-Z{b|SJn*KQ44Lpo;1jcF9i)(Rf$j$Xy@1gIwDi&ML zpT#N3rwPiCnEB`$`DBRIan#I|*4CCpp0d@(5c~)9!orhWDp_{Pv!+rRkpRM=5qLJv z)UO32>~p$|V{drHAsz#R5zpAEZ&^ZqMZN=paxe=8;i3R>R1NvCcmU)5JJNJ24V%hS zdrhrn$i7s(^_0}lo1E_@|KI*b@P1+EVY1kgXbo!PhXSoJ?;QZtOo#nq4JT_!KlOEUF*!tWEdHiA6IW1>*`OJa&87?yXw1ok`(jpX?&!(tbv`2)&bIb}?&{#>g*`3ipUY~hB22VU%El#yqI9Jye1R>Yz zh-Wv?iOR%NV1DaV5rxA&vGU8;&~y)lxj`d)rb_s$`e$s5lJ}M~+61#{J%?{SL9vZ5 zDw6@p>zuXm90dY=PPwEGn5XPwdP`NW8#k)v1ZGr7m~fHNh0OfmU9VKu0e((IL)y$UFx?RpKqxr`;O;OlYp>jB0J$Bqox2>|8Pt@3~-A1AD(Yz>FGCLXtL>?>R%j z95c}kUw^52BTED-BO44QSym6H@KKfLqyvrWFT$qdu-Q7zm{BW^<|Xi9SvmF*orxTm zl#kt_TVx6a8LFP3mpSN9vv{$zIkcOWOEhBK6cJQa7A)t)(5*NoG=zM;&RYJe;p7}oi?F|K zVVO8?+&%vKsk^A>nm`p1Kxw^uG>_|OBRiuSh-VpBPSwu+>LmsxuA-hJ9v;ZqNxWVnSK*q(?ies=og5p|3QZ~#MV#gSjJZgzT_?~z+*C`Ndo zZZs8lZhL#~J36b*=&aq35IrTX|2oLyX-@vUqbce9`1`@&f@JBJ*f#d^x*|#izeRK= zbhkKcDK%Mr4F{Rd6c=So%jsLR*eoLuQEiVIrxn`5pBv}_c!&?jj(xVgc<2|&P4^@} za&fa3atC4Fl$bJ{^4 
z^n+&%F_ZH?Ao$R13NJZxfKO#RBUu6y)kW? zXc=#2D=5_-h-;kUIAowBm{6G{t3IWD-~-Z`ikOf4HcuL{WP@0^ZL&|jwZM@FPP09L z8evGOH5N<;vAKv1lZXAZ{eIpd{WK^4U(-Lj?_OWLsL(Jc1Hb_>v)0y(>**16+ z80sGpj(u+Xs>uMvt>PgTizh$L5lwdUiY=_P6or+`k77ngl%}fS04vU4yk|!~m5ix1 zbueW33Cebj@k0?uZC?;zw@*6*WlU0@Oy=RQAlsQ0g%Z)K1ISxhi-0#1o}rQp07y;# z#jvx!`QUkGlBoRgaVFf=4oVqp$u3K`_*1`fk%tDH_gA3S{xvFYL8PbageCsdjyo-1 zg#HNcsv`x7V2KPwH4-hZV0tMQqhA+aSLqDHZD4m~CO>tY|7{ROTbPVeCiX6#rMs8o z9)Qsr@wR0{Uv$}YP72my{i?wt+9yvffBtG7@Ju(o52`B#A{?S`aK&vF@JE@NG(UWtofHYvDB=riZQ$ev_^Og>3B~QdwSy?5kCsoQ-&8`=Y zI=t6(ig(*0aj{E#Um@%%)X@N5C=9{W!l&`Kg-tA7DEa#10$I9ptj)Ocv<-d}#}eEX zERlbMpxGQu$(0B%zNVVYl;v|MzDNaI4jIu`51KH}45Z`Q3Y`-L2`AhTaw?GOyk0tD zC>ZPGN@6KP)6QJ7#skAW(+x09g&w+%z4D@-N{_Xv#ggCNPyLC)HGEsaWi!IS1hm1F zh1Hc0qN3u4A1F+*1B<8%wg=)lS>0-nV>afv-}QOcDz8!4DR1-Dja3c zMkZbz{JrDu*MnO&T#uyZ&q`coqA+a*nNb3r4{TN1@w{^d-jG21A1zkVkq|0nQsQh6j#7bB)j0 zB$qo<#W5@NcfH=8HEAvPVQB0SqzNS^m4HkeT*v5Q1H9;2000`1^_Q! zb~tqxG`8}{i?%u{g-)dpIl8vjhlUw6bDHCW%9zjk1SivArwZP9wpDRF@gWL!XqQqS z*8S_Ep)f>{)4$D}#xPVzTI`OT3YZo(oa}JQo6}ce)bTxxHhPF&K)ap2yt&JJ?HnM_ zFd?KaPI5ZFKy*{RqYdO(ztd23A#`2f&xs97Jj1p!0XkeRdH9@&IJqnK0kTHPw~n=e zU$SY!QI?&546PU)Hsc%8fPVMvf8iCjJLk1!N(hfk2eT7#}rbU`w(%ZxpnK z?vgufTgUO1c&;;vziK%6k4OZDV7Iist@?3$vJ>|q!K80;Sfx2T*+M-KZ{LjIOrib! zSv3;e|Na4o&baFPVwJ`geR^b&S*uzaMyL!1U`h_DtbIkk$UJS1NhDjA&sM0I?mL1! 
z_NzbYd2;uo;28U-^jJTpMU6=-@1ac{0|&(%gEvI9bC8;D9_+sf^`+I7Xv7oRNRNrk z`}p9-f;IHkm^&1_o$=iqn~u5qB#D|dYyXMW?hvp1J*+dv0LyqoYx(0?XPbcxEzE0B=}i zL{)u|V1(B{4y?nw6USjpO>EvX7|N>{>ylLMQp0nn|U#93kf# z(RQ{%isbViAKV2bwYKx05i@m8iD7XJQ$BA_Yx9gf769+s@qi~lfwxWlCD_Mjp6I>d zdbP~K88~lHt1OCPu=4~sBHTSS!NFoQHB3`*THuYx)L#JrdHnYpi-7|cgL+WRyl+>w zOykY?tt#@}{C<_2Ra}ODqZz$E+k%i`mS+Phu1+DoXDas0SHgSbYVch6=yk>cuTr~6 ze#sgVWKFd)FUg_{jB~>Rt9e?uNqqbxx~PScQ%a9PU&xOK9*@B?*eh}4tq_fBPZT&e zwVGm>ec%^OZsC|esi`bDskD{BXvVd?*w5(h_>J4^6oe+rcUJkSPFn^=v#km6YSFq3 z5MuiDT~`F-IMQnzU>i)3y4dLX07c0l!;dki_=*`;*T@vCyCHMzus1+g)Nj~_9 zgnbSWa>!n6o(;^ilG09^Dag0g(0Uk*Mn?MBB@!Hh7X2sjuAW}|Af!Rj?of{)H~c&y1)H>FZ5Aj~JoacNN)KZiGEpjo zjJ|6BX#Ts&rlFTaF~NDe#Kx41V`*F{oY`|kFf}_Dh007{kF2H>BCjFm)A6+$*_?s? zbX81(1f3Y^vFwPM12n1jjTj6kyTKeE>sBUmO^XsLM}yL{Y8}yNXr~|ga=mPrNOy0( zYaIQ)@z(xy{t_uoAoZEsVJs~Y7Hy;oLYB!N@^4s;CxyB_Ae3n z)^vE}0HxXVJ!hgjN}WjymwkmEA-RvR%FjNu$dZ!4xVP)>U*i&ls2zrfOmhAAlsl}I z`Qoxe%@)J^vY=zoADw21Wew*q)7p$)7I1;evWPGjxCjk(v)Gn;xHKA(fA29`&;k4b z21|Xx;WO7KKVU9Vt?j7ICsUrxEr3r_;EByjIkUL3%0vtMDfBGkda#w2DDZ%;)kd!` zv1{sF3fAf&&_SyTW}Y;Y;dWh=JGS!W+D3YYbtSBnPyc^CN^bs4iY7dc;lJ3^b%Foz zd;AIf)Fw=mO!_>(8G>cVQbZyhf<^XITt3gwsRmG=Rr-TDTs#n>xWut(U+@oIA1ghK;7h5W1uiT)VTigM_IPBq1k) zJT_wG>suWx!1>SZEjA>|{KliF$5fWl{%H9rA;pGKJZ0Ba)FsVYt6R`(@-85Bo%onh zmk_0>a8>qGucOnnZzr6CCyUsc9!|;;Wh+ao_3kg$Oc^FRHjN;~>@h&CIk)-OTJjOEKP-BqDmDm<9vJMC%BV?$vUYj37C3StcD2R^of>{F@@m zjwOu?nJVF>(JIB>HAw-yxqT)8ZnWtN>Z@nTQ=W0WZyDQH1fpMM!Odu5%TKB?~O;)DZRAKZwX@1fUV`yjhevPiDSCQC%a|6Y2x_$;rsi zN=HDuS!(-zvfp_Kjs{qUwpH0NU{eB;D|tMa4L-v$*Ink|M!5~@amx)~1EUCOHZZVb zI3nPor!mSno39Zrse;n9Y~KqX(_U``9<;~3WYz3*)ErxV?^HF=n)(PKyWS8WuAf39 zxpK;kmh)z?D&wsjg$$TkEjuTQK$k=L_gZVv+GKy^W1<3T91?1*R0@Le%Bn(Q*F@2n zp4A8waZ=E7k3~G)V;sm!>7_6jY&!Xah2qqgYQJVG+KsdhQf89mc1bgudDGzDK~(M% z8P~?7EYFc!QVP18$9QYnJZ)8bFVB*PCI6mnVQm*YgqNe@%2lRfhT9wwQUCq#SK`46 z#nrWgBjGe(dzpM$Hh72qbjnrCf(nOKrv7PiwEH~Ng8i}vlUf%zDOI{ze9jEx(LG=9 z2YOz}$meJNW&fb$GJ8)7`D7|C!6xF}%b8ye6RYdijV(<>8AD9nWGe7l8Ata*3!gsm 
zI^?=g5_s_7h6h+pjLvpuE2mIH!Y2?r%O9>kOE6^T0Y3BoZ=A(meQf&ZTu;*|+MV5a zO0ZY1d)Cetjz+Sb7+PQk$YPBfD1Ygrzib_ zKl9-aI?~^2=+5MS1Di-#nN$@;##N>j29MrwlkdpTj!Xh&k;K>m=)IWa(a6=ZY?0uw zQhBMld?&5Av=!s|yZ*qfA44VSvFYyMu6MFR15#;WFmA^MO*0YjgjGQd2ocH-i?_-V z1ji#hV)z$Gc*u<(L}bf!G1<5drzVTHh(C|{=F;;st9O^uZ>wFkLy0SJ zhtv1~#8CjS3^o91$hp}Di@fTja_oz#nKf|HlPBH>Q%e`H#R`hexhOz_C-qhawk8L7aNc-EMeRuHB(n3+nmlH@(Pb>t%u@tY3L>i)qoy?QF{S7&k(5{chZ- zn(e}51c^9K7GmY1Lm6ulsa5oT9)t_!dzEL|z16YuVfWcCfyDR@4VO&Q9~6s~ETRab zICz1oK;^bueP#w+g&cJ;T&5tOZ1-KS_&ToETFe}TuNR7doFT4|`9eQQm*L+}X@Ow` zPS^BELB`z%28QvIOHCw)IG8F55+~{PYi2m|nZmD^Wfwt>1+-Ev1M!1@TG%0}*`G-r zy_e$qr4=~MRCf~?uc56!s?qqyVs*0An~_6pOuLjm)^X~04i2xs@0M8SjL46)zNI1| zAzkQy=WOG}q`H=mdC!RrNmC%6(p^;HL&UHUn*{($0xA1wW3eAbU&eyx5RTg`)@?_T zUae8$rI6hEG^Ib`)l`=sl3NYt*lscs5(Cv(t`al?HWgk>7!DGwcDKt9aCEJA_vS^D zsEq35I*~h*66|zxlE?+1aYQ;)vRbFq{{UGdipY;%5ehs2bx;E2z{v*i8i1*&mWuKV zB^kaJ1Y*m{&d||ER(hoX$bOQLD$Xqt>7}F`17hZ<~@j7u`|cZ ziCQ)qW*r_HL2c1=7=MNjIM(wAJ;N0y9-1mQ>w(B}EC5wi>deM2S4KicTi8j*V+x|ioIHsBT+4%cMDMZfk!DlFJyq>z; ztaY_D6*W@$s{8#5sJ4`a=Q)k84%ufTsuGFsJ|ndg%b}GRVS;BTNeAp#IuB(e7P+N5 zhdC4a-s8c!eqB8LM)~IKwKN90C0^pYA&f=|kwAWwzke?BhFEfEiwNidP#0s{H54lm zwg^C^O%Fg?M&yJuyEVKfEegZ=-H)5FtIGSvBzIOCTEdRz_ zc{cU=`8g3$w`x>0R}Dfe|_#p{6=kvq4+!u?G{Y0CPx zheEyG%l)=Qv+ck}6R0FD<)@TV>x%mHtc_P>DZBgH9G}NHsY-3Wb&Jgvh=Fy$L4adI z7ROD$@X$V)Vb4hSp(W~Iu~Ri(W8f;13WTG_hR~snDT^zb-}qLM1v%#j*WsR=Sas_Z z0^eJ{zY(C-!7qW;q+ zl7E1)I-=lX<4?>P$KZ8HDZL-pGB;SvQnFYF4@I*yjf4q}LH#@|05TXX`nn|H>Luls znD3pBp6QX+o-GbN@Xc}%s{c(Q%irs-kDqdx1{sCx&_T55~B~5;^`ah83jn#2H zAM5!yy$BlSUx}?evdUbiU`<1bjSSXfId*OPLt%UYa%e=s$>`ZB@%f4;fYOUIcYs*9 z)*=2ecgpQ=@ffy3+YFwZQ|e)Ba;2_zrCXN&7hkB`oCsjq-Q>=ROOQ*YTCJWAiQ>Xy5dQJuME76#5N^+&8<; z-ivstPm5gMC0v|O&s^4}p6dLnqtd>Oe>X>ep0puK??pViJc;3e8T36(RSseG#7E9R zff3N*12E9qH-06DyF6H8h_SAMC7KAHcX#e64cN);cDLs0+w%wbjs0QK^m($? 
z_bF~+mwcPiw53OsCL^2pX?`HZtIZLvLmm|swvoMeDSPIj0PpXky{-p&fHdtCTtBid zKVRykwv*iG^NRHBe}KfsZFcsJ z3TpFS=L^a5>iOZ4=^{cl*vQre!vFv>P|U+^*!+ul>>f3h9z590tI&#^&nw;YHjAC2 z?qsT`d8NjKxmoJ!6o@vwm#V46L-ZZ5_3VAu;B1yA}`XUAte%P>`V zE&&n54@5d)k469{DK6@LzSr0GeEB3wR(S46@quAhE0JQ=lJB5Tf3G*AuOc{=WlU&f zClaRom0+@X@-d>9pv^vXv+1(CiP?bB1pze#>a)~*WgF?0bU2wh3=7-0wJu0S#Q~JE zkz;$3S247U;^<^l;@mbT_3cw}X{V0*5jKup{%qK-UsfgWfzL zzm4=&iyysnYYoy*{P;>>nr(hH4gBj|R|{U?j>_WVw0 zS6xYL^7bGNiO;#X70MSA?SE}PoF+>CBWs5q=qH0xY{X0JHr`op0k7rylNnv;5$8d` zGkC-e>w}kfdp52OTj}LVxccC`r^VM|gVLrvo=RPe%YCwR2j%dih{z$Ii+wTB`YdpA zde!OO=OkshN?k-_k!7#%f7-=T8x42yWVS&d$}oCgGA-_Jf5$0C1dvF~YIO%?1P_sD zP`{T9p)FM@`P;;^h}qz3Q>*PgOCe?Zc%PIGvbyfnV1{N};3J4nP0}9Hbfo>ygMa^@ z;Z9!9jp9HHyqDyVj&U2r==c=Da-vCHL?C#|!1vwfk?@xa1nsG2*G8GnbXE`VPk}DX z6CPVj@mT<~vIG(hps0Mp1KrCX_lv7)K>E%r&wV*;w=$`XLF(OzQ%s0B2= z0)`2jTr;hY$vG^{>=}Hf_?;822{5V`&s4hI7fQ!~k&22sy=|fso+N_>rRG?eFGYP$ zGmZtwEM0R}1@HHI^Q_*(e0^8?5+l2yC{y?xhDDmIZ`X!AlfUuP$aCvV)NG0AO;%NciT^Ha*?C&Y&+QjJN#m_~5o zE5dRU^G^Eu(+*H)jfAtMmT|%JU5nUB97*rqW|BpS&L`_W=mpoYQ3kYTLe{8GTNR?N z&v)!hYSBu`kG;eJLVo&Rlqur< z`G)a4Cncx&{)q0}{}GK;`pWjdSWuWP{{TVU>-&GckG?-bU=Nm!~m3Y3<_cQS^O0E#AQ~>IJpM4 z)_@`rh0{U&E}-oGDZ74^v(Emu$s}xwUD&D?Vb(5zlKySUa^+t`Vf=2lZP@Veiv5tu z&YpR@C>K$)gJ4Bw6zitS@?|n~1{VrzW`5W`% z++-{fb>^x34AfyTn@*)0sBF|v$nvCCpLrzLf^v9ZQ$xAgQyGkrc9Q%|#@KsE$ZJD0 zHK8^{%AuSsu3uWmswVy(URSASJr3Oe^Wm7wfhlc!o$qe@y%@cPXH0;A84*nr9=T$r z&;ZK~OKFMe8Rzrb{MbnG-Oa&MimLtpNH_rEZJ8G6*aMk3Jvt0ZH%;E z&R!YNv>hNb^)8UG>yC`+e)fEoQCiCT1%0^m-r=(5Z-lgl*)Kxq{~a?u1(!1!5gh z^3+5AY#=f!&{8aa>e|2imtywfZCpC)wv?u^VihDV%2W{jbTSydKRf? 
zkUQC9R%m5h16V($=IdK7XsZ8Ao(|zC3SKNOJuUA8m8T2Gu9B;R*JE;cL}oIp?sb}x zxv@S)PvkoV=Y<TWHg(5`bNo@%&1lCw-x)36H|AT2|V8?JC$tsS67yVynvnp#-ef(^x2{Rf}ZE@qaS6vm@-|OGupHbv|%5~sH z(*T^7nJSNvV$ql^72XSTHoey{!(L3Rcpa~FEi_Dd-(Y!GS9T90arb2tW-|A$Q2lt3 z#*>6P64XbT+Y z)ydB+bI#*t<`f?UehT1+)c;CAj-$8TxWi^L>dV0Q{r1Fi(#whuIuHbzOR#2qS#>{ z-NKRx)^@{QZx2@%Zfr?xHpFje2dIjqYfnh$u_F@~Z_;a!p#DNmfcIP>X=L*skvzc-70xu0zok*RRS*N`EGLB!-m?4BVp5(IXL zbM8wjvjraH6+eloir!788!RB8ZNw)W;gi&pwFpj2K`ApL+Y&>C@}emoIP{HbiThCJ zBxGicrNrt=b%6G<>lUN@L&SK?)zy&zkKD9B}D|EBX=N0jmxn+a?)oV2{5>q`i zRLG;)tMV1od-)&KL&ZNRucI$1JS#gMi1qjyC0cDx`8{Onws_A;dI~BQE5izdC-4|4 z)3auyJ6u6@cNi~E7zRa}8Uw@HF zM9lBZ_awK5`iAbF8c(Qu!E&ii zepyYBrG}4SF1X&4rQ?VLo=m%WaS2p+?H}_a&b$~3; zAXRz^*Dq97I01^IhL=6J1V8RDsC0`#%iVMmE+29wIggAx-t_5n1}z3!94+*HJ`i8{9)4- z8m&_~8VFQ@HD>Htb?mco$*=vqkmP)i|Emz!k)}%2`7O~Clob7*OMv@~-{brrqRbNB zm--@H5tJD4Ydrt`ACcLfzz8P)cTJABT(l#tC)7k?oXc z0NqZh)cxUaFu03E-LKEaUy1o&FHVVgpDab8+2fja8$yn?jtO|@-Vpp}!V~+O=l;m! z_Tfi^e0%e+DXjbm;P!1&c_Bf=eF7*omZg@DcdypbM@E(m5K;c9FUy!kJBoRYkiEaV zAFE@2V8(No9vTzeV#LP&kf<#$!8MZ~kuZ1D_19gD*hJ?a4~Sdj#ME7{u+_uxKbk91 zg`Mkd^qw4;o~_%O7VJNLlmbcr14u9IGL3-_QL42OW=Mj0rSZ>va|-3XXO;53MphYv zX7V}?K$N{*-U=mQ+0EDlA8O*T89iJxR8{GjUHe5YVuWIjo3qYQUuSHx`vjLbk#Fsf zXRF1(+S8?dK;b|=KS+1k1c#+IEZWimQAXdD9ee+y?NUZI1EpkO1;IercWDuF7?1;Oo!6n$}XOT=iEwC@0e~2PSvdE2IliQ)g2nQswg>i+J!6XYS=6&hof@% zx^%5mUQ9bTZPpf$$`&+n+I1fc^90sW$c`;Hdyd5Ot9X+*)Q?0glu3L(rNaw^$q)mRw%Rr z-LwJ^FyQ(=N|4cE z^AwL3_Q&m>2J}V@M#NF+eyKbf`_+;8;LMY@BA+Nq+3sZ7$#`8^4WDHhRUa`8B}n)` zK0yX@*YaUxB!&lET_ZnwDh{(mkPnv-VHYee3KtjJ zxOnY**lBR7r}2t@&LFGJ)bkCwq+Vk2zLTM-l>$Y1FLt}WmFy`diCIvq<|uPI>#Xs| z6~E&zG;(8+$U#MGPnz#;Fi;Bd@**I#_BkpufQ`9=9U%GMLU8^ zJ0{e;OIj`;?rtR~doWexAE;RFOI^*E?R2eQ*N7ZnVb|J_YRrx5^0C;2B-giu->xXb z)1lW+vWDf66z@_vU>;bw5|7Q52B=}Gi(2M|Ifb8**Cd7f1H`(u&$m;(JNI-|j=+ft z^Ib64+pDFnWwfvmAsjqaX;oB*U>^{=+D7q%K*eIEDjc8B2e)l#cdb*|?{>g`x#vN4 z2|by8p8XoYfe*a!q~d8NCO#V#*5)6Ax>J=tg{{h{W1ah^DTcWf->b8*j2nilEbZ#w ztpt9?{CUm{EYGc5SAgk8gMJUbvn2q9CX&rJZb*MotQ$jlavCiP;Sjk;RKjK9J?meS 
zh28J!D7p^Uxk(s1-@59@m+t4~X(+E;e7uv&Q+uWFNF#?IR~kQO%4Su((0i6slgOmh zuCyNSWUiSW?|eAx)R6k~JwCFeFQ7}G@>9BzF^l_BT&K8IlUXeT(E zs=o1xvWjw1cDFj2?eI>O(G*ClV4>C{IqFrA& zrZOh(MaM`(zh>vQ|Gmv{_5)ww?4)X`8dC{0FRogP7mBO{z{K<*=-yg7IPlF4udnMz*8{jx&NEvvmi~-#yzx^FSha$ zw{+G)LPp3vVJC)NNvW2o1-Sr+}Vep{OG~POx>0n}EJ!s_ZXczKmD=14!H(lf=w^65Ngs zh*R=itt2ph)3UC z81ZGj7R);qO@-N%t0X=_m&|Y!`WDeIC?8xIRlIR*o$d)Cw~=_&8GqA6qIC$Pe0m(r z!q8TS^EuHQ(5HKGAnIcjkX8s)?C9*!Z|NUiQHNPHA*uC$vEC3cn`zj{-18tgzfR0> zK{sMxNC&D&#>m!I>hafKz-3qjyC@F=M`pR6A>CZ@?r>v*wFz z@#VZqV<`KeI0&kz#!q?{KQ!S!MQu^fwz!rgp{1n#mckEPI;0n%g*F1OyDCC7mFa9W zj_~c&PH4?(aqVUl?{EwIg~B~tyLn)BYzWMJLPDJu<37V-$z>j9IC6ktv!GoNE(IAYu$nI0MNp#qaJt@!fswsIKo{~vlizOHj9# z*X*5u!S6j~I*Z&wri$SL{k0%ux}UycOuneVprMdzk6&k#7$2>&KQe-2G=J|O1Rap< z9voxa^|tX8LfxG8PFXZ8&hx6X{8cwsy1~W<+?+!bW`T^O$ui%up{Ec2NvmcxZv)P= zobg}d{=HJ05G2E0v8VGW*CZ~-;K&wZ!ynOOD(KRxlHKqoV_4)25p-eoC&gLnqk5UM zGP0}ZXUu95921KoamubV;S(S0!1i(FGK(EuGgchMzbfA(A#4P3VAp%HK@pPAkNbk9 zdoS_MV}g)KRE@fMQ-AC9{FJm}1E}PFakd%{iHwj-je>5Gx#?>OUWlAhTF`?RD?d~Z zZC>Et8GY`z2Ri0$+U%VdAF4CZLh>8ADASmuY>0sNM@I~*FaWO5Rj*lx=24x{&)Yhv z^YOwf+wZhIgFg4Z(3l_3>yvCN4Z7(r*1$JQ9`s(Q(@~Gr9g8f78%v<>8P-C2$0I#JcGkoy|9o7^s= zAcj;!M-sJdwsH67np8r)bJ3eiTfUic8%#Yh_6W>3uHkvuZzZ6kJTtZ_cE0!OxzqwK z^m}y}@0#~H(o{Zuw2T#zw3FuF)7>{zk`RT;m9}2LEoP(OYqYDOo+IkCp5DYsGQLu1 zRbS@3%k@TUZ+Jgz0@tSXB~^H>l;%4J(<4!<4n?5&V|}j~m4?;&<%T zK_V9gCHmT;7*lP)WI7cI^fMI9P8AwG)KVV>^bx&bXUD#!`*P_TTa4EfAcqIhvbTR6 z$W-e$6F_~G8U`-5X~cSHnBMXqb7Z2qDy52n`{IAZ{g9`9SBR|&H5if(78=TYTIE`G zcXp2iJ}z(7pjmsOfj`eauZ71`Fs9XPiNb{XMXes&qTP(Tjq1i+(_np-jrrA}Zs)_^ zK@4D|2$qIC01JYO3XqSTc}#Z&Uwm@*)Sy8N{K^O@zn7hyEt|9XAYGzo#xdIqlc~Yo zv}d9r;T45)=1YKy54l*&f()oxi~0r?l7cS}Iul&p@kl2;`~yS>g-twwF4``P^a^WeQk~P-)HZLZ->GCWjbusHiWud4 zjuw})2^;&uPG3*Gf8&&XnGntbRWP{k<~kF*!x`_3-t((827UiZF~mt3NOG z9y}*#X4}_huaTGc&M~XBEo;-*I8XoZrr`(yIFn-r z_IvwsFKxoKU;Y6e*?aKz0}Z;zi=Y8rS8a*tGx6jNTW)-Ltp_ONNb()S27~ei0B?NU zk{DS$+K5Y`_ZN#*X44G~7thmC88jNLXL}A&db;~pafv{ngi9tShShS){bKE00Ob7- 
z8_&cw!eyyJ^6ypSW5@ph$4}JwFMc97~QrP}ySBm8GAi{o=693_4 zC!}Mnc3g};q!zOKcN>)$I)Z3^p&4nm^T(s)GGGU~@I|eY`H)=~R9-i_NKJ_^y&Y{vk*J{ZMO|nK)aR zA^gz0n(J^SP;JJ=daKW&jF|>iA-G=kQh6H!MPiK0=ZJ@$5Ai1o__ z2XEMYo4DW7gZl3dWt22l;=fi(x#s@|*k*LHxIK%X`qQUa z()mtiQ4d4doALnL6*9HhoKzSS5MR+UUNeQDF}1l1b{Yck1+-~gF@e`%TJFT+^= zn@PTi;y@|Ujbyz6mwx~fE$H->tF|PL70wUnNRY`6BbcJ_GJcUrd^1NW9hom z3c5(=vNoC|&qeR?OfYCyYV~e?&H4N3yS*cdU!}S*^pG(c5tY&^h(E=u*EJ=|v>_A~ zV$RAd-pqPCizmK+k~y&;u*v;8;rR5P@|r9VcBETwyurK#9R+CWU}D~(z8d@9tU?W% zf2D)?XbY@oS{WcV!?vK)E%ZBC^KmW(_M30G*~{O5*v*YA9~12*b3uF>j#Kb&0& zaN_#-!jJ9`VhY`N;k=K1UkaCiqhb?$vaQ2)_O9B}7pS$|4!q&ffEagU| zB^;Hf-7E?=_EVURKZct= zOvl(&@Vp>uxhcA`5}kxd;asHi|AV%-3Tx}_x3$qC1&SAU4est9+@0c92oPL~ySqan zxEFUQ?(SY(iWDgh|DAWeYwf+hy}r|ZlAPtbl9@T5pNw%2M9H+5Fn10gE0zI{qnM!( zs)Nq+iHZ%Gmhm^;8;ws!-!-T*#1glvz~ENPv3eZP5**!DhXN!E*@#v|t5JnM`o>?j z#6syXAt}*|r!$UhM=;HCuyK_N8PZftAk3XB6fdq9LuU!AQE)hhyU}y-J1K7O;DIUb zV(Dcpha?au{T%y;klNlXjl(?>CM5{uvCmTR;Hu4z(4A5U~0zaJNA@58?Fn9ua4 zllb!e12cj2Ugb5O6Skp^kFnCpG9xgB&+Ob(9p}%`;HtN$qKLjHPdKK3TRg>|>0N$m z7t-qCe0&88MDQ6)W$_kcu#hJ{lI)risW+^}|2DW6rN8!6fhbt}eN@zyosJ{oM=^t~ z1}0QVQp^a%W0I*N{yE|pLDeSm#f67AYldiYs6Eg)cuc}1Tc@Z*hco#HTWm-%zQg7; z#zr?6#f_jCvERT)9gC>g2a`A31E|Ux5}V3TB=Xso>Y3QFmOQ zUe%1;@V%rTf-=e0RS^LGqx$5ACSjowEL+R3-OaZpznz!4*KZ!$GWCwCx0>u-qmjH$ zWwhnT8*L%Bg7zI#dA$03=+L6{q;|_q1(mAfBi;YkKsnsxIC2U1mdepf*sNmdBAc8;q7S^cbA1Kyx_IW+&PAeFd%~kRuR@EG?_ay)rfzzInW0Qg$ zXzfdCHDQ{3_N!5gt{(`xR_mvMkcrmpYD4=Qrr~k9F7a&xdO<2gdb1x<# zEnTv#^jKH^oI9UDFzg&d9uF#9Rx#AbY$tGf+Io3tZGH5;BQ+x<)xSNrCpjhA&mG@+ z*9y%FPyb&_-+xsi)Wh0?x>@bSXrQfIDrbVzgPb%{HWaB&9n2rRrOn!!D4kU(G{w0* zl$|)#wNcy(G^0xOZRdHW8R@QWP0$I(M~aK_xkVDU(BuJN!SX6IB-DDGCn-g>IPq=H zOn-b>z5h3$1pY&xUdh7nMp~H_(bf&*6Ndfmldx~h5x84py2~J#5&j<7&pg-JkBd*m zMnd)Bl(|q&u%nI}^fL=CFA|KvX(c_go?E~Vhn;BVF{Xjadqwu?B!ZT7{3 z&1dhg7ccIe9To(;9kf`w7nucv#zQ6=E>@e-=^vQhOa{+*6r~wr!-fBl5w*4vKKf>gZ zwyqsQ%3fkvmM4^yC(1GIa!X&mSX`2yCqs%LRC%7PG+o%9Y+L2g_ds_VNFOY}D;qIo zxI#zERGg-OXh=+btoHwNB(tlGzvU1-leM=(BLuGn!a12}H8_gsn+0PC9?`TT*P4Hb 
z+F$Egms6ci935~Z8p)4F-`**yzyG-t`*R&B*0Bj9Upu@IC>vI2(`ht?{8D7vjM5ps z2~kO|4jCtwOHOpkBjw|sAl4!ssQnARDPSQ^6l8ANi@RgWet*E(_x^rx^=_|a!a=s^ z(VYWW?WEm*RS&uLkcx0S9LthEV5Z8E1*gT<;Cz7}<&JG;BCu<@!m)4VXO2mR{4Q;v zdRn{x*`g6*shKmAY5gGE6Z3=eXVIp8q?qJjL3{xPnKdltiugG@+@jwIP9l@80gh}l z67Fke{|LHWDSH(BM?lBYivo@(Nerm4Ln$`eN;T74o+#lkTE199ISJ!~^RJ&2lBMt! zonChaRik$BWeXnHqQ*es4g&9x)-aNGweiY|&|`r?sA<9dI^h5{VjMTxms;t0WuUGnF1q?Lj(5BATm*H4@-a( zNghq=l16y=O(@dN{z0|wUrzF`3CZds zP-T8{Iz~d$Es$=HP=gEnkPl6x`RgTIk;IHjR>SO#XknjNs$P^~Cp}(=eQ?_dfI(%< z(i#6YR_+>wQ2M*n5r{PMC<})=z(R&fC7X6rx}1gm=i`FlO(VKrI}%YKYQg3)#~=b8 zIBOs%t4A+yJd3N`_Hl5eqi>MTe>znQC`glG9mbiB8UYQ7JSN-m>=4KtdTC0Py=ZjE z_03Oa^7NeKU5n4-{sQ?`rt}((Sqg<`^66uxa9u*~NZau-n7}$lvLlfO``U@>!Uc|^Wo_U0%^S70 z4D+GB^S1t1OW01~oS4!ttiYPN4jInClrUI4gR!PF;!aVSaTs@GNkHj;H3=PED_@kx zXUzVAu`30St3_eCIJjF~zn>nmBvqu1;`V1|S%<=iG2A@fk#g^U^?h5l_)M7YpP-SE z&O+8;?Zna=tLP_FU0iLXWk4^msSf~G>qz)KBVEY}&Z93V+e8*@spdH4Y|Kov`Q8Q; zh(^skf8Dt|x-^OL^nY7kXf2yPdLhwhEzBJsivH+zMup`v zk7@R$xK+I&sxi*_^tTj%QNGC<0u1sDxkFcarN~II{jxvO$u_Y04-CPl(<=OLuRr$J zdf)ZL{u=%oB&_YpO3NR3k|RH4kdAC?5o~yTU!7kg$}$R4VvI_+6^Fk`U`vo#P~mOu z-3sS$$+9{5%jj)B_nXf8X2WZ+b!am^b_E&~i%ey%IA7#NsW=Urz!so&L0=r%HtI=CStf!(+luzM#YW@?l#7x^?bLZ{au(ccb^ z+bA%ze6|vx=BE*QFSZFf6M<;mNRL*}(>*Frf_UyZI_d#$_^xi9dt4??errEb&=9D8 zcF6L{^fT0I7Hf)Mw7TZcPG}i=B!8mi#2r3uMbaPoQIcTmDL$`CF)>X~g0YDrfnTkm zFNd1jH=3AONs{BS>HqiY>Hh88Mz}oM*Ob)h&mX6D80TFk7A7>nA_81Ho1KZ`^;7i6 zMrByj(nI}KFtBLt)BFSLa*g8?o3|RNgKBu2^*Aw+if)yE^@{3D3#{#%Ecx^&C%+L07?et15n?bY;mh&5m7QV!!!IXx8F{Bb$I4y%p9W-l9i)Bo* z6*I4oCvUl=pmpUS4*`}s!!kxwP?$&B!>zm{HHkE=xFi#ct}t*I({{c(fDVjf7Ns}~IUOrulnD z;37WY6XC6@Q#0bn%S#Bras`hOV8o=VIVYE=pjj)L@H8#9)&sV&`4r?{6itCHp=#3A!h@J0+PuHLkX- z`O=Iwq;~7-m(Pw+)ZmW;&|~Av;ESL5Snw!u?M+E3ynD_apr5w1(=4dO&tru~d1XqW-c-IK-0SGMKXlV1_>=i9*(+xni>o<;262~qLy=ggpn?j9E)`;$q`5eC#3;v-evTXSyFlU&t zxKfd!p0qR+pLN57vC-IgOtB>)za5|B#m_wyd3lk(W^U(3fr~U+4)-Rvr1CUd|0hGevKyXkMtDNm_e8_hM)fGPEDd9G zc{dfRAxW2fXD8-@c-)R|2XlTR*}B#^Z#*}d4-5g0sjsipXj1~ 
zyJQ@>;jS^HqH4uy?a&s&Yqj8`NF=NG>Q~iB>Pv1wq|`EI5W0sYAmfZK^w;K zh@RN}I7j{o#9wzuW!L4~h55QPH%AI7q3*^ZCXGnU$)Kh1uq)tL4J!f_s<2F>^|3R> zUCq8HPXtX;+d_+{>wr6&nE)wkbdtx?{e!Q_vpKMrgM3+W>au#;>UHyPdm7c$5T7L$ zqIhBR+tay*$hJ*h(PFXn+Ct@N!R1oYk70R<-2Sn?L5LNui->-bHMj~PsBX2&cq^ zLGgY1&3g4O`xN>>I?)1luL;tXa|S9aLGZAKxq-~H@Gt8_k1fbp=^^4cSNp$@xR=h9 z|8sa-i8SzVw!>v=;xNw!A{~-_Aqy$Zb)4|)8Lb%uVe?HjXEE`F;5j~U?Qk5lXcri+ z^LSMRqeigK(YJKOhR~ytND-8IxESwS4o({suUXkHPo2I|2Uf0#yxa5kt)^$49{*g9b z@iYM&fOp!#TLuxcI|Rp1I{Ko@+m-J#dYX(m<|#m(C`~dVz2>FX?Oi5Erww!wvwJf* zG&jgYtwLTL?mf7)*dyO`^Y~{7j~6{ zTpB3L;xI~ZYUsW^`+EvQl0_{Skh+Sc-8;Y?5F_BuE zyoiQOaky@2=iSb;U2(m=m4t^L9@PiZ&?+mTai2eBzZf3g{5TODLkv zopBNtIk%SrlFMekOs8x19Q3M+`t>-z4xK`?P4>TYKC_+pJ@{Pqm|f)^Pcym;zL6z= zW39~9=XNCiGAH=Szd>^N zEpbG{6ES)U_qnR=dD)BWz(AqfcBxj`)W~`-Gin(7CreNtRf4%-Y=#0P%wxZx!q}Q5 z=@#+!<+ffbyx2SESo5fElFar;=mS&)adMc_?aqDb=7W6mURCb2c->|^;KO&~G&Aio zA@F_fldzA}N!@hZz2x=D&i9ups3_9+!Tl<4tk*E5VpcJyRF{KaWe3l3kn@H%x!$xo zf9fKJRM^j5+(dC@d}=`V4;VEiw^AV5EBY&eyl14)NwEfLRtvlq@sDOW?0(|-3>K|A zfuAz23*qUklNqdqb11XC5ctJeZh|zfp=6@&*fTJz<;kNWDVHx&{MUw$m=}tIe>;yc z@)Z@~U8BidQryamHLG=^nibqNc+JMh*}sVO3gPnG%VqVri`u{T9mOE4de2Nl^B06x zG*vR>3KH1PI>O&niNiMeiu}W0Uyn%|v)(XoM~X$GUr%-{*~sFa{(+2<0Z?-Q?eCS{LMgc0|@*~DQ#%eRlZHg;MlDvoYD!Lk|e-T*`86;@b ztkHE0i#MY}q}R!5)Da;~FTNi6NmG2*Tx$kj-u|AeVzfP_XqVY->YPlVc9dDFnfdqf zRj!~n3p4sOgZHrVJR}JkX-Bt2yClDqI~NFMNx?rji^;!5`dTfh{8<6FAMQw0CIteR zd-)8bM%{LNv26bsDxXP8iJA3akPTZeKSzBdPR6ro9({UlFVC#P8^}ATg z@eI;GFuCM{?-_o-pIQEa*|lGBz9fqaxX3UQ5J^fa7A zirHEv=6Jw7m=m{wI$>=b&^kBZVg zR`+Bzs)gcE+~X~BkN*85>h-iU8Baap1xIk3P)!V``iDvI0L}E7-JPS22^m6F{G}cc zG0ud8kpz`ew5_!H+{(}K^N_%&#bo~Vmt>#?y|Z9z#DtSnD^b!F#)NV6j2WL)7$6;c zBmuD%%anX_AqkRyjoL z*8^6o{>e$FpPTFW`5kFk(BbJrheJ^)sp0}F2O}W3s@}QEdcu{@IVMy`2AW5dJeK$? 
zWsw&)Ix5|eS3O1X_vQ%yJxU{wI(r^OTSF%)86a!4rAcQy|q)0Z~UHq z{R49wa{OifDlb5$_m#!ql5}9M?tNk9O6)N|Ve6|v5zK)h9n09#XOSa}6+O3X)?~&| z#7VyxdTH&VdsdhNIhUD@>ZLv&Wj8H~D49@`fORs~$;Qbnx}~qOVq;v73f@+8Hy>#k zai~8z*?%logJ;#T0!5b7aVk({rcur8pCculY{=K@WBT_?>i7jCw~kX)cDZ^oo0Ahm$Ln3!$-(T@|9kEc6RBi?tz-1YA+cR7;_n|T`<-;0iJ-Gz+~ zuCKFNzuh|B%H6i#LRAcM`I$c7PX2)@;D>x9@T<8sV)MQ7x4PQFr=I`q=6||y_B~vq zUe)+tazsZ;Qfjipf6EbT^}8*v>!bfAM+8Fb=}bA4V4zM#m}{g2*CmX&&PdcfbiH9i z7FUh77j_o>CDC9ddZ)yx1A;L^?&vJ}r*qVVRCZE1P+qyesrTpBi$V)DF461YI25~4 zeO6z-8vIiW6lF#nEMjz3hmUlbfdz6U;6wn7yQaQ~I)0Zf^R08els>*9RcZGz=5g&g5kY-we>(4xH-1C9OPK!$M%MS%E%DZ- zc&neS=jN{Yw|6(X`$8#^$0LC+eL(U<_DdQQvF3Y#sISnK?$HCc4AdCO|GMl$mB8kA zYm?I$oVqL_Kw98!UZxMvUtCc zZX_x5YO<(Pdjp%t(eTb#3_G#B_5{H}JPqf{sU=*g2<=#9F8D;})WMEh*HtdMmT&aR zgf>xlNDFBS1WOyEl44q+6y-(qq+1rL5pGMPi5(vWkNWK9bo~QyTL`-cre>BTRwx4} z(0SrSc~(p54lz|{3MgPGr>Q7tXAC1ZhgsxYZ=U=sP<*>NgW%6}mk&!igP7J~h2P(>?7R_jm%UOpx&VuX&iZl<9 zMxHK^0DZF2!S@F+%w*m?m3VTq6^F++P=4K8ve_Ox&&U~?sS}8=m+jR|PLAF%eNZT!xZ!P-=iLUiAlcC{x)?h;iVnsZ7F`G_j6~qy3kr@1=trN4ij8XjKWvG>Q9ubv z7M#>8(sFf#@g!g6!d{^S3EQr~(@2=>G zEp7@l}K-0tjV`$Sp zRUQy0Yv2CaIHjSFp%G_7zB3E4GaCa}yMYDwv$V7k@0phe)CZ7bB`EWi{o*KA$tLN# zCVl1;lC5c)+0BXMhMp{tIl6YQ*y5!zxyL~Yl0B?n837n2=`v+j4X(o^%i09~tk*#c~rJinwk>Eq0lo1e&jLvab-&a&J3b**eOP_sc zuSCY@NpAqry}sKLs4j_v(BU+q(5HkV&l;h!qOQ%QES6_V4KxM}={ku8^T+cxJ~jxx z0+fA2qfzn*npBWHPJYrVRiwI$hLg>F{!paWN7Z)fMlPnBszSGr`b@(A-!2@K)%7nF z%@GyLLLui>nT{wzz_8dt@^WyqhJy7F9@$tILOQdN zkg_h=oa6|X2y!@&1ufj|dww?!-fcVj!G8mFL*@AI6`aPK+`Ke=aKx7Rl`rQNCPe$_)1&x5q1~bDG!S zk9$>EVq>TtZ-jBFsWd&u4q2rgg(D4{mHuGxy8$hYv>lLQYP{y+cw)}+ZezX}NXKNG zm8J!37X@RMaz+#Cg(*>K&05!ww0t2x683Z(YCXa8(Zv1zW^Ynvk$^)WL8}{;qkz`| zw55|VrAU%|lIDn}5Z(K~O>p0!VTXn0d6hFiaAF9@H@iYf1}N2nvxDbVIHWUFGa*tUUbByX%af{|xp znU!B=$=@F${(-qH`s)x7d3_FST$%jtR;|YPqdbA}koo>8)s$lO_C&a#C<7&R1ar24 z%Iq9=pZKDYklVT;?c|`Ofp&;D)R8(F99pFne(0UfAEDNN2!>-;*BXZI1?*=_u_;Wb z&TxL*2$Z>`mpkWGo1s<@$ViMLd(PBnd3{Wv}8wyy?#Ib5%-yGWi%ypAHPB7?!38#^mCf0+wWri 
zoJlEub{N=PnH~u#q+|;eX>sei?HQ7C!&GnIf%#u(8+842^NF^Ar=EngS!LvvU<+ir zwdz}I5$=h~`IvIS*m%}We8#T5k9T^R6U$d>p9;3x>`_q_#s;eo3PSFIk;nya(Uq-w zJsQAV2>h@A8_h(Xc>ll0Q5&R&^eFfjJzGWvYt#e<0A7*|)`E(S_i}0eCzFixu|6f< zgHTioDw2bXh2RB_wNX<}qgqQ17-2sFKrSjUZ9>c$%zaK7DxSBpHEvieL5jrcE@ z>~nB3Ca!Qi-z)_2NT7*&E=-a;b&m zp%PR50`v}R`7xQ5bO@+_w`7fCfZ_sf657Zc?q!SZvaS#SA&VJaJ^5EdMI zNgh~q7*cRu7Ua++aLX5n@aXu4M+>+uM+IYjY8OuKjd9!RlloB^=)#lOohQI=sfqFD z(c?PDDpBI0r!MFE-H^%-Qf3>htgp9GGOl)HVtTYEhGBE=Xg@^AS)Y4Lo)6$X*=J8w z?yT@Qt53G9mp&1;JoxTr`<7kVYLssP$nEZOJmEo?^ICF&S!ohyFZqQaE;?NJ^vU`k znCRN=00b8jM@wYaR3wM^P-r|^c&}i}7sG@Nt+!cdv?cT{t7nHOQ(EqKPGPy()wY5(xgch*);-LidLW2l7vPc4goF<2FOGTci^v&Rinov-%)mNVrlW$ zW|U94lOle%U4x3)WAa>=ZPR_6o&MQKjL-2YO5rP#nabtj&q$1qQ5-`me)e3JB~-9apL+@|Wq-&~uagTlfA^WO zNd;oV5hgO5W9qnFxFxD){v;VjmxgbRbx-1BQL5xBw#-tik=18-V0a02Bb}X7RKZTM z7BZ)?zCq=7xNt+nu3j^5{Q}vQkpD%l-ZAenVp!8l;(|mf;?uO{P_A96oqikt+0NMV z?_=q1e|)i){YedRi+x2Xrrz;^G8YLa*>+=|F%YCRbSoXwB2Kz5fSahl=)=XKx~d?T zCr#4iH?*L8o=f8bC}++ND&q}$a>%%Yj)}T=|ABcae>3nQy{RZYdTI7+){LMWgqav^ zY+b4Y6vy{)!OQE)k%}2;x};v;;*GCe{3bCzo!rcP<7{=esqP2`wI@8!Qm()wzpT5^ zBXSQ-S`IRYtY+D+R1hL1-MCCEO}%`!{oWK^T!E1tMmEci{HZ;9oPe5s>zYnHjPL5B zG3pSlQ91$|IM4PWdZb*k-MW%Z!g`s2~{()F1t)lL4m2`#T-FjO zcQP_C@J&kaMXOb^y)u?_P3=}Gt0>Jyh%l7#-n4Q>f1uGBB3V?WUE+@T;SesZUtSh~ znpufNN4Im&Z5z_%(*CG-29nH(P^e5Sp1?DBGS;#QdSPe$bjo6eoz;6bBqc3qq(xQ^ z`)zo#C%b`un=4?nz4Mx6>`O$K-SCTlzRf={G3#%Npd5>7ma=^F&8*wbQz@Up`Lxp^ z5wt%1*btOWYiz1;IR%3Ba9znyf~H^(S9^TpF2}!VN#O#K!)@!6OT@-OzTBhZ>r`3+ z0LXky5(hTQPgqV^7*yCY+cWAxve{er4D$97N8IWv*G8Q)wmnWdLlJ>9&gKQnidJk^ zeYRPjwZ{EW%dcdOp()z_718jQr-Y(G+()gxBlyot_i5&qC^4fS9YyUvqJ!)Tm^E=K zm-gLsRdXRELr|4cW2{YFEA|!9kOoI=1@mOpV^0aaj*V?6Jw_9%Zj5K%!NFuxup_Ck z1U46!brfFz8(lFT(iwLCyg+7dXzdY^h0&C?%=70meJ;HJYRdyv`^C*>#&jp2u1 zg@Ws(YIsP0CBGm(@3tJ{D$wc65)qL>$0TFTr$VHFah@;}7VR{*sVhAe28NHmPlMOE4~h!AfE9*b&=NpO)k&+B^@kd}LeOfjg6j0|AeHUown^ z+pG*{R4fG@`GjR}f!Ny>v|`?pJKJ#BJbjrf0;BLjhaDBw(ZyJd zk*4ezA;aSL!523jUH3aTLs5p%nBe|q!5Qaa1eVm=b&~3*yiw)r+m7i<0Xqv1G)6P{?6&qFKh{he^oWP7RwQz<= 
z=Nk0oTL)X2+R?f3$)SBC#*ZFcT=WZYC4P}Lh3v_F`$*+o)HS|7>(^X%&C`?m=l6_! zjXEDm!$hJ;BDp0WZ~U$ z#0bSK`98;fx7li5S=(DLdN?b!nY+aem$F~)9@TXNds!>}2OgU6eEpcncB1sLE`NmXh}F8p>wU zg#$Wpe!Z)!ieY2M8UFV8orq5Sg{OmKs0Rwm=|(^h6-~4Dr^U z(NANJzYRn3t$iX&W5Qug+I09q-=jQ|ej-%1cx-8DF$5wLK;VB~l5L^tK6cu;C_7bT z{kBR?NS?f>FyX>wXB-Ta&|x1Ft4|!#!bpj{_emVo@*)Y!mz;85BcmVt8;h~dn_y}{ zrXhzoTU{v!WgM+Z&*}B+NAVn1Go~S^fgADEEVVaSHH73{0vaRMUXYS00U)h-T^h;c z)P>OrlGuwR2q8yf{wn`51cZTlUFuEjcZn*q?Fp`POK9wzEgv3 zy@F=1HzNU^BBp{#nR* zjAKI}%ib7Ax3^BettDT*TFIOuFei8dIpl7*U!7sS9Mi1pjBhf3kq)H5)}Wl;W0ch)%8W@35K6c zI`cf^h@c|DRxm$o>?}=ZN`3}K^%^3YTgv21ob=`00`ZYTyuDdOC z7NTc@Cm2n-zHWUU!|oVrt{z*J%KliVE1Qv$i`qhl5^#q{GM`a>Vv;2%%B+jAV?VO$ z>YVwHvSsk@K&iTV{`(mdVwk z;M?j&%Sp(s4GzA99)m0+m(?PtCGvhQCbQ1UTd^TY!M@qJlUl_xICpVVN4B!HevF?Q zaK;82z(TYrV=d3(RX!pR(u-mox7y@(fP~A^5hcHa$68(&NSW;WJrpi=aj@va-IDRo zznUjYM-?C5F>Z0G^T|;9oNw`{?JoZgQoFBy(Xya8+TC8-2ZzCvTJn2pY5~baQ^477 z3a9IY$aTfH_aUrvwvlBl86YjAW(HpM^-@lZ`gFm2r|z7MQ<4W zped;^!TQN!AkXfHd1u)V8c|TXcIb>DGs@L}N~uDy9KYl6$oMXswANS43VP(E4E_9p zP5KtSCa^gQDM^Tkm)Xp@EzXKZpH-9BB$kXr&0`8%2L*VwJmMY`_h!~CI;=&1OLkHU zsVZKIpVwYhXR7Q_kexf<)4_J)=v2k4N&#sbEt`o z1g)*U9MNRWlD^<(QCOnoX-X@=Qx~-EDFtse)G7F%YM$7#o^R6@aF;3GLUpalOVQVp zc^;h>0l*rq$|ScWgF13s&U$ucbuMlvWs1s%%< zE21i@t~b;Vok^X&1>Ro?JGHdctErDwavFSoPUPX@c}z|rN5fSroY$>!f9us;rOcc;_pcvR2-|W2CHu!L_~X=ku2rjjDw_P)6$LD9NnUq5MHESbJ(V6 zrEi}zg1QMUrmAqH8bkag(_q;cRksn8_c7UGVz=7Yh+^!C&lfzs-91&U*lU7{;jRp6 zf@6s%YqttFF%enKbEifHF@oVsi++fe)>H*#lU~I|eG~o}@VR48XkDU57~HbQRnwnk zG?3%oH&gh_(P4qax#|UBDjz$~Qk%clEhEc;(}OU$7VGfRZiEVB1-qlhvDdAWC~}a% z;J5I08kT9j-bs5Sfn;mG@8)XPs9RL@AT+@uLhbb7uL0f@aZs2H%oyg`2ebQdn?9As z##|Rk8@GU#71|s_lAi|>Swh}~;_4}BG?vt4<;AXgKnrlR{ipz!^RX?D&qDd`BTNTaUM%!q0Nf!j??D(}Tt3@xfOY8J!n5FVpKTm^A~ zrp_Q|1_E)2;0NK2c$9nhIS2iZRFh?7v$``LZe~nL$nTH&ZR-c>6_qSh=Gkd-3Au1K z{B$bPn%b_#$?2rAG+pkU&c3-J)m0pbB&X7e&>jMNbAIsbRxr^uw=BgQ*Y?C3z@S-9 zx;r*WV~=1mV0xdcwX@D4O~zHPSAX+0+s7bNfUL*uz@Xpoq&XCEbF4TUZ@{Zb+f7Ht z1Eu)7;j|4fMWWe~a6Tpz&TMM3kNLw=DWFw5-F!>}C|sCuy~R5|)*zlAP9;GH%I_+G 
zR`Z13uNZjoeZ-|B;STe_SzBP0QhO{2=iErBXf3TndtPa|G8Ym@((JshfQ_1mWW{6+ zO0m5Si-i#~apOJWAnaZEZqi6SxrNg-4sfv`gs0sc$COA@Zr6K2rGhGADPE)1|!A!pFmSJ zR6lXtE6&KJW9aw4@aaNHz&zz#7{inM^Tc(zZ*o;#Pt;y_eDU8 zb?n`-6T)ND`|Y5{E2zRV5MQJq_9^j(S2@*&ZknG_Rv%lRW;&H0n*Z}RB~}L0=U!1~ z6!iinj?kBfeDVHIYAZVZRkyg*_10<9YwNRZWwFEx3a1kBm*U0+9 z`F)i(BY>lB(DJ3&Lo5_}hFS?5bJ55XD?SNwNzyC>77Y9>SZFM<#YYfmW_@xZtUzwP zfbn3;imAPo_`#SLiv4=*_euV9>E6aqM}Oo%3yj!|=;c9yzS2TM{Qnx5>X!NnJp|aQ za2D89h+iad%yh$XRMpY^en=FmbKtdcR8qWIy>O~fj%f2JX{o4hvyRCa8hwpl{btEP zD7`+Z=^JtJRy)-8cjITMF>JaX?c=e`9m5qpt{M9TZy{~EiW8dSiYzM0omhwYc zgpWyYHT|oIM{UUA_u@e0)UXxJaz|9P3--VdUDVdow&2JMNz5;#|c7A-|mjvBwC!afM8EMZiHc~VK%}I*9Wx221w6@ z?y23V?Z}guNM*Bq>f%sD!6J|{9*+T=4zCgUCJ2{9u4ocY@QhPHmEusje1qLucI38+ zq*;69;kBFAQ4b5|dy!sMy^aBiCf*hll$_!k5=Hk*M`b%RTrf~NzQ?Ofs(c35i7+%> zN&JLYnP4?{4EvBy-ya1_T1@N0YX3Z3Fr;EB)hHB03rYFf;JQQPiibKEtR~GV(@rXA z?DM|Ol&m*|>0q9Uccv(iN@bTN0gq8w=#Is;rmtIjaB#5Io|u%B2yPs?`LxY!!$#Ka zbug^1ou)RdE`!US361p(m6rA@Ty@z?(CFSuP+RtY$Qf95TQg&W`k}($bVA3B;&9R_m{+i=7UmriYb(i%NfqN zDd0k3o+yEnsqzOS&&gwBI#yXr8N$Lmr_Us&Bh^VK1@~4GC$kw1tiXe$Eel`g|H0l~_NmuexB>G-X;0Elf2{Wf=i^ zTh!L3vURqU&r{fcm`k=aIjXq#BIk;3gQe5azRFf@vmkQ|4yn_z?Q}}eBPweKOVaRD zw+c)_oRC~HSjOcJWC3Q+HOcMSt3Ov+D(j6qIKY?3E`CCseA+nG+Cq~#U}O(M@fC@F zbB$C;=}3LWGzAQt>j$qfX?PN;Vc`;(#c#i09@@5@7IcJOhFyljcP#yzN}`tpSS-J% zI8!i0Cz7bFN4+gl9e|V=eA;s)s zFLgZQ?8u6RX|GIPNI%x)Dm77G4D%XCz!2^$h!R3FR?M?@-F|E}-QgcQLQB5o;PKX) zu@|sPS#Im%xriQijLcBfq7|AKq^Tc;F-CU9g^_#peo#J({}piV8P~k$Sa!*3BY3#U zm<=#x(syaGuF=;TsxW6fTdJH8`8xVnk5`pUFvZE}vnA*k)Q_BH3Svw=lCxf%wB23T z<9WBPlM~ht%p-WX`0e%W=~~O;5bG3nPCZk;Q=xwTfHN+#j!`9udmsGw2q=PzP`a?A zw3e(?MzFt0(eP#*M<^!;3)S+E%l4!Hlu~C*UlO-*mYspYeY!0|T{H5!t))G0Agv1T6{{OxZ|uB%k-aUUq9-eB#BsgDu+j?K z$w}$@;$s1vX%ulvVicz2f00mbkfBx_~NPtzj+|Jzp*so7cjWuZ7Uu)GV(2TY35@{``vQ+)^m*bhtKK*>R_T(OK$ECFtIgi7xn%+3o_NF%V`WcKC zFbRvIG@F*g!{%L`{HVld@Ai%6`g+lL@`+3*@ zc{iS|wX??h02${PS8~nFbI#*9erOa9cN(~M{-J?D2kwWaTN?47;pIJ3B}B9e&XP{C z-oKt`B*Znl1v_8!TsR1xR?SV9h33HB+bj*#9-{Be7a9}tvHn51izN-FxsK0xn`@h8 
z|6y%N*ycy6$1-JRkZ$=zR}wlYDo1fKl7SeWE)UmVxgr0V{*li+R;vt{s7 zBc4`7kW7Z&u(Jg>TrXzSmxUEPewof6RbG|Lr_ACeNGgv9+=`geZ3%+48l{>{*|sVB z6~xVP!a2rAM+M@Ly1I$tQHdPeMesUOONAS;QR%%VF#ox@^Kct>#vt5K|QA;q}$bRm+5c_8j3LFtm5#HbkNLRerG?sD@1PFJ7L>(8lXqPJZ#rd z*?~wiCri-qvizSbO%;oS}`b}EK z;fsRei-Jk=S-v|Bso$kB3ja?;OrOF#{5bCbch~e{!NZbpR6KNocn^M=yKnxwEz75g}xF+CLAe) zFyA_*^%003eVYQH!Q6RX{0GH2j_3~9egs$eyNQj1Zi^Y+q}g5cyZ=Qx&eKrj+Ygw` zm~2neuSpq0-AE|q51cGVeiBT*7HEo5P6_St&}MdeIiNsp1?WEl9V;|Mvxq%}uK z$Tf3^Mem5_#=V7aF1=o6@08}n=@9QZ&TN{8e_>_Q7ciMPGpcxJ#73;`*MdGjKjOEk zzW=@1|L@s%%dbn*F*UnxG;#{P6j0mwi~~&fmDzoxvq_ovJOW)dCQNpJ_hWvv5}A~b z1*|8zTC)rfGnEls4{wY8{Sx5vr+)1BE*Os7 z4@$Q_ukKPXDYt1&bxqTtx!@FrdmH3>b|m0oZ$U`Wl!+^f>hA8AJ}wtKhbftANrOnJ z+Cn|XBzA!3z8hi;rb}++!>Q_T+~b?b0w!k=0oZ#?UK~V@;D<%3d1pDJQXcbI&`Va; zh0UsaS1>cq$owq~e)cDaZdcD_g-u?*|5xAStW8380_9w6aXzmfR|TnmbNMn*U+$}) zpp%I81=fA#r8}|um=(o)^Tr+Fv6E?;DmA-Gy&UhB9iP= zr@G=XC6ZFNxV4MolhrDng2OO!_8_%_8oV^9@xp#GJ3YBipN@!1p9NjJ?xDXNhb~h)YSXiFn(Y^b z>nY;}z(xq9(3(2q+jqsD{;tv*&5K&NcK2+X8MaMjd6+md?&k7l`@iq2)b3Nmpg(+?H;@-Jh2wfv2^=EpSG4= zzj5a!k?P1P%>Pu_piio)Rqk9km-+Ya!y8PUcVaq8WX5{H0n!(lFTe#G0(k;>zJL1^ zBg)*OQFP8BhE(xMcHN=Q4M`4VM&SP0jA%Vsi+C(M`gTnmqk8(%bqpditiQ}zt3Q6F z{{BxV^=&P4aT@bFsE$8*S0eEfK>%O;yC#ASGMoDTg>&eNx*=`LPB}?v^~MRss>X!$ z$W?7!f6TDvhCb-m*+&(fn35JY3BguJWWXx5X1NVU9S{Ci zO-vw6*WOi|g~B*ggwZ-#Plg3msB%R*aml~-ld!7F)SUGuOrggum(x?7sipeqgWv<^ ziPjL)XD0QDK^gFh8d99g1r=Ejt0Y=q;_7|+Bi^o(Q0TgDH=%C#5^dY%C7{a;cV18` zCxiOv$y2W}7w2W3-6dc!zJr9j+hI20yV5#WXX1j}eR{%rO%fZ zILp=DV@(iAWfo;wa8sRnOtQ{Q23nj$%kD*-Lz$2=Q*3A`l^;fMWKksbJ>1jQwybdi z7e=1(Mxj{cOvV9946aTaJM}T;V5B;4+!-0*EdN&3iqnhM@Tld$Nj{%KR9ueRsHHb7 zq%R{c(=s9|*Zsb7Ii~n>nB4fJ)9G%4*mp3r^SM!L_X`q4wNdw4vQ_G#SZQKFWc!P? 
z%dKwzKG8{*E-OrxorIJWErfDx4Ud4oVFtc44FYh_<+~D1+rr0{K%VoaBZH$F!wnwC z%g3eU@Yez(ecsKhjvjZ{5k$o4JonGibN`@FwEnQ(UYIw{G1-x5pmj~R_S=GQJ9*VC zg>^4(NeGZ~+B}L3?z?_F;lSGpx{6^(MGeG$$%9i2 z^$xyJvTK6eopkK@sE(Q^uAH?1^f~!VU$OJ-&2`DlSt07!9WJ`F%lPG2H)sCZ@Q~!G zT+buL?>>|JuBMYIr9iRM_!}m6!E&b#7Hlriz1$y}BdbGB&>;xnYu%*l@JdXl()N*a zTw`qa_{^D3Pjh-|k}YkM`TmMv;h1-MJ`?ghPd(ok>Gf`BH~5`^gV0=h&)W;!^=Ti= zBG<&6IIjMdJ@|;S7|+tWiOYd5HfF;yyVZ+^1|=$+2Jh~bIVnhNUjt&rk#z2N>%Iv&g-C&7C6!+#*-;&9z4^ zbhGNs4>LIOS|QD_HS)JXPnU_LIw@++jasVp!vbJY7h zX>?gt=(9>bvklNeh^Q8th9=GK0dsiBarEErsv~0J{nN$bp*ZzFX1;s>sVh%3$XM)+ z_J^XKCz_{6=WXqWqIvQDPe9}5tAl9znSW4jkp4Nu4&#T4&LWsdj!pmlDn=_}vT#GG zR;|}`^fB6ZvT2+{AC--OW#Gv`(6%V2IQ2@pq|4d&KH(}5zRCJA?e%K)cTJ1id9V*v z!gaHXvYsfvgbSTW**ltFKhHze(Y~)w8w`lXw1A^PTlmTun>kC;Qpyc>G|r%&Z2Po) zy_YThH-fA&$vzgYIkOmo1VEv>EXpACfxSHs8jVR`aZc7LX-!>T@sGX%8@i3ZBp!Rb zBcv~-rAz7z^iu?RX{4gS#i2WiI}6&NM?kHeeZ(c@J06=(GnVk2+RK?wpLAN=LV_6B zoZ1xYC&wynRM`|_^Ahx~#w$1~6})nZc!9%4Th|IxWCZnCcs<#Fw%>ig=Y(bg?w;0@ zcT_liFKaZpvgMI6r(C*_MfEjtrS}{eX1{pFP@6{H zN6%8PfUweK)g|5^>%>e9KNbRvXjWOYMz~m=J<(odACtSFw44%QfLsx{uVX1GyrlGL zA$&?g8ou#(Mm4rux|84Y@8>Fm-fzmjrguonL9x)W&;F zRQO(xzZu8PlMcKXEN0UM83-g5O*8##mpc9L+=pPA!$-3FgufUppT8g%*{lMJ$a$$E zdhv@&EG+*B@v?K3pj8GoT{%%u-e<=k=};`qj4&lG)nGHIeO}C`Gb$Vmd1{bXa$X3| z@<7iqHh|S}Z2e_3wpYnE!+y2lWLc6#OAB5UnsrsS+VGTe4LR*aX;_i0< z{6{!8JZj24Z7^3D+P&4Xg43X0Uu_-~9Q@ITEXm;TL{^K}%@B{ON9+TqbMF)Me@+*0z39Jp6v%OtNn66Il{}gxGr;>^<<{9q(nS@=S(?Dd@^K@s>Sh|lM$yg)ivUJFrdnA6`~Gh z&gFI>v+N@Cr2S9qX(hc+2E&?>IBIStuS%~NWNuo_zaDZ$e~&2aEkmEdN-15C1#>I} z$=al4IRM3>o2XzfmQsaId!W3)4pEdv!5kU%4PMQ-0b4b(fX36K0q@*Q&^(zsB@7@2 z2enDt9UL4surU<{O0otiWt1cxqEChh6~#zc+pk_n$t<13RFx8sRqWu5P+)S5)?`w z!qphs35h%*(iO|8Jsj)Gb!}qQb9EN5;MG3qEQrRGX zTd%q7G|r<}5y)CN?AFY;FXgMzG`5oc){AqtXVmxb`m8jo^yF*|ApGxFTh|oHK&KyD zUUdK5Y5osN*e-XnnVNC&k3-w>j#0@#r5v>7(uhTB&3Mv0-c)%&IpRPdPL~)ai>Zx{ z_lsuB&G_-o@MCVR&53Hm6o@B%LNSg-UH#=4TC-om7AdY3=gbOYKe{Ytdz6^;L$+X@ zM~AXpTaEIj;9rGyyKe3e@6&z>)|^c{g5Bx6)Z(=Admw3w%jj{4GY&n}WpQ{!HRUHi 
zn!FK_79yoCvU8XGn_6-_QW}!-ZS-R2n;A1$Y~}=`oD2?K2nsEq^Y||_uxyK@W|c&f zMNOJ$)e(S5fm3lcE6>CalY{bz9~DN3ioA<5fBRWNv_Bmh&)v(fvB2#GXp^d|UE=KD}IyHxyT5Bbz5(-oMyy$m65f+pMJu2Z~I7!F>@uOH-z5_8DI;wZ|B;!x1gP`{OY zz&ZTfJN$clJE!ceh+&gL$7XY7JO%{xlV!`Sem_rHyJt*&lq2;RFlkGX$BH9~71c#R zA7Mwg3(onaaT)shIC9D(zn$RmNP0kBf*wv=`kxO2l}J*-GvxvM#nmBOJz5m0^4O`LnYc(o^dP4X<28Z^{WcP zNCN%b+A67&)$SReoTx;cRZZ$;=7De}QC@aol08{x`EeE8^odmU#-&!Mg@l>X&MXd9#Ep;FwxjEudxy75N!woQ=Vfgi8EN{-MqEx!)z! zoAi?trk;l)$PdOVH;vB@6$u=nZcJmdEm=veRydBH?Qb0&G@z<=a|o&_@j*6=w{Ru z%-NhEpEWZ-U!APJpF&)k8cm$9HhKeNw@TB(%MWZ}28^W0I2m+qF{!*L$hJqjmWIhi#*}pP z!ylgfsW;DRCYyLsz91zFnc%wxM5ph?WCb#m#D`QO$(ks+b-tFI)4rHhp_BZz7001;?58M)h`>At zERpy&)owXK22Taq6gtn22lF%-jWZuFTk_JZJ7n^0nW4a)GTRJ6t>wiV6Ow7Zhu2>4#)i!@-H0j*SoimVkg7l zmwR8k<9Q#p10=V%@himC-dM7G_>e$4|E5Q};nsMzxytDwUzhgy8r@ZL<%D*X*}dNZ zq&5~soNGt!ENM~ET(D`ol@~C@w_k`|n2Hua%<0IpN10-gJwK7+tOYtQ8VUo0WXQGl z+TlHzMO#8dtu_e5;f!XGE#Ka0n?YWT{;NH0779-ltT5L#uKXXAq&N&E>)Qs>n~WNl zR78Vc9NsRzd@Hze-S!JCtU*qlfu~?}f*v za&z95mj9UkSzHb(7aa$8Yxy41>4(Gh8|P7II*5BYsOI)R!2fb~2RsoCd$tygKv%!( zxjdsc&cYo&KW*5nu9>hQv z4VnM-@WPk->9_FbQ%S3OLEbY7LZGDo(=e&80MzYIkGU<|=Ux-`xy4h!Y~zy7*W*}B zHwu&L@@kgv%^9b8n0Q+Z9w4$WCX6o#_xVxdkj8&3l#sI#1v~gsd-@HG{0YWEuo&uCBL5%z);)!|DF}xDZ|ZVMzqqw!2lP7v8%!wMBEf+*f381B0(pwk53|DN{^}XxjPt%<>5kVE)?;0WDquM5Q)sC*)Z3YysC)F_Y3ZUYw zUT!pF$Ow`~6B11(li{wsvYoL{+2)2B-Hg5m+Z3Kl*eJyos@kOIWM%SW8Q0CWQyLOYHmOQyHx!fCglsiq;2HK0_SkA0;MHqlgbzdh! 
zoDU`tNRqjo{nZgaQSh*lG$m(5NDqg~aBY#bQ?&~lr>Yj;rqdg&#~`qwwDCQYEnt5ZJyYN z*9;?rwe*dF^Ew{Odt$(01Pel)v;rtkiTf{&3K;+aT2H#I1=_ar(JuG!vQFu15j#zJ z;O4Mk3)h-hAXtH1IchLe5p>yRRy+kYLIF#&|Dz`(HJR1ggS=5Vh{et;7kL>}q`-$4 ziJh~Rvo_i4ph#7W!7f{ZUg^&egC)`()U>zzGC9_}Lc-y%@Pmt{*K^3;6xON~PDIIn z#^en{EoITaJ|rZ>6G0Xmf|s}K>ylhqlaYU46Y?`T9;p?tfir0mUGdUh`v+gpFd#y& zIJ~0cZ}}Zrsjrv36THf=O7dZ`fw5vi)Wz9Y9v;c}>WxC$`YlU$r)2f(EMi}TO1ucQLk%T}E9YE8_^ zOaM<(Vk`~Wun0{G(hX_?PvDyb)G6WF@cX*9&tDB`z(IJba5pj)TGI3nKcj zlz)Mx`y04F#5yNM8mJ9B#UZIo#`77`^KMbRN=&!H#=79_01Ci@#2+O^1zie}sa*7Q z$osK1J_`s0 zESLzDgLMg)Fy&tRwJQhhdc$REd3?NXE4$0=3FAny&4SC+UeRRfm!EJ%I>wX22CF!9 znVH4KlQ{?|MLstJXPGP(VDWOd{cevRBIEI({cn;UT7-qhDe!GH_&E~BHHjrGbr^)s zV6;ByA1i`jJJb2E@n%vw6(6i=dZES%H`8jo58Mt(TQ=#w@G*r$$S61Tn_2El#Y=1^ zv{(g8E<@mq+ZCg=zf?qZhx%~(icB>r+kvq#<`otHaZOQ@H+$wVP{!`X{+km(Ke3iZH}+Icpt?oGf? zOQgxgm5rzeWJ$jLjx*aG-HegA;r1qIBPG(E!k|mqgO$;31m6HIEgqs74bc7v zh1&OXtF3uDD7;6g;bi z*uj;3+e9Ik+KXSYF7)Wx8Q!e+s-GopmMvM8a19t;asI9`DQhPf6^9|$6AVTtcj5uB z$jXrvXwzDwT^i1NL&!&;M^BF**9r5elC!iOAYonA5GLC!9MsC|sJ26_MwyFJlgo!< z>(KEgq4lK0R6Pn6bpQO2?=R5|e?ZS}U|JLL*8E;BBk$g)$3jX{+HDisS*^Kw{gc=v7s;V?eC}50C6K-YB(h;wC>)?axJ};JaS4F zFmR}fP#mlVVNbhZ$m+a+ndw2yg@f_3J@(&YJ6Z^n0w4R)$*M9*1DSBL0@Mv2QcUD- zBsobeER6-6>#pv+ALHpM_1Vikoub`2guxgrvFfkuo#hcsl71ghv5r5hLB8q@A670tQM=y$bs#6*fPsWn?jyNFO66$F7nF z`NlhS8d!Eiia5U2_X+)=_*Y)B_Tdm*mYfJXlL-F5 zht4y0bb5z*v{l^uiKSl$JHV(jUv%meV+rew#6b;FUp=?OIqYA>r-?;WucoL@{?x_y zYierZ!%tKFeINV0d>(%{@4C%TMs5&+O3&;H&)Gqaf{2qh7>a+0sV)y*t=iDO?AFEgm5bYCA zlWKnBloObzEi@tRArCv!Z6so-{$6i1`=fC`FE=(y>bCS%daT{o^6@#`m^^-Q7R)2! 
zxX4wYMEoA!OrTh|D&73kvQEbkS(F z>javG&sP1>0>qEAxHe|Q%Cmhf{IffU#dpbCNp39YUx2WUhG3%f17{9?efW4ZinW1E z+5Rl{Rwi@&QKf=DDe&F#+($6tuh&n<&m+HfEHm<6i!;`B-M!FS{iD<0$wt8O1@U_{ z8QS#n@P?IM8*eWs9$s1s7yTJgw7XxMRC}S4*_J_J#_foM^7GZgDOX3$$Wz&0+diwZ4W&SdTanw$6UV?R@s_)%s}qY)9gtriqxpP1 z>&4y_RWPjHe01+wY=auK3fTfM%sW;O`KUgqI!?q#*s}@jL#}H}Bx7+L)nEw=eClk( z9nNbbD8eHZV4CQ$pp4Wguw$q~PWhWbF~H%8C}+HAP5enbx_sSZ?g0?9X$A{Iiu=k3 z7wcL|zg2#6<$^3ATk;~cTXJB@U<>_;YjJd1ep;yyK#sg0M8M>Rd}8#=i!>ErxM=jR z-fWe(Gd|^BXnOY4$(4OJBQ1l$EwwGvoo0tj<*I7#O4@wGRX%!Zz%C_Y7>*T5PEZQO zFFxL!%^K43rLeuq+ zM?UoNh`GEI+IMH!`t^I20zFKHx!Q#+&X8>MuV=8k(opGzfh}xbu*^eZZUgvK`2pi0 z6B%6h{Z`_tE0@*3-rIpLVlpjbE1F3g$!!OmSoGpwIOe!M}YW ziMaC-I|51LY#rpggTT`{nwrP&M&yOlwv!5^RUJpCA+c!(C~t^M*4kxWwXt|w98S2n zNr)t`?W5On&dRbA`j=9_X)#3YOhJDfEHX0Jd@;4in>!S94BDPixb&>kEd`pbHp=ha z8$aVLT$AiL+;Y}ZuAd)x_~5XeUpj4QYSU0Ep$L+zZtXfJ!PIN^ zy1g}yd~%iMmNp~KbL7nv2MN=(D@IAiQI;|)4x$dTe)e47q`L?WSc!ehN|vstBD9Z1 zen~@QM`M~bHqk#U>Njo8S(`J{brOOpF`#7^Ycna%#j+&kVqPVfmRW#UK{QkTgTfn- zDk4G$V6`mj@#9}H*av6j8RC{}{Tw4yt9Ep__(hlvIesi_&DC{qN=UR(;1bOH1g#a& z(nh9=kx-7X<%~D!gxC|C+CK%XFqBq%XCONQe9K+BTiZW(y=+P8?v~@?ZwBJNA&gj&k&~`>yXrUb?TKP2#g`{>7cLV;iq^Iv>@E-x8 z_Z;mXL?n6PxdB>AgvM3kNdMHQ3!A?NZ(J@7YSKI&!XMJEdf&mf=DA{|w{I&>Akw|J zGx?rpf#Nc}6$(%-t;XNo7e`0UhcsWd=csxyi*Y?nL>|5axmv<_yLk#z>+=b~ld**aeC zORe;8>zYM!+;CIs-*OgTCj&cg`Au!p;;)QcDgUf~k};q3Klr5%m*+3q;{JuRHnD z#%Q(!y~wgoRb`b{SX}uL`Pf^nCTAd)WZGiv)@my-+#v=(H)MK>ouy=tl3 z^g+i@IRCB93K?mAK%W_BJ*P0*2Dz;IfEtMs-CMPOQbDGM%9Jb}mi&Dyu@j>$spHLF z$wKkd*LX^Ld5Fl`$y!(r)r!$3(WG*5wvYDAASMY4PQ>|2eWVAybfeul+{4$fAw6(Y z)?y&$CBS@QqSt()&1#@g6Gs{v+ysPIAmTaYQQ%R;NPd!ERyUruj2*lT@%tYX8tud7 zT_dw8Zr9S5gb}&Qx@<+9&g|?#hC<3J&L%4mP~zj}^F6e?66c1`vKImR<{)ry=SMj& z@UBmG??j9^p)~yj_B4O)xE$zp!#3YsQXtJN1`~D>s!B%ec@TH=0vCA* zejTmSlr#tpb;TnLxF|UiIZZ`9qO(ya+NLCsQ}(o{G;Q1jL{RIZ_|Qmi(hX&IG}?5@ zgNUVD;#Dvi7j;D|6u(svH=6}*`wGr_UDu@hZ@_)Wnts1g4p1aIO)%hY@-oc2=`xVj zN77?>%wi*Y9kB>CA)5jjz^YkY2nM<5v-4M-rUpx1$Y%-ij`korrip&k7zm{Z>c7@> 
z+Atf+^4BlowKyWe$Fh_;LH+?XXKAaSC^Usjn9!2HI}2P_il^iTr(luE3%YMZb=2V% zZV6UPbSv%Gni<{RvZM90Zl{o5LySiiB28Xgo(1r5?zilKr{NcqY+`I>XaySzQNIM# zmC-Ez@Dxq!*M1?;mP&5o`M^%4hDckA7gMAEk?QQEf|pwuj#LDrF4|!z_OJhM=G$&J ztm#Aa>kFH3*BEOFbH@}l9;ZpcM&io!CD)?HZEq|%I!IAYE1{=b%>IyF2#CwZ6)k@D z$>0psP<6UMe%2n_h3DfF;Ir@P6xLy4UTO)RwHfA+^u77B1+=o5STLwEFEJ|v5V1^n z!YVDfc$Xw~R~|(YBADFwaw(%3XmOCl_c&4SIQ&!-(tD!|Rko|Ms~VSqLn`RTgJ%Ln z$WA1#zGH_8=w+~Gx-hp$H(G~w9JF7-LCZD*g2W68Owv8RnXwyVb} zU5kI3I9>Q`>dya=t3gac|MtCXjuQ@GZ;#teFwfOYT-A09X6|OvX5{iR)(cun7w=t6 z9*I!-oJGueZ3HOZM(}OLPP3xr4qD+eT?9o;{_b{3ESq;a;0!#sgB)rkxPA+1iYf@N z>r9Grtt1?n+?FCng!T^`S5o%Y?ek~cU+(~03%3Wy4R3g<3WlzeVQK=BK%J%m<2oh~ z86g@e$VMI|20Kabl+k-2dm~XOsVNc7;9Aslc;WqBihc^kdq(=ZMi}@mzC{zB!MDL# z;>ST7&rq}|p(*s`PjKD9upS^@ME)V`hU)jJ<2b+#^!beq(DJiH6hs^p` zv7$wRN^oLIl(|jVcTHYtPt3`?bM@UEKF>udDpM@kfli7eDnKGSv*nd;=fPAy-MIK1 zgGpx95>gbf_MK|^FeP5kLK(95cV#=GIJlCjXxWQiWqN*@nD>O$oB{cV!cFYD&Ro+q z#`(THTX9TcS`r0SqksRYP+mxmMe@3F4y9Bun)sFtoRi~H-#Y+`@@TOOHVez=>*Of0 zr$Mai=yt=%6xEUQe!Z-6qy{_$3%>{n!uWIAQMxJ!t8o!mc(b-cM!xQigKkeGJE4~Z zoYc^H0H|niT5*r1#)qTVGo|`U@|1d0E4jhmEB@W)+l)X9(y8o4lirllE_S!4rm!UYk}zMn1WZ} zIx>JRkcm{C2g_$Or3;&sAtagt&MK%@I{actj$sEF=gc<0m-V=(`aq^57{RX7`<+#m zoCDinF*iy;^W|%=u2F6?asRu+vu3!V>qc=&v{s(@rm~b$x5LalDw0S%jO3uAYcipr zFiC%Uk`WvG2c-!d^!J;mNl26198AUX zMf`s*GlW8uAdRW)Qc9Q}mS0TSUS}gW?$e1)h>pHy4D@@*h+EArCO#)Ax8pl|y6WQg zv`(QkQ)`E4c3r`C!APQH)h**@wms3Zo|;5R!+2Mq{G{!x`jX|0I?_XBv`jNji&-|2 zPLwJn%$A9;3=cT%DKMilklpMm)r6O6ui~=e=DGSvHsV}Pk!L8Pt#rd;{zT>atBX8=N7sSCO~RhVMPE^p&dVmf*DZw7N=8HFn$(!-QoUFJNl@uq0GS8IP)? 
ztpRo!Qe4&Mlv=7dNhGYO@T7@M360sve0toYK`I8%h5pU%K8c<=dSE@FT(pyfVCiRnkcx-zsZ67EA6X-aR(_=RZnV=j!w5x+W|M-w+QA zSMDw5b`R-qbKUfB+9+Fl8)U(2#XWM$BnvgJG>22+*;vC+H_E9mtAuitv6<9 zDlJ!x8^}z@BdjD+y-^cucoWRsvJWs&?IO^O;vMeGw|X1D?HTcjXxim>&_wWmSVDWO z>#h=_J98*)t#c#ag9Rw0*y&_zY)9RvXZsEgJgcDKN9#WZseU{hcO-R)BSAaT9F}zi zrI{Nq1g~wRy>{Sp+UR1|ptG-*X}wvac!&?PW~aXOPM(1+eF@Wgw@S2w;^g<7NvMZJ zD%jheTO{lFxanw`C{f7n)_8b2Ox+}RMr-#ZmV&3hOta3Cy2dC;GrtfKmu(wUQ=E_2 z4Gv_ORpQo7EwZmUU(b`5$R1nhx$8-Pr#Rsoy3asAA;b_vi86^A5sh*{*Xce*AO4T7%i#NU`HpAL zUzwV9JHof{y&YooPIlpSc&9#8kmr@fd)lqNX~`1K2AVWkq8X~zNFVZmKW zyEM>^A6}Gkz>5@Qswo^sD~b8L%ez#SgtvP9Sm!K{4%l|l9!|l>CB_{|2S`l^E6GyY z#J$IJpWrp{mX9^dQ;pKf5!SW9;mrVNzFz_W?yms3 z(|`-kUvReMB~dwVSthCD?Ui<4dx`1fy2Fo+HiUkYrv7l9I;m#*uqXDz4)?^)wRz)` zq6u8ht=$YREbi_w_d%h+HXP?fH-eoq%*eyGg^rnCOu7@UHzn-~wj$O__q*F0U{=|w z3bZ669vfpP)4g5afR`DacK!fAn)FSgbpT0y9pDKLI+uUNy5qe{5)Cdje*P~FB_u} zBNe&kSn~H~l9&~|h*C}fL(5Gp5&9%{DaJ|JFTu=#IK(RlOrY*sYypt#{IRYH+N@T3uG$9^rcK&HRvCsq&q53(0cfc1G*fzn@q z51C5)+mWB~CDaN}nPIwP#VaK`kucDdAsCzR#JlFo<&0q-*XG?^$uhGlfUndm{{r|o|uQ$sYBOc9u8WHrMhA|1nE(`P?e*Dr2A6KjYE6pE$s8xlMV&Wo71l5#<3Za&;1g zqj_HWZ#v~m({=4>XNnz3(}>x)YNV&SOoq}8Ej0O}FV^2|f;T=|_AOthA5oi@=gk>a zj&-Ur8%2z_cpt!YzDy~0T6D*f?hVKG*yU8##8nd&$Jq7`%`?u2#%v!54q3IUG&yYN z+U9m9d~J;Bo@-{-O9`n-qf%v0MPVVS{CR0n=~a2WVW)W_y*6j{qc_~A)5boVRFd6~ zEU9|H1L_LWXDa8Yj2D988q6}*73%v(`SF4&xXr*`f?jiM->Rpy)Kk-v;WZ~pYqEP? 
zRT)CY8JaPLrCW(HI9Sfblbl^~h!2x)BobTfO=X|R2AACZ`;>2)ZWaLq`mNGICfzza zYBN+m6mguxNtO~h6~rYg{@Dt}Hbl}_?M~duZdDiEV81DR^2kxLi3v*MS0a63Kkss5 zJ$r6nCPJ8ng{&Gp@6{N*_k~BiM=O9Pv zWVYrE5}q3W9wV+;E0?YFqZiW+&!rfW)Fu)9t)bnRYWu%PZKL$*?H)$YpIAQoT+wca zZm!YVsVjfTG5ren?X{{hbcDQWCbYk5;W`icW&Ahr3xF_E&xCnsN~PPII(veq_`q5y z-AL!*KOA-{HzUfK%WAP*b5^bRvGi$^@H2|+EYbAwON2+&KKbwOl@C*$Ee2E~UeZ45 z%c*LmS>;f?T3aEvU+>}5V`6bVJ+8%IhQ{7InB61MY|udU%I^$&AeCx`5=r5`#5X!J zsIKC)1+KJfGvr>q2$3z%B`l|tr`YEV0Iu$N@lt1f-f&LUpvC!MJibiL0^oQtUdX|E zK=K;y@acf)ud`7d-&^?({VPu`90=vG1TH4yIFml%2%Wl3mJ~AKL{=fDyqdXtd(nW4 z5GrE8&^W^TED*OxQa{2-&R4Pb9`6;pAUL#>EF~=s)%#!o$>gRS=CDqH%5Q*woOV11 zr|Bk7!|pgJ+6-vgL^E^T$-EuiBZ$*e2Xj4_E#Rxzl;>M4@vve1SpRi;mt?#3-exx2 zt@&i`I;d*1mcLmctuoszTRvv26wGas<_Xu^F;wm}wzH67v=@k6 zoj;v2!a-WZHEPp&k4UJuLJT&W?4VeBQRRo?S%)Rxkas1z^_NpkC9P+o9cRxtyRG`K zW~-?yiXX`9E-eJK=#Z?)ZP)@wWtzPS!e?3Ytku=#fP>ZtH_`pI1ESZ~W7+gf5o<$| za2<8?H$0p&f|%(&+qA2v5NC@V@y{=+`(VrA7QBRKoIetu(V%^={vUnd50-G>lRQv5 zKaudbt^hI!JOl-`-+{Z2qs3_i|Cx&XNUL}Z&8rNdd>xaN{|kpdQDAM^n8VJvNmWUZ zj%5rMC7`js`-4nz+J-}cbYB9ORRUl45D;f`9};pO5>48h>VA4|;C68P!3+Z|zwlHl zzoCQH6g5Wm(S9w2*g%+x$dCea{=N+S^zA3s!Os!rDFO=XI%DS7gW9rM{4QL0Vt5=& z({{V|#b1dOY1=$vXXB2!6^P_1M+6j$8AqD^Xt?XuH$?lJW{)6R02?E{dy77_3JL-? 
z#{R%07QiqOkzhGVM;!YPC@5R=bbz|YHY+tPeYtyRV+pAOAprZl ziJl^=cI8L92V@djgVN8(=AB6qh~=W-j@S42Hd2yQi3YOL&M5~03z4r zChYP!EGBuVAkvwSD*Ga~-l1;O4bUAMp9*$cw`8zHOZ+;mB*{E}PDJ_K*r}<^s}I~R z)F*nr$ssD2(2Pj)`luBD4n`Y?B(Hgmfy!UK~{t;pJOZNrjLN2aH6I zcpu}%?@ETC`<|#}GFrxIcHhQetL~EAekPeW*(xxZmBVvxeQ@;}-Ogb^#(PQ5Xy=m3 z-VWQ$nMH`AmO`8j1cep{5);)$iP@ff#oRde!v%>6zu-$8yqE&4Rd?I$2vMgOsn=${ zXite#{YiuFAwRl*?naPO%5KCx;-EBW&9dwlU(Vp0L?VFGRMj>kp5_BF)ZlfFXdjnu z{Cd8Srhf@ZlTcFH;?+uSGRF0Ta35~Oxzlu-?^dElk`0Jqb3z4^R(-Ew%mPVgzTe92 zs%UTP0hG=HI{04Vqt0QP%U;ld&i{&3Q>L4Ups#%l<5;(42owbAZ>2+oygNX=27)^YF zUxG8&vhL`w=ykN3iNHMN|JB@CzeUwY?SAO)5Gg6?8ju)5=>Z%N28IUd?nYF)2BcMv$B&M?HC45r zeLoM(K7HNt&H-O2KP~J1JCEA!)OcSgyYYNwhSER!wAB{#T=g_|@iK{CF?$~r(@3NS z_**T%nHBt2c^b0E96og^b(vEC|t`u}AFlkCWJQAL5Gb-T|e75L*;a=uO^F?%!zc2qM zxr{qCN{(JUS|u7yoZl}D4n0z>4;Sk`_p(R3kl&BwtG(BTIYg31F%VP-q&cI3JzA%p zTeY7|H=U}Oyey`&UI>s_)$0=e`FnCqf4-2_fZO_~r3Gqv$ns`uOM^?QIb;}wE9L!~ zB&tv}J3p=L9t{Vu{^h)S$u{%beJmn$R_Xr_0_gsLf$zV5d;H6vn87q}s`E{Hn}rl+ z$@H7_i^q~|fKL0xv5r?kj4KxK^BL9d1dwidCTNeIu{RE@HN=ZlBT@FAksY3*dgbUN zZ(Okg7QG;Hf^XN}gHsSK*t+al;KfC?_$V`vaf6j+Ll*FL zz{IGivF9HmNnL37ho}~0f5hur1Rq56$qU5BeQ&`Ob%iWHR_&*S7FjkzliU9r1(9Fh z4!@%vN7pLm+*Kg(mqw1OA=4wm82rR~SUP1xp+CXU)9(F0ao*!MK_lwb8BP}rb+d}@ zb5vElfs`Gp&6)rnH`=81N^V_)FLHD5TV1{jJ>Fg)ndR=zlrgy-on8(*h@{X$e<)|q z)9O{1=P$jxBo<#*d95X^b$;nFdfONGruP+m5_03!kDolyzFD2(LV_`!%6oU2{e1?!QU@QOtPCSd~wgNeiAOcc|q0PV5 zyL6WuGKP6ve0LgrqhZqyyK<*|rVVp}PI)-J!=!7qt;E9zp7Pa(=@X4lW|r&^433n% z>ykuMJd4E&Ph{FA->SHWJwfl}(ZVF7bel{V{8QtQk;TawF@-T1;WRQ7k`vR8ts1Qk z2&s%@Wa=xZ=oU^B5tyS2#T{-CBSuPmc9)r}jk*AtdM6+}8~YcSiE=(r?$dmu9Gcj( zb#SWF1tD5wiCN`c7rd7aq~4!wSdfCZ1)AKQ{|+nuE8#Kh#mnYcKM-BFNQwGv_VrJx z;dK_djrcBW%G7fuedUQW_9rTt@3OpiXANCTYDehV>Ur=+IR_Qr4TR%@LPH1C9V$ZNUAcrH z{Z2xi%T3gcwH~y(pqFq9?={!FirPI6Qz5AK7~N4W1hxm$c7ZvEoeS)@>OOX{K2-Vu z63Dp}ThWvqTCxE?XIMQiaOLt7VOrpX!nI_EsGReZZWJbvprEf_Op0vM(iv%?iP!wi z4jcLLvkdg&GrIwWB%}q&TxCh@3G6~dM)fl_@JnxvO2^IPbg2Da*5n)3a$Bw89qy%V z|1xC#Vc-)-MQFhk&Oxy2tzyDq8 
zt8GT=#=xKtMgJp4QZUKb^Z$&ty=()&iP3LqpO3XlT<&h=e7%TBbE(=LYyC2`Ny7H@ zH*wcrTD@)TO0Z#1g4)hr%MuaH3exn(kl(^Rm zu|d{B{7q_xhHbGj?=uYD7NQ_x*PEWqR`@_>>H{Ul28z^gZ8IyM7tQvTeX`b>mJ4(mC|`v08*ewu{Dw&fEwm?l zVVo5{SYTAo>pbYC84#Hl@KGsYV2nFA5*D-qVt~Zr(a}-M78E^)$y@>DV9`1e)?Duh zvwI$RuG6+x8j;#(pcp^>-T}%RPK*-?Sb_QsDLELz_HzV1r@N>NjGJZ?((tl&)3 zfLZ*`sp0SZgT1QYh{>arA=vdNy-bL7(tAb1iQo1{TnC!X?vkEOBq(^jI?f0R;OOOCZZTiK;A^4xR(Ss06KZU3U@0Z#w6=lH?tX51F&C8%n;-mCa@>u); zl9q@+jS(juiRZKDgNyl%LGD=ozsPu_y0IDHm|QOF;JZCOcLr)cwmJgPCenWk##_v{ zT~n~_jbgLm#W{!b-+q!z63lfPoJuR2Z;;4mq*%W8{45*6#<@Ju70zTrH)uEQ(@BGv zO1rO(kfvLE;Y;{_+bWIypF6>$%_#dir=BNz#9+2{bY98k4YOdCaLOichYo&)=iCEN zs*J#MBLDxb-&n5obv^Yd?U0FOVFz|=4ym0q)e?yZ(5NO|vklYLm@z-e>FGee$9q{) zQ*F{wOq$Zwe0n?#&I>^|C(p-LBu!fe?xU~(64K!ohj*p55?0EHD8B`jCL5L*XStBpk)`5Rf@_)&T zkN&GUXOW?Q$zFX&4`hl7JT$cdz|<5=jqUw z-3iA8pF6wIfb?sM_{wWjsp;L+z~Nd!Q~*OGxK-n|mDmGLztoy0gK?nk<^hk3CUBn% zRXL}mdf}EYaT8_DXoFO-GaJlDwJ0lK+=j2rhvouAdrBFEXgSsB+GVB%?AfcwS*8O6 zhY5-5V*_KXRn#la5eNDUXS9z5agj4rH@fy|eFP(Q{(onxwW~!$T6&8;z+&RHc7DPY6dg;C&O=T;Z(whCYb?LPBmEW3Xv7vKP@;^ z`zU1aIX@zM3hg~q(d;A{{`DV0iy;0EW#9I7K1&T#_kTi@$nF?_v@GNC`=!q{>D2Bo z#zDA@=T&*xht>eRBM!br3%?eCY2LvBZ5-&PbiI&F0X$qfn2XN%2w@@@r*4dSn%SgK zjSkN~tw`cNk4IK#X!rdqe3_IdjFjx*+9+4E(9sbmHYNt<{-l!imOyQdbXCSljd24Z zT_OOrIGg;s*N1|O13qjY#tG8#G8oa>(AR1ey`qob!xNb+bKxwQ%lnX*%r3!5@Ts2= z)N}v(P@Gda^s9>xr=mHSy0F`~r2gwv4h3nUCZl{(Y5-{xjti;*zIx+9z09XpX%pim z8gr}dQ2Q=EVD>r{4byYeQ#sF&2s_623Zt|Yysow{PAA0CL{oTh)_>du#${7nCf=*d zpC`*XoDW&}bV)yPoPI|j0Y3@d@Ge=PKTN3Y{4G}Ba~IT3b>fW(Cq6eN)KWH{OqvHW zDa0$dUA_yUi}QRAyTUEoWSds`5;;t!jxyGtt4_7j;diEGI<_RQ*xjmy?5hUbhJ{d1g(sQWCJ-!Y0c&JSW^|8?o@%oYGJ%TEtzJElP{U{S=A?bq^?*HL=?#VWo`{kDG^(DXhD!IAK z^3l3-5(7)NqIK)GQeJ%rm)FdJ6H^36Kxs>YCkrretTBvdMCviOc96VFv+_UZGuaox zTVyZA6l!Y*-#@lX8Jp>T>p9v!srj`zeMLLlDpGq$!GGN(iS7xZ6<;1;Yt(REpjmaI zWr~1PE7xJFHAP1^scQZCvNYMaj$jzJrSsCeEYeR@(OI`z&yeQ`z_>qQsWNGNHr;+D z)tm*su}tye<IC&qDsi-381ZRt zpm8E8e$CCZ%O~+bDU-2z&%AngwMtXb8`c?9eBlR3%%c)LnWeAacI$5=Pu5M!UsRuu 
zmM8L+`~Tt4!k_$*O~8-6@OcDuffO{#NHfSZO6xr~4%O6ziW>1i854|r8Lj*00`3Q2 ziZ}{aHh1q*!!TI-PCA3$O5b~YH%t{x1c%EsiOFOe>l?7Q47=)M zww&z~NoLqt<3rgm0in0a3Xc0xHK^Q%s602}@F)1YF{w&{=kd=IF$1zi2%##8+EiP_Gt*++4qwp@HiH(edcpfK@SZYJ#>4LVp_xSsl(IeIy^^qY{+N1(r!GR~CY zJ>-SX^Uykns$X3xslj8ZCorZeXS5jPai(fZ4xv&;{-CJiA^|y{uNSEjtUcshdLw3Q zW?m?(`<$+!nOHdKt;|A>sVaX^nIolJ)&s|ZrD1!$x#Q>M!bb8S2D4+XqXhqUiT-@< zHASkX)ypF7+>g@ZS_xyJCw|vq)v8f>(cvni8n5RkD#?)^RcW7RFx@V;TYR_U0+JW% z>tDi44sUU<1K`D(a8SOcZbNGE&VdvN%c)pnY*LtqYLm>?A)d9Oz0&d_Z9+gO1v3U( zxh^d?`#dpw9-8!zIQJCtQ3d97=K1YKT3C85x+$(ftOW720rwYs+^v$0=|JXj* zV~h1C$F!*PL`z7>7cRr~G#I7e=-r=@G!U?goIH{$_j<1)z+`SZCu;Q91}489Y2{>M zJ`RHN_s;Cp*4O3eGA8TjC>UID2t%(~5_#6Una%6NsOS)v%;0M?CnX^_L>LW|ogc-L zs9t7+qcb9ln?E1j+%WK6Ixu>i-6(LTzhdcpabmq^{S7E=_G}V<_PD`J+T)>^hE3qS zj!ltS$1^P_E^eJkO&wZ(Nq!WWE>Td3m zm|WO%_1^_NI zVF$C^Io9kqnPJ4m7dS6BRZ-?lPc}@+J*+QxlX9o*eyiESXwi=FOWScZ=ZurY$96vr zouYOg%wqX^oFHWRE^1!ikp3SAK>*n|MZp^*0n;hB*5EDVr|GvVH%n#8Cbe97;9h&D z`l~k#|4?csYDLvh`cehaKFHlwQi40T$XA^FV5PE_^KwJ&pq4k@=c}m5zOJ1TuB{U- z&-u1l?Nm^2M@P@2L92z0l){Jx6x_`CHELU04h~Ptr1b1!MSEWX7bjQoJ!`?^niKr% zoIk1eW+q{`1r>YohJ*UjHv$_G%?*_j+}@8kCKeCB1+W|veU&-MyqZNIJwS9a$*aZb z{wW%WVCXqZ)uw^}ez%kLlW}}rhN7D-Pam817S~X>Be!f`px%dNs~*j>xTmLt25+gxzeaFrWg7G z#3Y3dgoQ}9I6~l#u=3$CWcwx}!?XFP$o|!cgjeUrVPW-e=Ml%E*I`0QVFK>Kg-)vm zC+y(_hI0S_fWV(~_HK*h*_3nrKw5~MQT>K6%rg)NF9c@Hu3c{FmbIQOjnu?8Ys>m? 
zCuB;8YNq7b+G(Uxm6D`fr*{y%m^LM~Do;86F?nYNdRb}q_~E3cg__skHR+b8%pStq zF$Nj08l*39Mh^(H@-BXA#4@X8bRECj^7duT=sH%?$tkVDu{U2RAlkvsg>nDXxR`JG zM5jlmWn@9P#2?*U0RXi(6^nbi@E!kp=C`IUf#vm}LMb3;IlDlS`-b-isPt?B8 za-T5X;M?CJduYgQY|cYk(8Pq3P~pcA7Op+N)k*CokX9Gi31ku63tLmuY%aXViFT*8?;Yd-GU3j=1R^im6FC zoc76stCXf~6|@jB9qhop#-OPS(r}NPK$!e;eIUiyV zWQp%4?VKBEDqvGO|7wHhI31Hc-hQQ1l9C*H%8@ZW#OO%pUN%AJ&)1||V`R2Z0Ep7J zog1>*7;6i0J-#?7w{)X;aGmh*p+?vA#=0ufd}!M85(B<4JTYUDqa{4dg>;w)+|Lbb z_Sf#4_dIL9UiV`OF&JJb7tcV z?q8zj-5f|estFS?62Sj{&+0`$_aBtzzi$!6NC#>-5wHoVJa{5l9|td#`oqf|5;u{k1}T16>2^ zf)1AkBX3t`BC84yPV{c6UX5k-284PDNCM-WFLqy6efey&OG&TD z;Z=i}yg@vjJ+kh!@GfMrGzsfny4-WtN~x~n{>+EfP+uvUk>Ee8lx$!%_mFRL@6zRW z-tNW=&iPa1_HZ=+n(F%UIO3DCdz{U6&TLv~k?jwkxp0}}hWQ^s-(G*S{(>l^E;h~8 z&%(u*-MJzm15q`Xi?nlBX7|HE(CHm&!7;VP`N(_ey(6SRpA+SwlTh)Fw_? zn&7;)T&XZygjsF2vw7Cy43Z9u)r;Y=q}+mg-<1j1it<`WsKwjmt^zQ&5I;(!RJ6G?w=UePJAm`Rt?!@T+y z*eJf0FXJrQZvOZ&nvbZ!-?c-7(b@=A-JzX?W~Gy2j3mN8D-Kh9S`-NLG^Uk}RC7TO~wcwBGl2<1vGzt^4c1 z;U`Mj&-^%yTjY}qvi$6_#V(4SJpVF}e#IW@Gy0y^u{&H}W2l)3*#G7(D`&Ve2z3}N zQWn&-Qe~MYTC$djh#oHfT2Js7Vlf8B+GI*jrk2^gFr?j~cu=aF(tqb({Yk(HK9b&o z5pButVc=u3W3V019+I=q?>mCmCX=qD(0Xp36r51BB}N~=HK(r$~?I;D;?CbBtILp|KRy?6=o=D?-YN9<8pb3-aIYcZhw3S1KQ&H6?%Z zEk!)Tc|WsykG1|>cDg@oxcDPiy|!#1iWJ$89~eoZy0`^4^wYyXMu;zx9FG_aGKKZL z%hUeTuWup=+hbQSRh0x>@hP~oUebAPi4OD6U6x0wZp>ZnyM)bTPU~}3^G&9a?$z_e zLGcivgGy-N=AaQP$QDn44B>-n>b|8L!n3oT1p&^zy5a{jEASOnR^kja=K6EE#CSiE2$Tw#z%qn6ux;YkG-5F6>eK&K8q^uP75&BIRkb0DlSF&syLP zO5WO(nh6#{QCl{JX^w|EWg8C$!C*#-s8L-0%P8FvxHUrRNuIREsd&?*8f_c{X;Om9 zrLXL1Vu@6Ajv~K^KMJ=6hK3Qm_aT?JcF2G`11)8RVrw!a9|=rQe;O}MZy(8WIv)II z2quPf!+UZdH7;h6CS+!P^o?qZ<9=E{Q-Xo8s9k7Qj6)98^((gg8?h)>cS>Bbhe=FC zh}*G0SYQnB;)84(pZZ5kbkzYUy+CCKC#qFyF2`iZDMfhIe|Ec`f!~m?>KDOVOwhRn z8l)He!A!7?E^Q4$JPjBSeEQuz)n^#BC<&V;Pbj}tcxV1WJ$;Iiyex5SJs+eaof=gZ zE673!P*G7KN$|m*Zy3={e>gmuw5!huspUZxx1!x(h!9@<1i{24RhGtMw5J&o3?6|D+Vlev@_?JG#4sf(#0T6F zzq`MIN=|e-OWjfEr6J|w3aksQZ1a<$!=qAdjmC^~uT>c+%9DHJ4atvDt`O2QeQm1b 
z@6mI;HP(I1TcZ-~om7ZXNGu#G7sL@7M&%O|Q`-8jT`>zM24d8I{c+=ocbHW>Gc*^u zQg%KH#ypcLUOHVii7fq=#$Y^C---+3Y+S>bmy6j1?zBa3JCfsPm}ZQ>pp#B2L)-BT z>-WwCV@Qd)bUL0Pi3&Hfjl#HLsjtgVinyvXMqbiFQ+o6~ z(Wq3^{}pNTb_8YQZ|K!+ec~o7@%^)E`aU)6)wK+Mccg$Map^+c19j3gS62hiR7Y(5 z?qy)+W*kh{uj#@B(<1q|erloz(eRO;q*q9zVCc_w_yntwU-l&H$q``TOSH6oZTjdX zlPTkoQ1JfmLA#g8r+o$Z)jt04T#XVh&iQLf_dJ?pV@Ho9c9Jvy(}CH;@F)9tl5~>b z-TEz2zc~OnL%w!!R4%OrN%As0!PWj~^Jk}!T9!(hMS1E%Ic(MJWA$}xyKxu|&lYd^ zm@ZR5LSJzj_&e5q09EDPl&Vm}?b_BGp6^cqjjvSQ^l#QHF$E^uFfBTH2%5nmYo&JKf?&xks&a|I>c_uNC>PRr{|USiX)kd8DcLls9085qlm0+Kya;LENA=u;IzpW4s0skxKHif0=_Qsq=S+Ui9{T7Qp!_f3 zebpldo@WaWY!vMb9vxGRd9pt0%ISz)A^LdXbY~U3icPy)52f;W_c1_%^|$P1gfh=P zM$tnrB0$3B$Pa7k`m8A9>D(n2=609ybd^lxs9Xidp>ii<_y@fjKUoo6&#Xz0JQkVE zMbIaB=Y603$a46BZ`r$Mnd=t7MX*M2i<{atCq@41PoV!7ARPsq=9m06JjlcQn->QF JNBq0~e*h|JW-I^z diff --git a/docs/public_models/HAILO15H_Classification.rst b/docs/public_models/HAILO15H/HAILO15H_classification.rst similarity index 57% rename from docs/public_models/HAILO15H_Classification.rst rename to docs/public_models/HAILO15H/HAILO15H_classification.rst index 0a1dda62..56ac7acb 100644 --- a/docs/public_models/HAILO15H_Classification.rst +++ b/docs/public_models/HAILO15H/HAILO15H_classification.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Classification`_ + + .. _Classification: Classification @@ -17,292 +28,378 @@ ImageNet ^^^^^^^^ .. 
list-table:: - :widths: 31 9 7 11 9 8 8 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - Accuracy (top1) - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - efficientnet_l + - NV12 Compiled + * - efficientnet_l - 80.46 - 79.36 + - 84.6289 + - 166.522 - 300x300x3 - 10.55 - 19.4 - `download `_ - `link `_ - - `download `_ - - 84.6289 - - 166.522 - * - efficientnet_lite4 + - `download `_ + - `download `_ + * - efficientnet_lite0 + - 74.99 + - 73.81 + - None + - None + - 224x224x3 + - 4.63 + - 0.78 + - `download `_ + - `link `_ + - `download `_ + - `download `_ + * - efficientnet_lite1 + - 76.68 + - 76.21 + - None + - None + - 240x240x3 + - 5.39 + - 1.22 + - `download `_ + - `link `_ + - `download `_ + - `download `_ + * - efficientnet_lite2 + - 77.45 + - 76.74 + - None + - None + - 260x260x3 + - 6.06 + - 1.74 + - `download `_ + - `link `_ + - `download `_ + - `download `_ + * - efficientnet_lite3 + - 79.29 + - 78.42 + - None + - None + - 280x280x3 + - 8.16 + - 2.8 + - `download `_ + - `link `_ + - `download `_ + - `download `_ + * - efficientnet_lite4 - 80.79 - 79.99 + - 98.9888 + - 250.144 - 300x300x3 - 12.95 - 5.10 - `download `_ - `link `_ - - `download `_ - - 98.9888 - - 250.144 - * - efficientnet_m |rocket| + - `download `_ + - `download `_ + * - efficientnet_m - 78.91 - 78.63 + - 175.255 + - 432.658 - 240x240x3 - 6.87 - 7.32 - `download `_ - `link `_ - - `download `_ - - 175.255 - - 432.658 - * - hardnet39ds + - `download `_ + - `download `_ + * - efficientnet_s + - 77.64 + - 77.32 + - None + - None + - 224x224x3 + - 5.41 + - 4.72 + - `download `_ + - `link `_ + - `download `_ + - `download `_ + * - hardnet39ds - 73.43 - 72.92 + - 351 + - 1168 - 224x224x3 - 3.48 - 0.86 - `download `_ - `link `_ - - `download `_ - - 356.246 - - 1172.24 - * - hardnet68 + - `download `_ + - `download `_ + * - 
hardnet68 - 75.47 - 75.04 + - 150 + - 366 - 224x224x3 - 17.56 - 8.5 - `download `_ - `link `_ - - `download `_ - - 151.079 - - 366.497 - * - inception_v1 + - `download `_ + - `download `_ + * - inception_v1 - 69.74 - 69.54 + - 344 + - 842 - 224x224x3 - 6.62 - 3 - `download `_ - `link `_ - - `download `_ - - 349.062 - - 844.278 - * - mobilenet_v1 + - `download `_ + - `download `_ + * - mobilenet_v1 - 70.97 - 70.26 + - 2874 + - 2875 - 224x224x3 - 4.22 - 1.14 - `download `_ - `link `_ - - `download `_ - - 2875.15 - - 2875.15 - * - mobilenet_v2_1.0 |rocket| + - `download `_ + - `download `_ + * - mobilenet_v2_1.0 |rocket| - 71.78 - 71.0 + - 3455 + - 3456 - 224x224x3 - 3.49 - 0.62 - `download `_ - `link `_ - - `download `_ - - 1149.92 - - 1149.92 - * - mobilenet_v2_1.4 + - `download `_ + - `download `_ + * - mobilenet_v2_1.4 - 74.18 - 73.18 + - 580 + - 580 - 224x224x3 - 6.09 - 1.18 - `download `_ - `link `_ - - `download `_ - - 580.225 - - 580.225 - * - mobilenet_v3 + - `download `_ + - `download `_ + * - mobilenet_v3 - 72.21 - 71.73 + - 377 + - 1210 - 224x224x3 - 4.07 - 2 - `download `_ - `link `_ - - `download `_ - - 378.245 - - 1207.87 - * - mobilenet_v3_large_minimalistic + - `download `_ + - `download `_ + * - mobilenet_v3_large_minimalistic - 72.11 - - 70.96 + - 70.61 + - 2595 + - 2595 - 224x224x3 - 3.91 - 0.42 - `download `_ - `link `_ - - `download `_ - - 2598.06 - - 2598.06 - * - regnetx_1.6gf + - `download `_ + - `download `_ + * - regnetx_1.6gf - 77.05 - 76.75 + - 362 + - 1100 - 224x224x3 - 9.17 - 3.22 - `download `_ - `link `_ - - `download `_ - - 370.973 - - 1124.24 - * - regnetx_800mf + - `download `_ + - `download `_ + * - regnetx_800mf - 75.16 - 74.84 + - 2559 + - 2559 - 224x224x3 - 7.24 - 1.6 - `download `_ - `link `_ - - `download `_ - - 2558.81 - - 2558.81 - * - repvgg_a1 + - `download `_ + - `download `_ + * - repvgg_a1 - 74.4 - 72.4 + - 1783 + - 1783 - 224x224x3 - 12.79 - 4.7 - `download `_ - `link `_ - - `download `_ - - 1783.68 - - 1783.68 - * - 
repvgg_a2 + - `download `_ + - `download `_ + * - repvgg_a2 - 76.52 - 74.52 + - 245 + - 523 - 224x224x3 - 25.5 - 10.2 - `download `_ - `link `_ - - `download `_ - - 245.973 - - 522.845 - * - resmlp12_relu + - `download `_ + - `download `_ + * - resmlp12_relu - 75.26 - 74.32 + - 86 + - 307 - 224x224x3 - 15.77 - 6.04 - `download `_ - `link `_ - - `download `_ - - 85.1754 - - 304.697 - * - resnet_v1_18 + - `download `_ + - `download `_ + * - resnet_v1_18 - 71.26 - 71.06 + - 2031 + - 2031 - 224x224x3 - 11.68 - 3.64 - `download `_ - `link `_ - - `download `_ - - 1944.85 - - 1944.85 - * - resnet_v1_34 + - `download `_ + - `download `_ + * - resnet_v1_34 - 72.7 - 72.14 + - 261 + - 693 - 224x224x3 - 21.79 - 7.34 - `download `_ - `link `_ - - `download `_ - - 251.433 - - 658.841 - * - resnet_v1_50 |rocket| |star| + - `download `_ + - `download `_ + * - resnet_v1_50 |rocket| |star| - 75.12 - 74.47 + - 246 + - 663 - 224x224x3 - 25.53 - 6.98 - `download `_ - `link `_ - - `download `_ - - 248.98 - - 817.34 - * - resnext26_32x4d + - `download `_ + - `download `_ + * - resnext26_32x4d - 76.18 - 75.78 + - 341 + - 819 - 224x224x3 - 15.37 - 4.96 - `download `_ - `link `_ - - `download `_ - - 352.687 - - 821.714 - * - resnext50_32x4d + - `download `_ + - `download `_ + * - resnext50_32x4d - 79.31 - 78.21 + - 191 + - 491 - 224x224x3 - 24.99 - 8.48 - `download `_ - `link `_ - - `download `_ - - 192.253 - - 480.675 - * - squeezenet_v1.1 + - `download `_ + - `download `_ + * - squeezenet_v1.1 - 59.85 - 59.4 + - 3327 + - 3327 - 224x224x3 - 1.24 - 0.78 - `download `_ - `link `_ - - `download `_ - - 3327.82 - - 3327.82 - * - vit_base_bn + - `download `_ + - `download `_ + * - vit_base_bn |rocket| - 79.98 - 78.58 + - 59 + - 199 - 224x224x3 - 86.5 - - 34.25 + - 35.188 - `download `_ - `link `_ - - `download `_ - - 52.905 - - 148.347 - * - vit_small_bn + - `download `_ + - `download `_ + * - vit_small_bn - 78.12 - 77.02 + - 117 + - 431 - 224x224x3 - 21.12 - 8.62 - `download `_ - `link `_ - - 
`download `_ - - 115.172 - - 418.135 - * - vit_tiny_bn + - `download `_ + - `download `_ + * - vit_tiny_bn - 68.95 - - 66.75 + - 67.15 + - 211 + - 955 - 224x224x3 - 5.73 - 2.2 - `download `_ - `link `_ - - `download `_ - - 195.616 - - 801.37 + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO8L_Depth_Estimation.rst b/docs/public_models/HAILO15H/HAILO15H_depth_estimation.rst similarity index 53% rename from docs/public_models/HAILO8L_Depth_Estimation.rst rename to docs/public_models/HAILO15H/HAILO15H_depth_estimation.rst index 051a8204..d1ad814b 100644 --- a/docs/public_models/HAILO8L_Depth_Estimation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_depth_estimation.rst @@ -2,11 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Depth Estimation`_ + + .. _Depth Estimation: Depth Estimation @@ -16,39 +28,42 @@ NYU ^^^ .. 
list-table:: - :widths: 34 7 7 11 9 8 8 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - RMSE - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - fast_depth |star| + - NV12 Compiled + * - fast_depth |star| - 0.6 - 0.62 + - 1379 + - 1379 - 224x224x3 - 1.35 - 0.74 - `download `_ - `link `_ - - `download `_ - - 299.42 - - 299.418 - * - scdepthv3 + - `download `_ + - `download `_ + * - scdepthv3 - 0.48 - 0.51 + - 204 + - 423 - 256x320x3 - 14.8 - 10.7 - `download `_ - `link `_ - - `download `_ - - 113.957 - - 236.908 + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO15H/HAILO15H_face_attribute.rst b/docs/public_models/HAILO15H/HAILO15H_face_attribute.rst new file mode 100644 index 00000000..2ace0de4 --- /dev/null +++ b/docs/public_models/HAILO15H/HAILO15H_face_attribute.rst @@ -0,0 +1,57 @@ + +Public Pre-Trained Models +========================= + +.. |rocket| image:: images/rocket.png + :width: 18 + +.. |star| image:: images/star.png + :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Face Attribute`_ + + +.. _Face Attribute: + +Face Attribute +-------------- + +CELEBA +^^^^^^ + +.. 
list-table:: + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 + :header-rows: 1 + + * - Network Name + - Mean Accuracy + - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) + - Input Resolution (HxWxC) + - Params (M) + - OPS (G) + - Pretrained + - Source + - Compiled + - NV12 Compiled + * - face_attr_resnet_v1_18 + - 81.19 + - 81.09 + - 2379 + - 2379 + - 218x178x3 + - 11.74 + - 3 + - `download `_ + - `link `_ + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO15H_Face_Detection.rst b/docs/public_models/HAILO15H/HAILO15H_face_detection.rst similarity index 59% rename from docs/public_models/HAILO15H_Face_Detection.rst rename to docs/public_models/HAILO15H/HAILO15H_face_detection.rst index 794a8a3e..6c5031db 100644 --- a/docs/public_models/HAILO15H_Face_Detection.rst +++ b/docs/public_models/HAILO15H/HAILO15H_face_detection.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Face Detection`_ + + .. _Face Detection: Face Detection @@ -17,72 +28,78 @@ WiderFace ^^^^^^^^^ .. 
list-table:: - :widths: 24 7 12 11 9 8 8 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mAP - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - lightface_slim |star| + - NV12 Compiled + * - lightface_slim |star| - 39.7 - 39.22 + - 3968 + - 2768 - 240x320x3 - 0.26 - 0.16 - `download `_ - `link `_ - - `download `_ - - 3968.94 - - 3968.94 - * - retinaface_mobilenet_v1 |star| + - `download `_ + - `download `_ + * - retinaface_mobilenet_v1 |star| - 81.27 - 81.17 + - 73 + - 103 - 736x1280x3 - 3.49 - 25.14 - `download `_ - `link `_ - - `download `_ - - 73.4203 - - 104.099 - * - scrfd_10g + - `download `_ + - `download `_ + * - scrfd_10g - 82.13 - 82.03 + - 134 + - 206 - 640x640x3 - 4.23 - 26.74 - `download `_ - `link `_ - - `download `_ - - 128.785 - - 215.146 - * - scrfd_2.5g + - `download `_ + - `download `_ + * - scrfd_2.5g - 76.59 - 76.32 + - 315 + - 576 - 640x640x3 - 0.82 - 6.88 - `download `_ - `link `_ - - `download `_ - - 312.464 - - 549.501 - * - scrfd_500m + - `download `_ + - `download `_ + * - scrfd_500m - 68.98 - 68.88 + - 344 + - 662 - 640x640x3 - 0.63 - 1.5 - `download `_ - `link `_ - - `download `_ - - 331.106 - - 601.805 + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO8_Face_Recognition.rst b/docs/public_models/HAILO15H/HAILO15H_face_recognition.rst similarity index 51% rename from docs/public_models/HAILO8_Face_Recognition.rst rename to docs/public_models/HAILO15H/HAILO15H_face_recognition.rst index 245e20e5..66a62bb2 100644 --- a/docs/public_models/HAILO8_Face_Recognition.rst +++ b/docs/public_models/HAILO15H/HAILO15H_face_recognition.rst @@ -2,11 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. 
|star| image:: images/star.png :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Face Recognition`_ + + .. _Face Recognition: Face Recognition @@ -16,39 +28,42 @@ LFW ^^^ .. list-table:: - :widths: 12 7 12 14 9 8 10 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - lfw verification accuracy - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - arcface_mobilefacenet + - NV12 Compiled + * - arcface_mobilefacenet - 99.43 - 99.41 + - 1924 + - 1924 - 112x112x3 - 2.04 - 0.88 - `download `_ - `link `_ - - `download `_ - - 3354.68 - - 3354.67 - * - arcface_r50 + - `download `_ + - `download `_ + * - arcface_r50 - 99.72 - 99.71 + - 154 + - 381 - 112x112x3 - 31.0 - 12.6 - `download `_ - `link `_ - - `download `_ - - 101.995 - - 388.718 + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO15H/HAILO15H_facial_landmark_detection.rst b/docs/public_models/HAILO15H/HAILO15H_facial_landmark_detection.rst new file mode 100644 index 00000000..362a27a6 --- /dev/null +++ b/docs/public_models/HAILO15H/HAILO15H_facial_landmark_detection.rst @@ -0,0 +1,57 @@ + +Public Pre-Trained Models +========================= + +.. |rocket| image:: images/rocket.png + :width: 18 + +.. |star| image:: images/star.png + :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
+ +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Facial Landmark Detection`_ + + +.. _Facial Landmark Detection: + +Facial Landmark Detection +------------------------- + +AFLW2k3d +^^^^^^^^ + +.. list-table:: + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 + :header-rows: 1 + + * - Network Name + - NME + - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) + - Input Resolution (HxWxC) + - Params (M) + - OPS (G) + - Pretrained + - Source + - Compiled + - NV12 Compiled + * - tddfa_mobilenet_v1 |star| + - 3.68 + - 4.05 + - 8936 + - 8941 + - 120x120x3 + - 3.26 + - 0.36 + - `download `_ + - `link `_ + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst b/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst new file mode 100644 index 00000000..0565bae8 --- /dev/null +++ b/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst @@ -0,0 +1,52 @@ + +Public Pre-Trained Models +========================= + +.. |rocket| image:: images/rocket.png + :width: 18 + +.. |star| image:: images/star.png + :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Hand Landmark detection`_ + + +.. _Hand Landmark detection: + +Hand Landmark detection +----------------------- + +Hand Landmark +^^^^^^^^^^^^^ + +.. 
list-table:: + :header-rows: 1 + + * - Network Name + - FPS (Batch Size=1) + - FPS (Batch Size=8) + - Input Resolution (HxWxC) + - Params (M) + - OPS (G) + - Pretrained + - Source + - Compiled + - NV12 Compiled + * - hand_landmark_lite + - 1340 + - 1340 + - 224x224x3 + - 1.01 + - 0.3 + - `download `_ + - `link `_ + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO8L_Image_Denoising.rst b/docs/public_models/HAILO15H/HAILO15H_image_denoising.rst similarity index 57% rename from docs/public_models/HAILO8L_Image_Denoising.rst rename to docs/public_models/HAILO15H/HAILO15H_image_denoising.rst index 284a027a..63ed98d9 100644 --- a/docs/public_models/HAILO8L_Image_Denoising.rst +++ b/docs/public_models/HAILO15H/HAILO15H_image_denoising.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Image Denoising`_ + + .. _Image Denoising: Image Denoising @@ -17,58 +28,62 @@ BSD68 ^^^^^ .. 
list-table:: - :widths: 30 7 11 14 9 8 12 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - PSNR - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - dncnn3 + - NV12 Compiled + * - dncnn3 - 31.46 - 31.26 + - 44 + - 44 - 321x481x1 - 0.66 - 205.26 - `download `_ - `link `_ - - `download `_ - - 29.165 - - 29.154 + - `download `_ + - `download `_ CBSD68 ^^^^^^ .. list-table:: - :widths: 30 7 11 14 9 8 12 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - PSNR - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - dncnn_color_blind + - NV12 Compiled + * - dncnn_color_blind - 33.87 - 32.97 + - 33 + - 33 - 321x481x3 - 0.66 - 205.97 - `download `_ - `link `_ - - `download `_ - - 29.165 - - 29.154 + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO8L_Instance_Segmentation.rst b/docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst similarity index 64% rename from docs/public_models/HAILO8L_Instance_Segmentation.rst rename to docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst index a7b49985..63677445 100644 --- a/docs/public_models/HAILO8L_Instance_Segmentation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst @@ -2,11 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
+ +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Instance Segmentation`_ + + .. _Instance Segmentation: Instance Segmentation @@ -16,116 +28,126 @@ COCO ^^^^ .. list-table:: - :widths: 34 7 7 11 9 8 8 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mAP - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - yolact_regnetx_1.6gf + - NV12 Compiled + * - yolact_regnetx_1.6gf - 27.57 - 27.27 + - 47 + - 71 - 512x512x3 - 30.09 - 125.34 - `download `_ - `link `_ - - `download `_ - - 31.8537 - - 46.176 - * - yolact_regnetx_800mf + - `download `_ + - `download `_ + * - yolact_regnetx_800mf - 25.61 - 25.5 + - 57 + - 85 - 512x512x3 - 28.3 - 116.75 - `download `_ - `link `_ - - `download `_ - - 31.3871 - - 43.7109 - * - yolov5l_seg + - `download `_ + - `download `_ + * - yolov5l_seg - 39.78 - 39.09 + - 32 + - 46 - 640x640x3 - 47.89 - 147.88 - `download `_ - `link `_ - - `download `_ - - 18.2655 - - 17.5874 - * - yolov5m_seg + - `download `_ + - `download `_ + * - yolov5m_seg - 37.05 - 36.32 + - 60 + - 89 - 640x640x3 - 32.60 - 70.94 - `download `_ - `link `_ - - `download `_ - - 41.0505 - - 60.2374 - * - yolov5n_seg |star| + - `download `_ + - `download `_ + * - yolov5n_seg |star| - 23.35 - 22.75 + - 167 + - 162 - 640x640x3 - 1.99 - 7.1 - `download `_ - `link `_ - - `download `_ - - 118.221 - - 141.6 - * - yolov5s_seg + - `download `_ + - `download `_ + * - yolov5s_seg - 31.57 - - 30.49 + - 30.8 + - 117 + - 155 - 640x640x3 - 7.61 - 26.42 - `download `_ - `link `_ - - `download `_ - - 77.1034 - - 104.49 - * - yolov8m_seg + - `download `_ + - `download `_ + * - yolov8m_seg - 
40.6 - - 39.88 + - 39.85 + - 44 + - 69 - 640x640x3 - 27.3 - 110.2 - `download `_ - `link `_ - - `download `_ - - 26.5 - - 35.18 - * - yolov8n_seg + - `download `_ + - `download `_ + * - yolov8n_seg - 30.32 - 29.68 + - 196 + - 319 - 640x640x3 - 3.4 - 12.04 - `download `_ - `link `_ - - `download `_ - - 122.153 - - 232.506 - * - yolov8s_seg + - `download `_ + - `download `_ + * - yolov8s_seg - 36.63 - - 36.03 + - 36.13 + - 89 + - 150 - 640x640x3 - 11.8 - 42.6 - `download `_ - `link `_ - - `download `_ - - 65.241 - - 101.822 + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO8_Low_Light_Enhancement.rst b/docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst similarity index 53% rename from docs/public_models/HAILO8_Low_Light_Enhancement.rst rename to docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst index 62d9f5d0..b8e1474e 100644 --- a/docs/public_models/HAILO8_Low_Light_Enhancement.rst +++ b/docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst @@ -2,11 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Low Light Enhancement`_ + + .. _Low Light Enhancement: Low Light Enhancement @@ -16,39 +28,42 @@ LOL ^^^ .. 
list-table:: - :widths: 30 7 11 14 9 8 12 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - PSNR - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - zero_dce + - NV12 Compiled + * - zero_dce - 16.23 - 16.24 + - 114 + - 131 - 400x600x3 - 0.21 - 38.2 - `download `_ - `link `_ - - `download `_ - - 110.691 - - 110.687 - * - zero_dce_pp + - `download `_ + - `download `_ + * - zero_dce_pp - 15.95 - 15.82 + - 29 + - 29 - 400x600x3 - 0.02 - 4.84 - `download `_ - `link `_ - - `download `_ - - 101.229 - - 101.224 + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO8_Object_Detection.rst b/docs/public_models/HAILO15H/HAILO15H_object_detection.rst similarity index 73% rename from docs/public_models/HAILO8_Object_Detection.rst rename to docs/public_models/HAILO15H/HAILO15H_object_detection.rst index 4e80b9e8..ac8856eb 100644 --- a/docs/public_models/HAILO8_Object_Detection.rst +++ b/docs/public_models/HAILO15H/HAILO15H_object_detection.rst @@ -1,12 +1,24 @@ + Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Object Detection`_ + + .. _Object Detection: Object Detection @@ -16,520 +28,578 @@ COCO ^^^^ .. 
list-table:: - :widths: 33 8 7 12 8 8 8 7 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mAP - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) + - NV12 Compiled * - centernet_resnet_v1_18_postprocess - 26.3 - 23.31 + - 122 + - 199 - 512x512x3 - 14.22 - 31.21 - `download `_ - `link `_ - - `download `_ - - 441.229 - - 441.223 + - `download `_ + - `download `_ * - centernet_resnet_v1_50_postprocess - 31.78 - 29.23 + - 80 + - 123 - 512x512x3 - 30.07 - 56.92 - `download `_ - `link `_ - - `download `_ - - 78.5698 - - 151.005 + - `download `_ + - `download `_ * - damoyolo_tinynasL20_T - 42.8 - - 41.7 + - 42.3 + - 143 + - 283 - 640x640x3 - 11.35 - 18.02 - `download `_ - `link `_ - - `download `_ - - 133.613 - - 324.124 + - `download `_ + - `download `_ * - damoyolo_tinynasL25_S - 46.53 - - 46.04 + - 45.34 + - 99 + - 220 - 640x640x3 - 16.25 - 37.64 - `download `_ - `link `_ - - `download `_ - - 125.284 - - 125.279 + - `download `_ + - `download `_ * - damoyolo_tinynasL35_M - 49.7 - - 47.9 + - 47.7 + - 56 + - 104 - 640x640x3 - 33.98 - 61.64 - `download `_ - `link `_ - - `download `_ - - 51.0468 - - 123.681 + - `download `_ + - `download `_ * - detr_resnet_v1_18_bn - 33.91 - - 30.36 + - 30.91 + - 26 + - 56 - 800x800x3 - 32.42 - - 58.97 + - 61.87 - `download `_ - `link `_ - - `download `_ - - 26.8564 - - 71.4389 + - `download `_ + - `download `_ * - efficientdet_lite0 - 27.32 - 26.49 + - None + - None - 320x320x3 - 3.56 - 1.94 - `download `_ - `link `_ - - `download `_ - - 87.3656 - - 245.752 + - `download `_ + - `download `_ * - efficientdet_lite1 - 32.27 - 31.72 + - None + - None - 384x384x3 - 4.73 - 4 - `download `_ - `link `_ - - `download `_ - - 62.5294 - - 168.514 + - `download `_ + - `download `_ * - efficientdet_lite2 - 35.95 - 34.67 + - None + - None - 448x448x3 - 5.93 - 6.84 - `download `_ - `link `_ - 
- `download `_ - - 43.2493 - - 92.2266 + - `download `_ + - `download `_ * - nanodet_repvgg |star| - 29.3 - 28.53 + - 738 + - 738 - 416x416x3 - 6.74 - 11.28 - - `download `_ + - `download `_ - `link `_ - - `download `_ - - 991.956 - - 992.077 + - `download `_ + - `download `_ * - nanodet_repvgg_a12 - 33.73 - - 31.33 + - 32.13 + - 155 + - 255 - 640x640x3 - 5.13 - 28.23 - - `download `_ + - `download `_ - `link `_ - - `download `_ - - 400.976 - - 400.965 + - `download `_ + - `download `_ * - nanodet_repvgg_a1_640 - 33.28 - 32.88 + - 181 + - 181 - 640x640x3 - 10.79 - 42.8 - - `download `_ + - `download `_ - `link `_ - - `download `_ - - 282.225 - - 282.218 + - `download `_ + - `download `_ * - ssd_mobilenet_v1 |rocket| |star| - 23.19 - 22.29 + - 244 + - 582 - 300x300x3 - 6.79 - 2.5 - `download `_ - `link `_ - - `download `_ - - 1015.95 - - 1015.94 + - `download `_ + - `download `_ * - ssd_mobilenet_v2 - 24.15 - - 22.94 + - 22.95 + - 180 + - 371 - 300x300x3 - 4.46 - 1.52 - `download `_ - `link `_ - - `download `_ - - 137.187 - - 356.764 + - `download `_ + - `download `_ * - tiny_yolov3 - 14.66 - 14.41 + - 1046 + - 1046 - 416x416x3 - 8.85 - 5.58 - `download `_ - `link `_ - - `download `_ - - 1044.6 - - 1044.59 + - `download `_ + - `download `_ * - tiny_yolov4 - 19.18 - 17.73 + - 908 + - 908 - 416x416x3 - 6.05 - 6.92 - `download `_ - `link `_ - - `download `_ - - 1337.63 - - 1337.54 + - `download `_ + - `download `_ * - yolov3 |star| - 38.42 - 38.37 + - 33 + - 45 - 608x608x3 - 68.79 - 158.10 - `download `_ - `link `_ - - `download `_ - - 33.265 - - 47.9808 + - `download `_ + - `download `_ * - yolov3_416 - 37.73 - 37.53 + - 50 + - 79 - 416x416x3 - 61.92 - 65.94 - `download `_ - `link `_ - - `download `_ - - 45.6488 - - 96.8943 - * - yolov3_gluon |rocket| |star| + - `download `_ + - `download `_ + * - yolov3_gluon |star| - 37.28 - 35.64 + - 33 + - 43 - 608x608x3 - 68.79 - 158.1 - `download `_ - `link `_ - - `download `_ - - 38.2519 - - 68.7707 + - `download `_ + - 
`download `_ * - yolov3_gluon_416 |star| - 36.27 - 34.92 + - 50 + - 79 - 416x416x3 - 61.92 - 65.94 - `download `_ - `link `_ - - `download `_ - - 47.115 - - 97.8943 + - `download `_ + - `download `_ * - yolov4_leaky |star| - 42.37 - 41.08 + - 43 + - 68 - 512x512x3 - 64.33 - 91.04 - `download `_ - `link `_ - - `download `_ - - 44.4488 - - 87.9633 + - `download `_ + - `download `_ * - yolov5m - 42.59 - 41.19 + - 75 + - 123 - 640x640x3 - 21.78 - 52.17 - `download `_ - `link `_ - - `download `_ - - 65.308 - - 130.748 + - `download `_ + - `download `_ * - yolov5m6_6.1 - 50.67 - 48.97 + - 24 + - 31 - 1280x1280x3 - 35.70 - 200.04 - `download `_ - `link `_ - - `download `_ - - 25.8568 - - 38.3845 + - `download `_ + - `download `_ * - yolov5m_6.1 - 44.8 - 43.36 + - 78 + - 125 - 640x640x3 - 21.17 - 48.96 - `download `_ - `link `_ - - `download `_ - - 83.7009 - - 151.406 + - `download `_ + - `download `_ * - yolov5m_wo_spp |rocket| - 43.06 - - 40.76 + - 41.06 + - 90.4388 + - 147.557 - 640x640x3 - 22.67 - 52.88 - `download `_ - `link `_ - - `download `_ - - 217.983 - - 217.98 + - `download `_ + - `download `_ * - yolov5s |star| - 35.33 - 33.98 + - 162 + - 282 - 640x640x3 - 7.46 - 17.44 - `download `_ - `link `_ - - `download `_ - - 379.521 - - 379.511 + - `download `_ + - `download `_ * - yolov5s_c3tr - 37.13 - 35.63 + - 120 + - 246 - 640x640x3 - 10.29 - 17.02 - `download `_ - `link `_ - - `download `_ - - 109.528 - - 246.763 + - `download `_ + - `download `_ * - yolov5xs_wo_spp - 33.18 - 32.2 + - 244 + - 481 - 512x512x3 - 7.85 - 11.36 - `download `_ - `link `_ - - `download `_ - - 175.324 - - 437.664 + - `download `_ + - `download `_ * - yolov5xs_wo_spp_nms_core - 32.57 - - 31.06 + - 30.86 + - 244 + - 481 - 512x512x3 - 7.85 - 11.36 - `download `_ - `link `_ - - `download `_ - - 100.493 - - 100.493 + - `download `_ + - `download `_ * - yolov6n - 34.28 - - 32.18 + - 32.28 + - 228 + - 472 - 640x640x3 - 4.32 - 11.12 - `download `_ - `link `_ - - `download `_ - - 1249.65 - - 
1249.62 + - `download `_ + - `download `_ * - yolov6n_0.2.1 - 35.16 - - 33.66 + - 33.87 + - 241 + - 511 - 640x640x3 - 4.33 - 11.06 - `download `_ - `link `_ - - `download `_ - - 801.552 - - 801.456 + - `download `_ + - `download `_ * - yolov7 - 50.59 - 47.89 + - 43 + - 62 - 640x640x3 - 36.91 - 104.51 - `download `_ - `link `_ - - `download `_ - - 47.448 - - 81.0344 + - `download `_ + - `download `_ * - yolov7_tiny - 37.07 - - 35.97 + - 36.07 + - 171 + - 297 - 640x640x3 - 6.22 - 13.74 - `download `_ - `link `_ - - `download `_ - - 373.122 - - 373.11 + - `download `_ + - `download `_ * - yolov7e6 - 55.37 - 53.47 + - 9 + - 11 - 1280x1280x3 - 97.20 - 515.12 - `download `_ - `link `_ - - `download `_ - - 7.86357 - - 10.129 + - `download `_ + - `download `_ * - yolov8l - 52.44 - 51.78 + - 28 + - 41 - 640x640x3 - 43.7 - 165.3 - `download `_ - `link `_ - - `download `_ - - 29.5984 - - 52.7786 + - `download `_ + - `download `_ * - yolov8m - 49.91 - 49.11 + - 54 + - 90 - 640x640x3 - 25.9 - 78.93 - `download `_ - `link `_ - - `download `_ - - 56.3972 - - 112.155 + - `download `_ + - `download `_ * - yolov8n - 37.02 - 36.32 + - 229 + - 508 - 640x640x3 - 3.2 - 8.74 - `download `_ - `link `_ - - `download `_ - - 855.471 - - 855.462 + - `download `_ + - `download `_ * - yolov8s - 44.58 - 43.98 + - 122 + - 211 - 640x640x3 - 11.2 - 28.6 - `download `_ - `link `_ - - `download `_ - - 125.95 - - 270.492 + - `download `_ + - `download `_ * - yolov8x - 53.45 - 52.75 + - 18 + - 25 - 640x640x3 - 68.2 - 258 - `download `_ - `link `_ - - `download `_ - - 19.1925 - - 30.3876 + - `download `_ + - `download `_ + * - yolov9c + - 52.8 + - 50.7 + - None + - None + - 640x640x3 + - 25.3 + - 102.1 + - `download `_ + - `link `_ + - `download `_ + - `download `_ * - yolox_l_leaky |star| - 48.69 - - 46.71 + - 46.59 + - 29 + - 41 - 640x640x3 - 54.17 - 155.3 - `download `_ - `link `_ - - `download `_ - - 33.1201 - - 54.3777 + - `download `_ + - `download `_ * - yolox_s_leaky - 38.12 - 37.27 + - 126 + - 
208 - 640x640x3 - 8.96 - 26.74 - `download `_ - `link `_ - - `download `_ - - 250.702 - - 250.697 + - `download `_ + - `download `_ * - yolox_s_wide_leaky - 42.4 - 40.97 + - 77 + - 114 - 640x640x3 - 20.12 - 59.46 - `download `_ - `link `_ - - `download `_ - - 73.6626 - - 131.346 + - `download `_ + - `download `_ * - yolox_tiny - 32.64 - 31.39 + - 257 + - 574 - 416x416x3 - 5.05 - 6.44 - `download `_ - `link `_ - - `download `_ - - 225.587 - - 634.08 + - `download `_ + - `download `_ VisDrone ^^^^^^^^ .. list-table:: - :widths: 31 7 9 12 9 8 9 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mAP - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) + - NV12 Compiled * - ssd_mobilenet_v1_visdrone |star| - 2.37 - 2.22 + - 318 + - 815 - 300x300x3 - 5.64 - 2.3 - `download `_ - `link `_ - - `download `_ - - 1212.8 - - 1212.72 + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO15H/HAILO15H_person_attribute.rst b/docs/public_models/HAILO15H/HAILO15H_person_attribute.rst new file mode 100644 index 00000000..38e4431e --- /dev/null +++ b/docs/public_models/HAILO15H/HAILO15H_person_attribute.rst @@ -0,0 +1,57 @@ + +Public Pre-Trained Models +========================= + +.. |rocket| image:: images/rocket.png + :width: 18 + +.. |star| image:: images/star.png + :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Person Attribute`_ + + +.. _Person Attribute: + +Person Attribute +---------------- + +PETA +^^^^ + +.. 
list-table:: + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 + :header-rows: 1 + + * - Network Name + - Mean Accuracy + - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) + - Input Resolution (HxWxC) + - Params (M) + - OPS (G) + - Pretrained + - Source + - Compiled + - NV12 Compiled + * - person_attr_resnet_v1_18 + - 82.5 + - 82.61 + - 1944 + - 1944 + - 224x224x3 + - 11.19 + - 3.64 + - `download `_ + - `link `_ + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO15M_Person_Re_ID.rst b/docs/public_models/HAILO15H/HAILO15H_person_re_id.rst similarity index 53% rename from docs/public_models/HAILO15M_Person_Re_ID.rst rename to docs/public_models/HAILO15H/HAILO15H_person_re_id.rst index d9ae1c87..b60ef669 100644 --- a/docs/public_models/HAILO15M_Person_Re_ID.rst +++ b/docs/public_models/HAILO15H/HAILO15H_person_re_id.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Person Re-ID`_ + + .. _Person Re-ID: Person Re-ID @@ -17,39 +28,42 @@ Market1501 ^^^^^^^^^^ .. 
list-table:: - :widths: 28 8 9 13 9 8 8 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - rank1 - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - osnet_x1_0 + - NV12 Compiled + * - osnet_x1_0 - 94.43 - 93.63 + - 167 + - 396 - 256x128x3 - 2.19 - 1.98 - `download `_ - `link `_ - - `download `_ - - 110.173 - - 317.055 - * - repvgg_a0_person_reid_512 |star| + - `download `_ + - `download `_ + * - repvgg_a0_person_reid_512 |star| - 89.9 - 89.3 + - 5082 + - 5082 - 256x128x3 - 7.68 - 1.78 - `download `_ - `link `_ - - `download `_ - - 2632.33 - - 2632.12 + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst b/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst new file mode 100644 index 00000000..5cc4c29e --- /dev/null +++ b/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst @@ -0,0 +1,105 @@ + +Public Pre-Trained Models +========================= + +.. |rocket| image:: images/rocket.png + :width: 18 + +.. |star| image:: images/star.png + :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Pose Estimation`_ + + +.. _Pose Estimation: + +Pose Estimation +--------------- + +COCO +^^^^ + +.. 
list-table:: + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 + :header-rows: 1 + + * - Network Name + - mAP + - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) + - Input Resolution (HxWxC) + - Params (M) + - OPS (G) + - Pretrained + - Source + - Compiled + - NV12 Compiled + * - centerpose_regnetx_1.6gf_fpn |star| + - 53.54 + - 53.53 + - 67 + - 100 + - 640x640x3 + - 14.28 + - 64.58 + - `download `_ + - `link `_ + - `download `_ + - `download `_ + * - centerpose_regnetx_800mf + - 44.07 + - 43.07 + - 85 + - 128 + - 512x512x3 + - 12.31 + - 86.12 + - `download `_ + - `link `_ + - `download `_ + - `download `_ + * - centerpose_repvgg_a0 |star| + - 39.17 + - 37.17 + - 146 + - 249 + - 416x416x3 + - 11.71 + - 28.27 + - `download `_ + - `link `_ + - `download `_ + - `download `_ + * - yolov8m_pose + - 64.26 + - 61.66 + - 54 + - 88 + - 640x640x3 + - 26.4 + - 81.02 + - `download `_ + - `link `_ + - `download `_ + - `download `_ + * - yolov8s_pose + - 59.2 + - 55.6 + - 114 + - 197 + - 640x640x3 + - 11.6 + - 30.2 + - `download `_ + - `link `_ + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO15H_Semantic_Segmentation.rst b/docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst similarity index 58% rename from docs/public_models/HAILO15H_Semantic_Segmentation.rst rename to docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst index 3d85eef0..3075b15a 100644 --- a/docs/public_models/HAILO15H_Semantic_Segmentation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
+ +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Semantic Segmentation`_ + + .. _Semantic Segmentation: Semantic Segmentation @@ -17,110 +28,130 @@ Cityscapes ^^^^^^^^^^ .. list-table:: - :widths: 31 7 9 12 9 8 9 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mIoU - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - fcn8_resnet_v1_18 |star| + - NV12 Compiled + * - fcn8_resnet_v1_18 |star| - 69.41 - 69.21 + - 24 + - 28 - 1024x1920x3 - 11.20 - 142.82 - `download `_ - `link `_ - - `download `_ - - 24.8146 - - 28.8987 - * - stdc1 |rocket| + - `download `_ + - `download `_ + * - segformer_b0_bn + - 69.81 + - 68.01 + - None + - None + - 512x1024x3 + - 3.72 + - 35.76 + - `download `_ + - `link `_ + - `download `_ + - `download `_ + * - stdc1 |rocket| - 74.57 - 73.92 + - 13 + - 27 - 1024x1920x3 - 8.27 - 126.47 - `download `_ - `link `_ - - `download `_ - - 13.63 - - 26.89 + - `download `_ + - `download `_ Oxford-IIIT Pet ^^^^^^^^^^^^^^^ .. list-table:: - :widths: 31 7 9 12 9 8 9 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mIoU - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - unet_mobilenet_v2 + - NV12 Compiled + * - unet_mobilenet_v2 - 77.32 - 77.02 + - 206 + - 390 - 256x256x3 - 10.08 - 28.88 - `download `_ - `link `_ - - `download `_ - - 206.162 - - 390.466 + - `download `_ + - `download `_ Pascal VOC ^^^^^^^^^^ .. 
list-table:: - :widths: 36 7 9 12 9 8 9 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mIoU - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - deeplab_v3_mobilenet_v2 + - NV12 Compiled + * - deeplab_v3_mobilenet_v2 - 76.05 - 74.8 + - 90 + - 90 - 513x513x3 - 2.10 - 17.65 - `download `_ - `link `_ - - `download `_ - - 61.0695 - - 94.4568 - * - deeplab_v3_mobilenet_v2_wo_dilation + - `download `_ + - `download `_ + * - deeplab_v3_mobilenet_v2_wo_dilation - 71.46 - 71.26 + - 97 + - 189 - 513x513x3 - 2.10 - 3.21 - `download `_ - `link `_ - - `download `_ - - 97.4441 - - 189.921 + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO15M_Single_Person_Pose_Estimation.rst b/docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst similarity index 55% rename from docs/public_models/HAILO15M_Single_Person_Pose_Estimation.rst rename to docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst index 8907c2c8..58037f2e 100644 --- a/docs/public_models/HAILO15M_Single_Person_Pose_Estimation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Single Person Pose Estimation`_ + + .. 
_Single Person Pose Estimation: Single Person Pose Estimation @@ -17,50 +28,42 @@ COCO ^^^^ .. list-table:: - :widths: 24 8 9 18 9 8 9 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - AP - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - mspn_regnetx_800mf |star| + - NV12 Compiled + * - mspn_regnetx_800mf |star| - 70.8 - 70.3 + - 305 + - 784 - 256x192x3 - 7.17 - 2.94 - `download `_ - `link `_ - - `download `_ - - 239.436 - - 543.199 - * - vit_pose_small - - 74.16 - - 71.6 - - 256x192x3 - - 24.29 - - 17.17 - - `download `_ - - `link `_ - - `download `_ - - 25.0889 - - 77.8054 - * - vit_pose_small_bn + - `download `_ + - `download `_ + * - vit_pose_small_bn - 72.01 - 70.81 + - 82 + - 276 - 256x192x3 - 24.32 - 17.17 - `download `_ - `link `_ - - `download `_ - - 73.2665 - - 194.156 + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst b/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst new file mode 100644 index 00000000..c6140ffa --- /dev/null +++ b/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst @@ -0,0 +1,57 @@ + +Public Pre-Trained Models +========================= + +.. |rocket| image:: images/rocket.png + :width: 18 + +.. |star| image:: images/star.png + :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Stereo Depth Estimation`_ + + +.. _Stereo Depth Estimation: + +Stereo Depth Estimation +----------------------- + +N/A +^^^ + +.. 
list-table:: + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 + :header-rows: 1 + + * - Network Name + - EPE + - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) + - Input Resolution (HxWxC) + - Params (M) + - OPS (G) + - Pretrained + - Source + - Compiled + - NV12 Compiled + * - stereonet + - 91.79 + - 89.14 + - None + - None + - 368x1232x3, 368x1232x3 + - 5.91 + - 126.28 + - `download `_ + - `link `_ + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO8L_Super_Resolution.rst b/docs/public_models/HAILO15H/HAILO15H_super_resolution.rst similarity index 57% rename from docs/public_models/HAILO8L_Super_Resolution.rst rename to docs/public_models/HAILO15H/HAILO15H_super_resolution.rst index e8a80be7..1393ef05 100644 --- a/docs/public_models/HAILO8L_Super_Resolution.rst +++ b/docs/public_models/HAILO15H/HAILO15H_super_resolution.rst @@ -2,11 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Super Resolution`_ + + .. _Super Resolution: Super Resolution @@ -16,50 +28,54 @@ BSD100 ^^^^^^ .. 
list-table:: - :widths: 32 8 7 11 9 8 8 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - PSNR - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - espcn_x2 + - NV12 Compiled + * - espcn_x2 - 31.4 - 30.3 + - 1637 + - 1637 - 156x240x1 - 0.02 - 1.6 - `download `_ - `link `_ - - `download `_ - - 1161.56 - - 1164.97 - * - espcn_x3 + - `download `_ + - `download `_ + * - espcn_x3 - 28.41 - 28.06 + - 1925 + - 1925 - 104x160x1 - 0.02 - 0.76 - `download `_ - `link `_ - - `download `_ - - 2197.77 - - 2218.23 - * - espcn_x4 + - `download `_ + - `download `_ + * - espcn_x4 - 26.83 - 26.58 + - 1891 + - 1891 - 78x120x1 - 0.02 - 0.46 - `download `_ - `link `_ - - `download `_ - - 2165.26 - - 2189.48 + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst b/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst new file mode 100644 index 00000000..91ad0b1f --- /dev/null +++ b/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst @@ -0,0 +1,57 @@ + +Public Pre-Trained Models +========================= + +.. |rocket| image:: images/rocket.png + :width: 18 + +.. |star| image:: images/star.png + :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Zero-shot Classification`_ + + +.. _Zero-shot Classification: + +Zero-shot Classification +------------------------ + +CIFAR100 +^^^^^^^^ + +.. 
list-table:: + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 + :header-rows: 1 + + * - Network Name + - Accuracy (top1) + - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) + - Input Resolution (HxWxC) + - Params (M) + - OPS (G) + - Pretrained + - Source + - Compiled + - NV12 Compiled + * - clip_resnet_50 + - 42.07 + - 38.57 + - 139 + - 394 + - 224x224x3 + - 38.72 + - 11.62 + - `download `_ + - `link `_ + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO15H_Face_Attribute.rst b/docs/public_models/HAILO15H_Face_Attribute.rst deleted file mode 100644 index 66ccf744..00000000 --- a/docs/public_models/HAILO15H_Face_Attribute.rst +++ /dev/null @@ -1,44 +0,0 @@ - -Public Pre-Trained Models -========================= - -.. |rocket| image:: ../images/rocket.png - :width: 18 - -.. |star| image:: ../images/star.png - :width: 18 - -.. _Face Attribute: - -Face Attribute --------------- - -CELEBA -^^^^^^ - -.. list-table:: - :widths: 30 7 11 14 9 8 12 8 7 7 7 - :header-rows: 1 - - * - Network Name - - Mean Accuracy - - Quantized - - Input Resolution (HxWxC) - - Params (M) - - OPS (G) - - Pretrained - - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - face_attr_resnet_v1_18 - - 81.19 - - 81.09 - - 218x178x3 - - 11.74 - - 3 - - `download `_ - - `link `_ - - `download `_ - - 1871.21 - - 1871.21 diff --git a/docs/public_models/HAILO15H_Facial_Landmark_Detection.rst b/docs/public_models/HAILO15H_Facial_Landmark_Detection.rst deleted file mode 100644 index c1118d5b..00000000 --- a/docs/public_models/HAILO15H_Facial_Landmark_Detection.rst +++ /dev/null @@ -1,44 +0,0 @@ - -Public Pre-Trained Models -========================= - -.. |rocket| image:: ../images/rocket.png - :width: 18 - -.. |star| image:: ../images/star.png - :width: 18 - -.. _Facial Landmark Detection: - -Facial Landmark Detection -------------------------- - -AFLW2k3d -^^^^^^^^ - -.. 
list-table:: - :widths: 28 8 8 16 9 8 8 8 7 7 7 - :header-rows: 1 - - * - Network Name - - NME - - Quantized - - Input Resolution (HxWxC) - - Params (M) - - OPS (G) - - Pretrained - - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - tddfa_mobilenet_v1 |star| - - 3.68 - - 4.05 - - 120x120x3 - - 3.26 - - 0.36 - - `download `_ - - `link `_ - - `download `_ - - 8939.84 - - 8939.84 diff --git a/docs/public_models/HAILO15H_Hand_Landmark_detection.rst b/docs/public_models/HAILO15H_Hand_Landmark_detection.rst deleted file mode 100644 index 467c9462..00000000 --- a/docs/public_models/HAILO15H_Hand_Landmark_detection.rst +++ /dev/null @@ -1,39 +0,0 @@ - -Public Pre-Trained Models -========================= - -.. |rocket| image:: ../images/rocket.png - :width: 18 - -.. |star| image:: ../images/star.png - :width: 18 - -.. _Hand Landmark detection: - -Hand Landmark detection ------------------------ - -Hand Landmark -^^^^^^^^^^^^^ - -.. list-table:: - :header-rows: 1 - - * - Network Name - - Input Resolution (HxWxC) - - Params (M) - - OPS (G) - - Pretrained - - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - hand_landmark_lite - - 224x224x3 - - 1.01 - - 0.3 - - `download `_ - - `link `_ - - `download `_ - - 1,340.45 - - 1,340.45 diff --git a/docs/public_models/HAILO15H_Person_Attribute.rst b/docs/public_models/HAILO15H_Person_Attribute.rst deleted file mode 100644 index 89471f85..00000000 --- a/docs/public_models/HAILO15H_Person_Attribute.rst +++ /dev/null @@ -1,44 +0,0 @@ - -Public Pre-Trained Models -========================= - -.. |rocket| image:: ../images/rocket.png - :width: 18 - -.. |star| image:: ../images/star.png - :width: 18 - -.. _Person Attribute: - -Person Attribute ----------------- - -PETA -^^^^ - -.. 
list-table:: - :widths: 24 14 12 14 9 8 10 8 7 7 7 - :header-rows: 1 - - * - Network Name - - Mean Accuracy - - Quantized - - Input Resolution (HxWxC) - - Params (M) - - OPS (G) - - Pretrained - - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - person_attr_resnet_v1_18 - - 82.5 - - 82.61 - - 224x224x3 - - 11.19 - - 3.64 - - `download `_ - - `link `_ - - `download `_ - - 1944.9 - - 1944.9 diff --git a/docs/public_models/HAILO15H_Pose_Estimation.rst b/docs/public_models/HAILO15H_Pose_Estimation.rst deleted file mode 100644 index fa5bf9d3..00000000 --- a/docs/public_models/HAILO15H_Pose_Estimation.rst +++ /dev/null @@ -1,66 +0,0 @@ - -Public Pre-Trained Models -========================= - -.. |rocket| image:: ../images/rocket.png - :width: 18 - -.. |star| image:: ../images/star.png - :width: 18 - -.. _Pose Estimation: - -Pose Estimation ---------------- - -COCO -^^^^ - -.. list-table:: - :widths: 24 8 9 18 9 8 9 8 7 7 7 - :header-rows: 1 - - * - Network Name - - AP - - Quantized - - Input Resolution (HxWxC) - - Params (M) - - OPS (G) - - Pretrained - - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - centerpose_regnetx_1.6gf_fpn |star| - - 53.54 - - 52.84 - - 640x640x3 - - 14.28 - - 64.58 - - `download `_ - - `link `_ - - `download `_ - - 61.9846 - - 94.4347 - * - centerpose_regnetx_800mf - - 44.07 - - 42.97 - - 512x512x3 - - 12.31 - - 86.12 - - `download `_ - - `link `_ - - `download `_ - - 82.4346 - - 120.143 - * - centerpose_repvgg_a0 |star| - - 39.17 - - 37.17 - - 416x416x3 - - 11.71 - - 28.27 - - `download `_ - - `link `_ - - `download `_ - - 137.609 - - 242.58 diff --git a/docs/public_models/HAILO15M_Classification.rst b/docs/public_models/HAILO15M/HAILO15M_classification.rst similarity index 57% rename from docs/public_models/HAILO15M_Classification.rst rename to docs/public_models/HAILO15M/HAILO15M_classification.rst index 66082abe..82fa75f2 100644 --- a/docs/public_models/HAILO15M_Classification.rst +++ 
b/docs/public_models/HAILO15M/HAILO15M_classification.rst @@ -1,12 +1,24 @@ + Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Classification`_ + + .. _Classification: Classification @@ -16,292 +28,378 @@ ImageNet ^^^^^^^^ .. list-table:: - :widths: 31 9 7 11 9 8 8 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - Accuracy (top1) - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - efficientnet_l + - NV12 Compiled + * - efficientnet_l - 80.46 - 79.36 + - 57.9951 + - 98.1713 - 300x300x3 - 10.55 - 19.4 - `download `_ - `link `_ - - `download `_ - - 57.9951 - - 98.1713 - * - efficientnet_lite4 + - `download `_ + - `download `_ + * - efficientnet_lite0 + - 74.99 + - 73.81 + - None + - None + - 224x224x3 + - 4.63 + - 0.78 + - `download `_ + - `link `_ + - `download `_ + - `download `_ + * - efficientnet_lite1 + - 76.68 + - 76.21 + - None + - None + - 240x240x3 + - 5.39 + - 1.22 + - `download `_ + - `link `_ + - `download `_ + - `download `_ + * - efficientnet_lite2 + - 77.45 + - 76.74 + - None + - None + - 260x260x3 + - 6.06 + - 1.74 + - `download `_ + - `link `_ + - `download `_ + - `download `_ + * - efficientnet_lite3 + - 79.29 + - 78.42 + - None + - None + - 280x280x3 + - 8.16 + - 2.8 + - `download `_ + - `link `_ + - `download `_ + - `download 
`_ + * - efficientnet_lite4 - 80.79 - 79.99 + - 73.7215 + - 151.255 - 300x300x3 - 12.95 - 5.10 - `download `_ - `link `_ - - `download `_ - - 73.7215 - - 151.255 - * - efficientnet_m |rocket| + - `download `_ + - `download `_ + * - efficientnet_m - 78.91 - 78.63 + - 127.623 + - 253.787 - 240x240x3 - 6.87 - 7.32 - `download `_ - `link `_ - - `download `_ - - 127.623 - - 253.787 - * - hardnet39ds + - `download `_ + - `download `_ + * - efficientnet_s + - 77.64 + - 77.32 + - None + - None + - 224x224x3 + - 5.41 + - 4.72 + - `download `_ + - `link `_ + - `download `_ + - `download `_ + * - hardnet39ds - 73.43 - 72.92 + - 280 + - 794 - 224x224x3 - 3.48 - 0.86 - `download `_ - `link `_ - - `download `_ - - 282.524 - - 794.832 - * - hardnet68 + - `download `_ + - `download `_ + * - hardnet68 - 75.47 - 75.04 + - 120 + - 255 - 224x224x3 - 17.56 - 8.5 - `download `_ - `link `_ - - `download `_ - - 124.168 - - 258.157 - * - inception_v1 + - `download `_ + - `download `_ + * - inception_v1 - 69.74 - 69.54 + - 234 + - 523 - 224x224x3 - 6.62 - 3 - `download `_ - `link `_ - - `download `_ - - 237.247 - - 522.84 - * - mobilenet_v1 + - `download `_ + - `download `_ + * - mobilenet_v1 - 70.97 - 70.26 + - 1427 + - 1427 - 224x224x3 - 4.22 - 1.14 - `download `_ - `link `_ - - `download `_ - - 1427.43 - - 1427.4 - * - mobilenet_v2_1.0 |rocket| + - `download `_ + - `download `_ + * - mobilenet_v2_1.0 |rocket| - 71.78 - 71.0 + - 352 + - 823 - 224x224x3 - 3.49 - 0.62 - `download `_ - `link `_ - - `download `_ - - 351.244 - - 821.737 - * - mobilenet_v2_1.4 + - `download `_ + - `download `_ + * - mobilenet_v2_1.4 - 74.18 - 73.18 + - 267 + - 638 - 224x224x3 - 6.09 - 1.18 - `download `_ - `link `_ - - `download `_ - - 268.706 - - 638.46 - * - mobilenet_v3 + - `download `_ + - `download `_ + * - mobilenet_v3 - 72.21 - 71.73 + - 323 + - 777 - 224x224x3 - 4.07 - 2 - `download `_ - `link `_ - - `download `_ - - 324.885 - - 778.084 - * - mobilenet_v3_large_minimalistic + - `download `_ + - 
`download `_ + * - mobilenet_v3_large_minimalistic - 72.11 - - 70.96 + - 70.61 + - 446 + - 1248 - 224x224x3 - 3.91 - 0.42 - `download `_ - `link `_ - - `download `_ - - 447.24 - - 1248.59 - * - regnetx_1.6gf + - `download `_ + - `download `_ + * - regnetx_1.6gf - 77.05 - 76.75 + - 302 + - 775 - 224x224x3 - 9.17 - 3.22 - `download `_ - `link `_ - - `download `_ - - 303.337 - - 775.166 - * - regnetx_800mf + - `download `_ + - `download `_ + * - regnetx_800mf - 75.16 - 74.84 + - 410 + - 1105 - 224x224x3 - 7.24 - 1.6 - `download `_ - `link `_ - - `download `_ - - 413.96 - - 1107.48 - * - repvgg_a1 + - `download `_ + - `download `_ + * - repvgg_a1 - 74.4 - 72.4 + - 319 + - 718 - 224x224x3 - 12.79 - 4.7 - `download `_ - `link `_ - - `download `_ - - 321.519 - - 719.195 - * - repvgg_a2 + - `download `_ + - `download `_ + * - repvgg_a2 - 76.52 - 74.52 + - 180 + - 345 - 224x224x3 - 25.5 - 10.2 - `download `_ - `link `_ - - `download `_ - - 177.889 - - 322.874 - * - resmlp12_relu + - `download `_ + - `download `_ + * - resmlp12_relu - 75.26 - 74.32 + - 87 + - 307 - 224x224x3 - 15.77 - 6.04 - `download `_ - `link `_ - - `download `_ - - 87.3561 - - 308.33 - * - resnet_v1_18 + - `download `_ + - `download `_ + * - resnet_v1_18 - 71.26 - 71.06 + - 381 + - 891 - 224x224x3 - 11.68 - 3.64 - `download `_ - `link `_ - - `download `_ - - 364.787 - - 846.451 - * - resnet_v1_34 + - `download `_ + - `download `_ + * - resnet_v1_34 - 72.7 - 72.14 + - 209 + - 466 - 224x224x3 - 21.79 - 7.34 - `download `_ - `link `_ - - `download `_ - - 184.44 - - 390.499 - * - resnet_v1_50 |rocket| |star| + - `download `_ + - `download `_ + * - resnet_v1_50 |rocket| |star| - 75.12 - 74.47 + - 179 + - 416 - 224x224x3 - 25.53 - 6.98 - `download `_ - `link `_ - - `download `_ - - 224.794 - - 565.76 - * - resnext26_32x4d + - `download `_ + - `download `_ + * - resnext26_32x4d - 76.18 - 75.78 + - 249 + - 501 - 224x224x3 - 15.37 - 4.96 - `download `_ - `link `_ - - `download `_ - - 245.252 - - 500.296 - * - 
resnext50_32x4d + - `download `_ + - `download `_ + * - resnext50_32x4d - 79.31 - 78.21 + - 156 + - 336 - 224x224x3 - 24.99 - 8.48 - `download `_ - `link `_ - - `download `_ - - 142.988 - - 311.237 - * - squeezenet_v1.1 + - `download `_ + - `download `_ + * - squeezenet_v1.1 - 59.85 - 59.4 + - 647 + - 1275 - 224x224x3 - 1.24 - 0.78 - `download `_ - `link `_ - - `download `_ - - 651.115 - - 1276.19 - * - vit_base_bn + - `download `_ + - `download `_ + * - vit_base_bn |rocket| - 79.98 - 78.58 + - 43 + - 103 - 224x224x3 - 86.5 - - 34.25 + - 35.188 - `download `_ - `link `_ - - `download `_ - - 42.9047 - - 101.898 - * - vit_small_bn + - `download `_ + - `download `_ + * - vit_small_bn - 78.12 - 77.02 + - 97 + - 273 - 224x224x3 - 21.12 - 8.62 - `download `_ - `link `_ - - `download `_ - - 93.3559 - - 267.595 - * - vit_tiny_bn + - `download `_ + - `download `_ + * - vit_tiny_bn - 68.95 - - 66.75 + - 67.15 + - 174 + - 688 - 224x224x3 - 5.73 - 2.2 - `download `_ - `link `_ - - `download `_ - - 154.715 - - 556.287 + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO15M_Depth_Estimation.rst b/docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst similarity index 53% rename from docs/public_models/HAILO15M_Depth_Estimation.rst rename to docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst index 203750c3..64715e78 100644 --- a/docs/public_models/HAILO15M_Depth_Estimation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
+ +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Depth Estimation`_ + + .. _Depth Estimation: Depth Estimation @@ -17,39 +28,42 @@ NYU ^^^ .. list-table:: - :widths: 34 7 7 11 9 8 8 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - RMSE - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - fast_depth |star| + - NV12 Compiled + * - fast_depth |star| - 0.6 - 0.62 + - 324 + - 717 - 224x224x3 - 1.35 - 0.74 - `download `_ - `link `_ - - `download `_ - - 317.237 - - 768.32 - * - scdepthv3 + - `download `_ + - `download `_ + * - scdepthv3 - 0.48 - 0.51 + - 153 + - 274 - 256x320x3 - 14.8 - 10.7 - `download `_ - `link `_ - - `download `_ - - 157.349 - - 276.312 + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO15M/HAILO15M_face_attribute.rst b/docs/public_models/HAILO15M/HAILO15M_face_attribute.rst new file mode 100644 index 00000000..66536455 --- /dev/null +++ b/docs/public_models/HAILO15M/HAILO15M_face_attribute.rst @@ -0,0 +1,57 @@ + +Public Pre-Trained Models +========================= + +.. |rocket| image:: images/rocket.png + :width: 18 + +.. |star| image:: images/star.png + :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Face Attribute`_ + + +.. 
_Face Attribute: + +Face Attribute +-------------- + +CELEBA +^^^^^^ + +.. list-table:: + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 + :header-rows: 1 + + * - Network Name + - Mean Accuracy + - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) + - Input Resolution (HxWxC) + - Params (M) + - OPS (G) + - Pretrained + - Source + - Compiled + - NV12 Compiled + * - face_attr_resnet_v1_18 + - 81.19 + - 81.09 + - 430 + - 915 + - 218x178x3 + - 11.74 + - 3 + - `download `_ + - `link `_ + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO15M_Face_Detection.rst b/docs/public_models/HAILO15M/HAILO15M_face_detection.rst similarity index 59% rename from docs/public_models/HAILO15M_Face_Detection.rst rename to docs/public_models/HAILO15M/HAILO15M_face_detection.rst index 80c74822..0f0cc631 100644 --- a/docs/public_models/HAILO15M_Face_Detection.rst +++ b/docs/public_models/HAILO15M/HAILO15M_face_detection.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Face Detection`_ + + .. _Face Detection: Face Detection @@ -17,72 +28,78 @@ WiderFace ^^^^^^^^^ .. 
list-table:: - :widths: 24 7 12 11 9 8 8 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mAP - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - lightface_slim |star| + - NV12 Compiled + * - lightface_slim |star| - 39.7 - 39.22 + - 644 + - 1534 - 240x320x3 - 0.26 - 0.16 - `download `_ - `link `_ - - `download `_ - - 649.138 - - 1535.33 - * - retinaface_mobilenet_v1 |star| + - `download `_ + - `download `_ + * - retinaface_mobilenet_v1 |star| - 81.27 - 81.17 + - 49 + - 65 - 736x1280x3 - 3.49 - 25.14 - `download `_ - `link `_ - - `download `_ - - 50.0765 - - 65.3995 - * - scrfd_10g + - `download `_ + - `download `_ + * - scrfd_10g - 82.13 - 82.03 + - 90 + - 131 - 640x640x3 - 4.23 - 26.74 - `download `_ - `link `_ - - `download `_ - - 90.3432 - - 131.564 - * - scrfd_2.5g + - `download `_ + - `download `_ + * - scrfd_2.5g - 76.59 - 76.32 + - 206 + - 343 - 640x640x3 - 0.82 - 6.88 - `download `_ - `link `_ - - `download `_ - - 206.764 - - 320.546 - * - scrfd_500m + - `download `_ + - `download `_ + * - scrfd_500m - 68.98 - 68.88 + - 232 + - 409 - 640x640x3 - 0.63 - 1.5 - `download `_ - `link `_ - - `download `_ - - 231.127 - - 407.787 + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO8L_Face_Recognition.rst b/docs/public_models/HAILO15M/HAILO15M_face_recognition.rst similarity index 51% rename from docs/public_models/HAILO8L_Face_Recognition.rst rename to docs/public_models/HAILO15M/HAILO15M_face_recognition.rst index 14fc2d6e..952140e3 100644 --- a/docs/public_models/HAILO8L_Face_Recognition.rst +++ b/docs/public_models/HAILO15M/HAILO15M_face_recognition.rst @@ -2,11 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. 
|star| image:: images/star.png :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Face Recognition`_ + + .. _Face Recognition: Face Recognition @@ -16,39 +28,42 @@ LFW ^^^ .. list-table:: - :widths: 12 7 12 14 9 8 10 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - lfw verification accuracy - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - arcface_mobilefacenet + - NV12 Compiled + * - arcface_mobilefacenet - 99.43 - 99.41 + - 439 + - 1188 - 112x112x3 - 2.04 - 0.88 - `download `_ - `link `_ - - `download `_ - - 334.052 - - 1114.78 - * - arcface_r50 + - `download `_ + - `download `_ + * - arcface_r50 - 99.72 - 99.71 + - 113 + - 235 - 112x112x3 - 31.0 - 12.6 - `download `_ - `link `_ - - `download `_ - - 80.6634 - - 205.256 + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO15M/HAILO15M_facial_landmark_detection.rst b/docs/public_models/HAILO15M/HAILO15M_facial_landmark_detection.rst new file mode 100644 index 00000000..611c8a9d --- /dev/null +++ b/docs/public_models/HAILO15M/HAILO15M_facial_landmark_detection.rst @@ -0,0 +1,57 @@ + +Public Pre-Trained Models +========================= + +.. |rocket| image:: images/rocket.png + :width: 18 + +.. |star| image:: images/star.png + :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
+ +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Facial Landmark Detection`_ + + +.. _Facial Landmark Detection: + +Facial Landmark Detection +------------------------- + +AFLW2k3d +^^^^^^^^ + +.. list-table:: + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 + :header-rows: 1 + + * - Network Name + - NME + - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) + - Input Resolution (HxWxC) + - Params (M) + - OPS (G) + - Pretrained + - Source + - Compiled + - NV12 Compiled + * - tddfa_mobilenet_v1 |star| + - 3.68 + - 4.05 + - 4483 + - 4484 + - 120x120x3 + - 3.26 + - 0.36 + - `download `_ + - `link `_ + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst b/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst new file mode 100644 index 00000000..38000d97 --- /dev/null +++ b/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst @@ -0,0 +1,52 @@ + +Public Pre-Trained Models +========================= + +.. |rocket| image:: images/rocket.png + :width: 18 + +.. |star| image:: images/star.png + :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Hand Landmark detection`_ + + +.. _Hand Landmark detection: + +Hand Landmark detection +----------------------- + +Hand Landmark +^^^^^^^^^^^^^ + +.. 
list-table:: + :header-rows: 1 + + * - Network Name + - FPS (Batch Size=1) + - FPS (Batch Size=8) + - Input Resolution (HxWxC) + - Params (M) + - OPS (G) + - Pretrained + - Source + - Compiled + - NV12 Compiled + * - hand_landmark_lite + - 387 + - 1102 + - 224x224x3 + - 1.01 + - 0.3 + - `download `_ + - `link `_ + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO8_Image_Denoising.rst b/docs/public_models/HAILO15M/HAILO15M_image_denoising.rst similarity index 57% rename from docs/public_models/HAILO8_Image_Denoising.rst rename to docs/public_models/HAILO15M/HAILO15M_image_denoising.rst index fef5b5e7..a3562a12 100644 --- a/docs/public_models/HAILO8_Image_Denoising.rst +++ b/docs/public_models/HAILO15M/HAILO15M_image_denoising.rst @@ -2,11 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Image Denoising`_ + + .. _Image Denoising: Image Denoising @@ -16,58 +28,62 @@ BSD68 ^^^^^ .. 
list-table:: - :widths: 30 7 11 14 9 8 12 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - PSNR - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - dncnn3 + - NV12 Compiled + * - dncnn3 - 31.46 - 31.26 + - 20 + - 20 - 321x481x1 - 0.66 - 205.26 - `download `_ - `link `_ - - `download `_ - - 60.2436 - - 60.241 + - `download `_ + - `download `_ CBSD68 ^^^^^^ .. list-table:: - :widths: 30 7 11 14 9 8 12 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - PSNR - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - dncnn_color_blind + - NV12 Compiled + * - dncnn_color_blind - 33.87 - 32.97 + - 20 + - 20 - 321x481x3 - 0.66 - 205.97 - `download `_ - `link `_ - - `download `_ - - 60.2437 - - 60.2424 + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO8_Instance_Segmentation.rst b/docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst similarity index 64% rename from docs/public_models/HAILO8_Instance_Segmentation.rst rename to docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst index a3b2978a..2852b7c2 100644 --- a/docs/public_models/HAILO8_Instance_Segmentation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
+ +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Instance Segmentation`_ + + .. _Instance Segmentation: Instance Segmentation @@ -17,116 +28,126 @@ COCO ^^^^ .. list-table:: - :widths: 34 7 7 11 9 8 8 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mAP - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - yolact_regnetx_1.6gf + - NV12 Compiled + * - yolact_regnetx_1.6gf - 27.57 - 27.27 + - 34 + - 43 - 512x512x3 - 30.09 - 125.34 - `download `_ - `link `_ - - `download `_ - - 40.2637 - - 79.7636 - * - yolact_regnetx_800mf + - `download `_ + - `download `_ + * - yolact_regnetx_800mf - 25.61 - 25.5 + - 40 + - 50 - 512x512x3 - 28.3 - 116.75 - `download `_ - `link `_ - - `download `_ - - 35.1857 - - 65.5711 - * - yolov5l_seg + - `download `_ + - `download `_ + * - yolov5l_seg - 39.78 - 39.09 + - 20 + - 24 - 640x640x3 - 47.89 - 147.88 - `download `_ - `link `_ - - `download `_ - - 33.4538 - - 53.3109 - * - yolov5m_seg + - `download `_ + - `download `_ + * - yolov5m_seg - 37.05 - 36.32 + - 43 + - 56 - 640x640x3 - 32.60 - 70.94 - `download `_ - `link `_ - - `download `_ - - 61.7092 - - 106.821 - * - yolov5n_seg |star| + - `download `_ + - `download `_ + * - yolov5n_seg |star| - 23.35 - 22.75 + - 148 + - 163 - 640x640x3 - 1.99 - 7.1 - `download `_ - `link `_ - - `download `_ - - 184.261 - - 184.236 - * - yolov5s_seg + - `download `_ + - `download `_ + * - yolov5s_seg - 31.57 - - 30.49 + - 30.8 + - 82 + - 114 - 640x640x3 - 7.61 - 26.42 - `download `_ - `link `_ - - `download `_ - - 92.2305 - - 92.2163 - * - yolov8m_seg + - `download `_ + - `download `_ + * - 
yolov8m_seg - 40.6 - - 39.88 + - 39.85 + - 30 + - 41 - 640x640x3 - 27.3 - 110.2 - `download `_ - `link `_ - - `download `_ - - 44.5823 - - 85.8304 - * - yolov8n_seg + - `download `_ + - `download `_ + * - yolov8n_seg - 30.32 - 29.68 + - 143 + - 242 - 640x640x3 - 3.4 - 12.04 - `download `_ - `link `_ - - `download `_ - - 165.335 - - 408.028 - * - yolov8s_seg + - `download `_ + - `download `_ + * - yolov8s_seg - 36.63 - - 36.03 + - 36.13 + - 66 + - 94 - 640x640x3 - 11.8 - 42.6 - `download `_ - `link `_ - - `download `_ - - 99.3603 - - 202.581 + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO15H_Low_Light_Enhancement.rst b/docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst similarity index 53% rename from docs/public_models/HAILO15H_Low_Light_Enhancement.rst rename to docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst index 95986b8d..9c98d8f2 100644 --- a/docs/public_models/HAILO15H_Low_Light_Enhancement.rst +++ b/docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Low Light Enhancement`_ + + .. _Low Light Enhancement: Low Light Enhancement @@ -17,39 +28,42 @@ LOL ^^^ .. 
list-table:: - :widths: 30 7 11 14 9 8 12 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - PSNR - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - zero_dce + - NV12 Compiled + * - zero_dce - 16.23 - 16.24 + - 71 + - 79 - 400x600x3 - 0.21 - 38.2 - `download `_ - `link `_ - - `download `_ - - 69.8101 - - 69.8101 - * - zero_dce_pp + - `download `_ + - `download `_ + * - zero_dce_pp - 15.95 - 15.82 + - 54 + - 59 - 400x600x3 - 0.02 - 4.84 - `download `_ - `link `_ - - `download `_ - - 29.6328 - - 29.6328 + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO8L_Object_Detection.rst b/docs/public_models/HAILO15M/HAILO15M_object_detection.rst similarity index 73% rename from docs/public_models/HAILO8L_Object_Detection.rst rename to docs/public_models/HAILO15M/HAILO15M_object_detection.rst index 283f342e..b54c16fa 100644 --- a/docs/public_models/HAILO8L_Object_Detection.rst +++ b/docs/public_models/HAILO15M/HAILO15M_object_detection.rst @@ -2,11 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Object Detection`_ + + .. _Object Detection: Object Detection @@ -16,520 +28,578 @@ COCO ^^^^ .. 
list-table:: - :widths: 33 8 7 12 8 8 8 7 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mAP - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) + - NV12 Compiled * - centernet_resnet_v1_18_postprocess - 26.3 - 23.31 + - 85 + - 116 - 512x512x3 - 14.22 - 31.21 - `download `_ - `link `_ - - `download `_ - - 95.3632 - - 149.8 + - `download `_ + - `download `_ * - centernet_resnet_v1_50_postprocess - 31.78 - 29.23 + - 53 + - 71 - 512x512x3 - 30.07 - 56.92 - `download `_ - `link `_ - - `download `_ - - 52.0467 - - 81.0303 + - `download `_ + - `download `_ * - damoyolo_tinynasL20_T - 42.8 - - 41.7 + - 42.3 + - 100 + - 169 - 640x640x3 - 11.35 - 18.02 - `download `_ - `link `_ - - `download `_ - - 93.7939 - - 183.896 + - `download `_ + - `download `_ * - damoyolo_tinynasL25_S - 46.53 - - 46.04 + - 45.34 + - 80 + - 145 - 640x640x3 - 16.25 - 37.64 - `download `_ - `link `_ - - `download `_ - - 63.797 - - 109.853 + - `download `_ + - `download `_ * - damoyolo_tinynasL35_M - 49.7 - - 47.9 + - 47.7 + - 40 + - 63 - 640x640x3 - 33.98 - 61.64 - `download `_ - `link `_ - - `download `_ - - 31.9208 - - 57.5705 + - `download `_ + - `download `_ * - detr_resnet_v1_18_bn - 33.91 - - 30.36 + - 30.91 + - 18 + - 32 - 800x800x3 - 32.42 - - 58.97 + - 61.87 - `download `_ - `link `_ - - `download `_ - - 16.544 - - 33.436 + - `download `_ + - `download `_ * - efficientdet_lite0 - 27.32 - 26.49 + - None + - None - 320x320x3 - 3.56 - 1.94 - `download `_ - `link `_ - - `download `_ - - 71.6623 - - 174.318 + - `download `_ + - `download `_ * - efficientdet_lite1 - 32.27 - 31.72 + - None + - None - 384x384x3 - 4.73 - 4 - `download `_ - `link `_ - - `download `_ - - 44.6487 - - 93.3219 + - `download `_ + - `download `_ * - efficientdet_lite2 - 35.95 - 34.67 + - None + - None - 448x448x3 - 5.93 - 6.84 - `download `_ - `link `_ - - 
`download `_ - - 25.5898 - - 44.7808 + - `download `_ + - `download `_ * - nanodet_repvgg |star| - 29.3 - 28.53 + - 187 + - 315 - 416x416x3 - 6.74 - 11.28 - - `download `_ + - `download `_ - `link `_ - - `download `_ - - 500.345 - - 500.336 + - `download `_ + - `download `_ * - nanodet_repvgg_a12 - 33.73 - - 31.33 + - 32.13 + - 108 + - 159 - 640x640x3 - 5.13 - 28.23 - - `download `_ + - `download `_ - `link `_ - - `download `_ - - 107.291 - - 173.794 + - `download `_ + - `download `_ * - nanodet_repvgg_a1_640 - 33.28 - 32.88 + - 82 + - 112 - 640x640x3 - 10.79 - 42.8 - - `download `_ + - `download `_ - `link `_ - - `download `_ - - 101.027 - - 101.026 + - `download `_ + - `download `_ * - ssd_mobilenet_v1 |rocket| |star| - 23.19 - 22.29 + - 182 + - 351 - 300x300x3 - 6.79 - 2.5 - `download `_ - `link `_ - - `download `_ - - 139.074 - - 292.96 + - `download `_ + - `download `_ * - ssd_mobilenet_v2 - 24.15 - - 22.94 + - 22.95 + - 136 + - 256 - 300x300x3 - 4.46 - 1.52 - `download `_ - `link `_ - - `download `_ - - 95.7241 - - 218.572 + - `download `_ + - `download `_ * - tiny_yolov3 - 14.66 - 14.41 + - 270 + - 465 - 416x416x3 - 8.85 - 5.58 - `download `_ - `link `_ - - `download `_ - - 623.301 - - 623.296 + - `download `_ + - `download `_ * - tiny_yolov4 - 19.18 - 17.73 + - 273 + - 430 - 416x416x3 - 6.05 - 6.92 - `download `_ - `link `_ - - `download `_ - - 474.288 - - 474.283 + - `download `_ + - `download `_ * - yolov3 |star| - 38.42 - 38.37 + - 20 + - 24 - 608x608x3 - 68.79 - 158.10 - `download `_ - `link `_ - - `download `_ - - 15.527 - - 19.18 + - `download `_ + - `download `_ * - yolov3_416 - 37.73 - 37.53 + - 35 + - 51 - 416x416x3 - 61.92 - 65.94 - `download `_ - `link `_ - - `download `_ - - 25.2653 - - 39.9843 - * - yolov3_gluon |rocket| |star| + - `download `_ + - `download `_ + * - yolov3_gluon |star| - 37.28 - 35.64 + - 20 + - 24 - 608x608x3 - 68.79 - 158.1 - `download `_ - `link `_ - - `download `_ - - 14.861 - - 19.19 + - `download `_ + - `download `_ * - 
yolov3_gluon_416 |star| - 36.27 - 34.92 + - 35 + - 51 - 416x416x3 - 61.92 - 65.94 - `download `_ - `link `_ - - `download `_ - - 25.19 - - 39.98 + - `download `_ + - `download `_ * - yolov4_leaky |star| - 42.37 - 41.08 + - 28 + - 39 - 512x512x3 - 64.33 - 91.04 - `download `_ - `link `_ - - `download `_ - - 24.657 - - 38.382 + - `download `_ + - `download `_ * - yolov5m - 42.59 - 41.19 + - 53 + - 75 - 640x640x3 - 21.78 - 52.17 - `download `_ - `link `_ - - `download `_ - - 46.3823 - - 76.2339 + - `download `_ + - `download `_ * - yolov5m6_6.1 - 50.67 - 48.97 + - 16 + - 19 - 1280x1280x3 - 35.70 - 200.04 - `download `_ - `link `_ - - `download `_ - - 14.794 - - 18.657 + - `download `_ + - `download `_ * - yolov5m_6.1 - 44.8 - 43.36 + - 53 + - 75 - 640x640x3 - 21.17 - 48.96 - `download `_ - `link `_ - - `download `_ - - 52.5796 - - 83.1643 + - `download `_ + - `download `_ * - yolov5m_wo_spp |rocket| - 43.06 - - 40.76 + - 41.06 + - 67.6233 + - 94.4975 - 640x640x3 - 22.67 - 52.88 - `download `_ - `link `_ - - `download `_ - - 50.931 - - 81.035 + - `download `_ + - `download `_ * - yolov5s |star| - 35.33 - 33.98 + - 117 + - 179 - 640x640x3 - 7.46 - 17.44 - `download `_ - `link `_ - - `download `_ - - 87.263 - - 163.191 + - `download `_ + - `download `_ * - yolov5s_c3tr - 37.13 - 35.63 + - 100 + - 177 - 640x640x3 - 10.29 - 17.02 - `download `_ - `link `_ - - `download `_ - - 77.7032 - - 156.74 + - `download `_ + - `download `_ * - yolov5xs_wo_spp - 33.18 - 32.2 + - 180 + - 329 - 512x512x3 - 7.85 - 11.36 - `download `_ - `link `_ - - `download `_ - - 135.015 - - 306.014 + - `download `_ + - `download `_ * - yolov5xs_wo_spp_nms_core - 32.57 - - 31.06 + - 30.86 + - 180 + - 329 - 512x512x3 - 7.85 - 11.36 - `download `_ - `link `_ - - `download `_ - - 135.259 - - 306.096 + - `download `_ + - `download `_ * - yolov6n - 34.28 - - 32.18 + - 32.28 + - 178 + - 318 - 640x640x3 - 4.32 - 11.12 - `download `_ - `link `_ - - `download `_ - - 159.005 - - 337.45 + - `download `_ + - 
`download `_ * - yolov6n_0.2.1 - 35.16 - - 33.66 + - 33.87 + - 181 + - 323 - 640x640x3 - 4.33 - 11.06 - `download `_ - `link `_ - - `download `_ - - 159.137 - - 337.457 + - `download `_ + - `download `_ * - yolov7 - 50.59 - 47.89 + - 28 + - 37 - 640x640x3 - 36.91 - 104.51 - `download `_ - `link `_ - - `download `_ - - 25.0569 - - 37.3181 + - `download `_ + - `download `_ * - yolov7_tiny - 37.07 - - 35.97 + - 36.07 + - 127 + - 192 - 640x640x3 - 6.22 - 13.74 - `download `_ - `link `_ - - `download `_ - - 114.461 - - 195.12 + - `download `_ + - `download `_ * - yolov7e6 - 55.37 - 53.47 + - 6 + - 7 - 1280x1280x3 - 97.20 - 515.12 - `download `_ - `link `_ - - `download `_ - - 4.906 - - 4.906 + - `download `_ + - `download `_ * - yolov8l - 52.44 - 51.78 + - 18 + - 23 - 640x640x3 - 43.7 - 165.3 - `download `_ - `link `_ - - `download `_ - - 18.88 - - 26.88 + - `download `_ + - `download `_ * - yolov8m - 49.91 - 49.11 + - 38 + - 55 - 640x640x3 - 25.9 - 78.93 - `download `_ - `link `_ - - `download `_ - - 37.919 - - 60.241 + - `download `_ + - `download `_ * - yolov8n - 37.02 - 36.32 + - 177 + - 327 - 640x640x3 - 3.2 - 8.74 - `download `_ - `link `_ - - `download `_ - - 139.213 - - 278.816 + - `download `_ + - `download `_ * - yolov8s - 44.58 - 43.98 + - 84 + - 131 - 640x640x3 - 11.2 - 28.6 - `download `_ - `link `_ - - `download `_ - - 84.26 - - 145.274 + - `download `_ + - `download `_ * - yolov8x - 53.45 - 52.75 + - 11 + - 13 - 640x640x3 - 68.2 - 258 - `download `_ - `link `_ - - `download `_ - - 19.1925 - - 30.3876 + - `download `_ + - `download `_ + * - yolov9c + - 52.8 + - 50.7 + - None + - None + - 640x640x3 + - 25.3 + - 102.1 + - `download `_ + - `link `_ + - `download `_ + - `download `_ * - yolox_l_leaky |star| - 48.69 - - 46.71 + - 46.59 + - 18 + - 23 - 640x640x3 - 54.17 - 155.3 - `download `_ - `link `_ - - `download `_ - - 19.999 - - 27.722 + - `download `_ + - `download `_ * - yolox_s_leaky - 38.12 - 37.27 + - 86 + - 129 - 640x640x3 - 8.96 - 26.74 - `download 
`_ - `link `_ - - `download `_ - - 80.768 - - 129.54 + - `download `_ + - `download `_ * - yolox_s_wide_leaky - 42.4 - 40.97 + - 52 + - 70 - 640x640x3 - 20.12 - 59.46 - `download `_ - `link `_ - - `download `_ - - 48.6473 - - 70.904 + - `download `_ + - `download `_ * - yolox_tiny - 32.64 - 31.39 + - 182 + - 363 - 416x416x3 - 5.05 - 6.44 - `download `_ - `link `_ - - `download `_ - - 152.341 - - 343.324 + - `download `_ + - `download `_ VisDrone ^^^^^^^^ .. list-table:: - :widths: 31 7 9 12 9 8 9 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mAP - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) + - NV12 Compiled * - ssd_mobilenet_v1_visdrone |star| - 2.37 - 2.22 + - 243 + - 527 - 300x300x3 - 5.64 - 2.3 - `download `_ - `link `_ - - `download `_ - - 208.918 - - 488.485 + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO15M/HAILO15M_person_attribute.rst b/docs/public_models/HAILO15M/HAILO15M_person_attribute.rst new file mode 100644 index 00000000..04802c5f --- /dev/null +++ b/docs/public_models/HAILO15M/HAILO15M_person_attribute.rst @@ -0,0 +1,57 @@ + +Public Pre-Trained Models +========================= + +.. |rocket| image:: images/rocket.png + :width: 18 + +.. |star| image:: images/star.png + :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Person Attribute`_ + + +.. _Person Attribute: + +Person Attribute +---------------- + +PETA +^^^^ + +.. 
list-table:: + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 + :header-rows: 1 + + * - Network Name + - Mean Accuracy + - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) + - Input Resolution (HxWxC) + - Params (M) + - OPS (G) + - Pretrained + - Source + - Compiled + - NV12 Compiled + * - person_attr_resnet_v1_18 + - 82.5 + - 82.61 + - 361 + - 815 + - 224x224x3 + - 11.19 + - 3.64 + - `download `_ + - `link `_ + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst b/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst new file mode 100644 index 00000000..3c234b8a --- /dev/null +++ b/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst @@ -0,0 +1,69 @@ + +Public Pre-Trained Models +========================= + +.. |rocket| image:: images/rocket.png + :width: 18 + +.. |star| image:: images/star.png + :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Person Re-ID`_ + + +.. _Person Re-ID: + +Person Re-ID +------------ + +Market1501 +^^^^^^^^^^ + +.. 
list-table:: + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 + :header-rows: 1 + + * - Network Name + - rank1 + - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) + - Input Resolution (HxWxC) + - Params (M) + - OPS (G) + - Pretrained + - Source + - Compiled + - NV12 Compiled + * - osnet_x1_0 + - 94.43 + - 93.63 + - 114 + - 321 + - 256x128x3 + - 2.19 + - 1.98 + - `download `_ + - `link `_ + - `download `_ + - `download `_ + * - repvgg_a0_person_reid_512 |star| + - 89.9 + - 89.3 + - 2632 + - 2632 + - 256x128x3 + - 7.68 + - 1.78 + - `download `_ + - `link `_ + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst b/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst new file mode 100644 index 00000000..0b16642b --- /dev/null +++ b/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst @@ -0,0 +1,105 @@ + +Public Pre-Trained Models +========================= + +.. |rocket| image:: images/rocket.png + :width: 18 + +.. |star| image:: images/star.png + :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Pose Estimation`_ + + +.. _Pose Estimation: + +Pose Estimation +--------------- + +COCO +^^^^ + +.. 
list-table:: + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 + :header-rows: 1 + + * - Network Name + - mAP + - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) + - Input Resolution (HxWxC) + - Params (M) + - OPS (G) + - Pretrained + - Source + - Compiled + - NV12 Compiled + * - centerpose_regnetx_1.6gf_fpn |star| + - 53.54 + - 53.53 + - 46 + - 63 + - 640x640x3 + - 14.28 + - 64.58 + - `download `_ + - `link `_ + - `download `_ + - `download `_ + * - centerpose_regnetx_800mf + - 44.07 + - 43.07 + - 56 + - 75 + - 512x512x3 + - 12.31 + - 86.12 + - `download `_ + - `link `_ + - `download `_ + - `download `_ + * - centerpose_repvgg_a0 |star| + - 39.17 + - 37.17 + - 95 + - 139 + - 416x416x3 + - 11.71 + - 28.27 + - `download `_ + - `link `_ + - `download `_ + - `download `_ + * - yolov8m_pose + - 64.26 + - 61.66 + - 37 + - 53 + - 640x640x3 + - 26.4 + - 81.02 + - `download `_ + - `link `_ + - `download `_ + - `download `_ + * - yolov8s_pose + - 59.2 + - 55.6 + - 79 + - 123 + - 640x640x3 + - 11.6 + - 30.2 + - `download `_ + - `link `_ + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO15M_Semantic_Segmentation.rst b/docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst similarity index 58% rename from docs/public_models/HAILO15M_Semantic_Segmentation.rst rename to docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst index a3e4d72b..6df69c7f 100644 --- a/docs/public_models/HAILO15M_Semantic_Segmentation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
+ +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Semantic Segmentation`_ + + .. _Semantic Segmentation: Semantic Segmentation @@ -17,110 +28,130 @@ Cityscapes ^^^^^^^^^^ .. list-table:: - :widths: 31 7 9 12 9 8 9 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mIoU - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - fcn8_resnet_v1_18 |star| + - NV12 Compiled + * - fcn8_resnet_v1_18 |star| - 69.41 - 69.21 + - 18 + - 20 - 1024x1920x3 - 11.20 - 142.82 - `download `_ - `link `_ - - `download `_ - - 18.8156 - - 21.0842 - * - stdc1 |rocket| + - `download `_ + - `download `_ + * - segformer_b0_bn + - 69.81 + - 68.01 + - None + - None + - 512x1024x3 + - 3.72 + - 35.76 + - `download `_ + - `link `_ + - `download `_ + - `download `_ + * - stdc1 |rocket| - 74.57 - 73.92 + - 15 + - 18 - 1024x1920x3 - 8.27 - 126.47 - `download `_ - `link `_ - - `download `_ - - 15.5433 - - 18.1757 + - `download `_ + - `download `_ Oxford-IIIT Pet ^^^^^^^^^^^^^^^ .. list-table:: - :widths: 31 7 9 12 9 8 9 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mIoU - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - unet_mobilenet_v2 + - NV12 Compiled + * - unet_mobilenet_v2 - 77.32 - 77.02 + - 133 + - 227 - 256x256x3 - 10.08 - 28.88 - `download `_ - `link `_ - - `download `_ - - 133.439 - - 227.59 + - `download `_ + - `download `_ Pascal VOC ^^^^^^^^^^ .. 
list-table:: - :widths: 36 7 9 12 9 8 9 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mIoU - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - deeplab_v3_mobilenet_v2 + - NV12 Compiled + * - deeplab_v3_mobilenet_v2 - 76.05 - 74.8 + - 42 + - 56 - 513x513x3 - 2.10 - 17.65 - `download `_ - `link `_ - - `download `_ - - 42.9866 - - 56.8301 - * - deeplab_v3_mobilenet_v2_wo_dilation + - `download `_ + - `download `_ + * - deeplab_v3_mobilenet_v2_wo_dilation - 71.46 - 71.26 + - 87 + - 119 - 513x513x3 - 2.10 - 3.21 - `download `_ - `link `_ - - `download `_ - - 87.5364 - - 119.235 + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO15H_Single_Person_Pose_Estimation.rst b/docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst similarity index 55% rename from docs/public_models/HAILO15H_Single_Person_Pose_Estimation.rst rename to docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst index 84bd4817..e7ec4403 100644 --- a/docs/public_models/HAILO15H_Single_Person_Pose_Estimation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Single Person Pose Estimation`_ + + .. 
_Single Person Pose Estimation: Single Person Pose Estimation @@ -17,50 +28,42 @@ COCO ^^^^ .. list-table:: - :widths: 24 8 9 18 9 8 9 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - AP - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - mspn_regnetx_800mf |star| + - NV12 Compiled + * - mspn_regnetx_800mf |star| - 70.8 - 70.3 + - 279 + - 661 - 256x192x3 - 7.17 - 2.94 - `download `_ - `link `_ - - `download `_ - - 313.703 - - 789.441 - * - vit_pose_small - - 74.16 - - 71.6 - - 256x192x3 - - 24.29 - - 17.17 - - `download `_ - - `link `_ - - `download `_ - - 31.9975 - - 135.983 - * - vit_pose_small_bn + - `download `_ + - `download `_ + * - vit_pose_small_bn - 72.01 - 70.81 + - 73 + - 191 - 256x192x3 - 24.32 - 17.17 - `download `_ - `link `_ - - `download `_ - - 92.4463 - - 298.863 + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst b/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst new file mode 100644 index 00000000..ea70258b --- /dev/null +++ b/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst @@ -0,0 +1,57 @@ + +Public Pre-Trained Models +========================= + +.. |rocket| image:: images/rocket.png + :width: 18 + +.. |star| image:: images/star.png + :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Stereo Depth Estimation`_ + + +.. _Stereo Depth Estimation: + +Stereo Depth Estimation +----------------------- + +N/A +^^^ + +.. 
list-table:: + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 + :header-rows: 1 + + * - Network Name + - EPE + - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) + - Input Resolution (HxWxC) + - Params (M) + - OPS (G) + - Pretrained + - Source + - Compiled + - NV12 Compiled + * - stereonet + - 91.79 + - 89.14 + - None + - None + - 368x1232x3, 368x1232x3 + - 5.91 + - 126.28 + - `download `_ + - `link `_ + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO8_Super_Resolution.rst b/docs/public_models/HAILO15M/HAILO15M_super_resolution.rst similarity index 57% rename from docs/public_models/HAILO8_Super_Resolution.rst rename to docs/public_models/HAILO15M/HAILO15M_super_resolution.rst index f44d41ed..b246fd69 100644 --- a/docs/public_models/HAILO8_Super_Resolution.rst +++ b/docs/public_models/HAILO15M/HAILO15M_super_resolution.rst @@ -2,11 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Super Resolution`_ + + .. _Super Resolution: Super Resolution @@ -16,50 +28,54 @@ BSD100 ^^^^^^ .. 
list-table:: - :widths: 32 8 7 11 9 8 8 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - PSNR - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - espcn_x2 + - NV12 Compiled + * - espcn_x2 - 31.4 - 30.3 + - 1637 + - 1637 - 156x240x1 - 0.02 - 1.6 - `download `_ - `link `_ - - `download `_ - - 1164.97 - - 1164.95 - * - espcn_x3 + - `download `_ + - `download `_ + * - espcn_x3 - 28.41 - 28.06 + - 1925 + - 1925 - 104x160x1 - 0.02 - 0.76 - `download `_ - `link `_ - - `download `_ - - 2218.23 - - 2218.23 - * - espcn_x4 + - `download `_ + - `download `_ + * - espcn_x4 - 26.83 - 26.58 + - 1908 + - 1908 - 78x120x1 - 0.02 - 0.46 - `download `_ - `link `_ - - `download `_ - - 2189.52 - - 2189.5 + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst b/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst new file mode 100644 index 00000000..ed50f045 --- /dev/null +++ b/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst @@ -0,0 +1,57 @@ + +Public Pre-Trained Models +========================= + +.. |rocket| image:: images/rocket.png + :width: 18 + +.. |star| image:: images/star.png + :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Zero-shot Classification`_ + + +.. _Zero-shot Classification: + +Zero-shot Classification +------------------------ + +CIFAR100 +^^^^^^^^ + +.. 
list-table:: + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 + :header-rows: 1 + + * - Network Name + - Accuracy (top1) + - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) + - Input Resolution (HxWxC) + - Params (M) + - OPS (G) + - Pretrained + - Source + - Compiled + - NV12 Compiled + * - clip_resnet_50 + - 42.07 + - 38.57 + - 85 + - 199 + - 224x224x3 + - 38.72 + - 11.62 + - `download `_ + - `link `_ + - `download `_ + - `download `_ diff --git a/docs/public_models/HAILO15M_Face_Attribute.rst b/docs/public_models/HAILO15M_Face_Attribute.rst deleted file mode 100644 index b6aac8fc..00000000 --- a/docs/public_models/HAILO15M_Face_Attribute.rst +++ /dev/null @@ -1,44 +0,0 @@ - -Public Pre-Trained Models -========================= - -.. |rocket| image:: ../images/rocket.png - :width: 18 - -.. |star| image:: ../images/star.png - :width: 18 - -.. _Face Attribute: - -Face Attribute --------------- - -CELEBA -^^^^^^ - -.. list-table:: - :widths: 30 7 11 14 9 8 12 8 7 7 7 - :header-rows: 1 - - * - Network Name - - Mean Accuracy - - Quantized - - Input Resolution (HxWxC) - - Params (M) - - OPS (G) - - Pretrained - - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - face_attr_resnet_v1_18 - - 81.19 - - 81.09 - - 218x178x3 - - 11.74 - - 3 - - `download `_ - - `link `_ - - `download `_ - - 366.06 - - 884.264 diff --git a/docs/public_models/HAILO15M_Facial_Landmark_Detection.rst b/docs/public_models/HAILO15M_Facial_Landmark_Detection.rst deleted file mode 100644 index 3d5b8783..00000000 --- a/docs/public_models/HAILO15M_Facial_Landmark_Detection.rst +++ /dev/null @@ -1,44 +0,0 @@ - -Public Pre-Trained Models -========================= - -.. |rocket| image:: ../images/rocket.png - :width: 18 - -.. |star| image:: ../images/star.png - :width: 18 - -.. _Facial Landmark Detection: - -Facial Landmark Detection -------------------------- - -AFLW2k3d -^^^^^^^^ - -.. 
list-table:: - :widths: 28 8 8 16 9 8 8 8 7 7 7 - :header-rows: 1 - - * - Network Name - - NME - - Quantized - - Input Resolution (HxWxC) - - Params (M) - - OPS (G) - - Pretrained - - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - tddfa_mobilenet_v1 |star| - - 3.68 - - 4.05 - - 120x120x3 - - 3.26 - - 0.36 - - `download `_ - - `link `_ - - `download `_ - - 4482.73 - - 4484.42 diff --git a/docs/public_models/HAILO15M_Hand_Landmark_detection.rst b/docs/public_models/HAILO15M_Hand_Landmark_detection.rst deleted file mode 100644 index 189585ac..00000000 --- a/docs/public_models/HAILO15M_Hand_Landmark_detection.rst +++ /dev/null @@ -1,39 +0,0 @@ - -Public Pre-Trained Models -========================= - -.. |rocket| image:: ../images/rocket.png - :width: 18 - -.. |star| image:: ../images/star.png - :width: 18 - -.. _Hand Landmark detection: - -Hand Landmark detection ------------------------ - -Hand Landmark -^^^^^^^^^^^^^ - -.. list-table:: - :header-rows: 1 - - * - Network Name - - Input Resolution (HxWxC) - - Params (M) - - OPS (G) - - Pretrained - - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - hand_landmark_lite - - 224x224x3 - - 1.01 - - 0.3 - - `download `_ - - `link `_ - - `download `_ - - 386.777 - - 1,101.96 diff --git a/docs/public_models/HAILO15M_Person_Attribute.rst b/docs/public_models/HAILO15M_Person_Attribute.rst deleted file mode 100644 index cbc97e9e..00000000 --- a/docs/public_models/HAILO15M_Person_Attribute.rst +++ /dev/null @@ -1,45 +0,0 @@ - -Public Pre-Trained Models -========================= - -.. |rocket| image:: ../images/rocket.png - :width: 18 - -.. |star| image:: ../images/star.png - :width: 18 - - -.. _Person Attribute: - -Person Attribute ----------------- - -PETA -^^^^ - -.. 
list-table:: - :widths: 24 14 12 14 9 8 10 8 7 7 7 - :header-rows: 1 - - * - Network Name - - Mean Accuracy - - Quantized - - Input Resolution (HxWxC) - - Params (M) - - OPS (G) - - Pretrained - - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - person_attr_resnet_v1_18 - - 82.5 - - 82.61 - - 224x224x3 - - 11.19 - - 3.64 - - `download `_ - - `link `_ - - `download `_ - - 364.245 - - 816.634 diff --git a/docs/public_models/HAILO15M_Pose_Estimation.rst b/docs/public_models/HAILO15M_Pose_Estimation.rst deleted file mode 100644 index db3251da..00000000 --- a/docs/public_models/HAILO15M_Pose_Estimation.rst +++ /dev/null @@ -1,66 +0,0 @@ - -Public Pre-Trained Models -========================= - -.. |rocket| image:: ../images/rocket.png - :width: 18 - -.. |star| image:: ../images/star.png - :width: 18 - -.. _Pose Estimation: - -Pose Estimation ---------------- - -COCO -^^^^ - -.. list-table:: - :widths: 24 8 9 18 9 8 9 8 7 7 7 - :header-rows: 1 - - * - Network Name - - AP - - Quantized - - Input Resolution (HxWxC) - - Params (M) - - OPS (G) - - Pretrained - - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - centerpose_regnetx_1.6gf_fpn |star| - - 53.54 - - 52.84 - - 640x640x3 - - 14.28 - - 64.58 - - `download `_ - - `link `_ - - `download `_ - - 46.2622 - - 63.1189 - * - centerpose_regnetx_800mf - - 44.07 - - 42.97 - - 512x512x3 - - 12.31 - - 86.12 - - `download `_ - - `link `_ - - `download `_ - - 57.078 - - 75.5737 - * - centerpose_repvgg_a0 |star| - - 39.17 - - 37.17 - - 416x416x3 - - 11.71 - - 28.27 - - `download `_ - - `link `_ - - `download `_ - - 91.8911 - - 137.367 diff --git a/docs/public_models/HAILO8_Classification.rst b/docs/public_models/HAILO8/HAILO8_classification.rst similarity index 72% rename from docs/public_models/HAILO8_Classification.rst rename to docs/public_models/HAILO8/HAILO8_classification.rst index d2bd0731..05913af8 100644 --- a/docs/public_models/HAILO8_Classification.rst +++ 
b/docs/public_models/HAILO8/HAILO8_classification.rst @@ -1,12 +1,24 @@ + Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Classification`_ + + .. _Classification: Classification @@ -16,347 +28,347 @@ ImageNet ^^^^^^^^ .. list-table:: - :widths: 31 9 7 11 9 8 8 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - Accuracy (top1) - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - efficientnet_l + - Compiled + * - efficientnet_l - 80.46 - 79.36 + - 221 + - 221 - 300x300x3 - 10.55 - 19.4 - `download `_ - `link `_ - - `download `_ - - 155.208 - - 155.208 - * - efficientnet_lite0 + - `download `_ + * - efficientnet_lite0 - 74.99 - 73.81 + - 1731 + - 1731 - 224x224x3 - 4.63 - 0.78 - `download `_ - `link `_ - - `download `_ - - 1731.95 - - 1731.94 - * - efficientnet_lite1 + - `download `_ + * - efficientnet_lite1 - 76.68 - 76.21 + - 892 + - 892 - 240x240x3 - 5.39 - 1.22 - `download `_ - `link `_ - - `download `_ - - 934.714 - - 934.709 - * - efficientnet_lite2 + - `download `_ + * - efficientnet_lite2 - 77.45 - 76.74 + - 433 + - 433 - 260x260x3 - 6.06 - 1.74 - `download `_ - `link `_ - - `download `_ - - 433.436 - - 433.435 - * - efficientnet_lite3 + - `download `_ + * - efficientnet_lite3 - 79.29 - - 78.33 + - 78.42 + - 223 + - 223 - 280x280x3 - 
8.16 - 2.8 - `download `_ - `link `_ - - `download `_ - - 223.849 - - 223.848 - * - efficientnet_lite4 + - `download `_ + * - efficientnet_lite4 - 80.79 - 79.99 + - 84 + - 250 - 300x300x3 - 12.95 - 5.10 - `download `_ - `link `_ - - `download `_ - - 301.62 - - 301.619 - * - efficientnet_m |rocket| + - `download `_ + * - efficientnet_m |rocket| - 78.91 - 78.63 + - 890 + - 890 - 240x240x3 - 6.87 - 7.32 - `download `_ - `link `_ - - `download `_ - - 890.529 - - 890.53 - * - efficientnet_s + - `download `_ + * - efficientnet_s - 77.64 - 77.32 + - 1036 + - 1036 - 224x224x3 - 5.41 - 4.72 - `download `_ - `link `_ - - `download `_ - - 1036.47 - - 1036.47 - * - hardnet39ds + - `download `_ + * - hardnet39ds - 73.43 - 72.92 + - 313 + - 1313 - 224x224x3 - 3.48 - 0.86 - `download `_ - `link `_ - - `download `_ - - 328.985 - - 1348.15 - * - hardnet68 + - `download `_ + * - hardnet68 - 75.47 - 75.04 + - 121 + - 356 - 224x224x3 - 17.56 - 8.5 - `download `_ - `link `_ - - `download `_ - - 122.727 - - 347.067 - * - inception_v1 + - `download `_ + * - inception_v1 - 69.74 - 69.54 + - 928 + - 928 - 224x224x3 - 6.62 - 3 - `download `_ - `link `_ - - `download `_ - - 928.649 - - 928.906 - * - mobilenet_v1 + - `download `_ + * - mobilenet_v1 - 70.97 - 70.26 + - 3489 + - 3489 - 224x224x3 - 4.22 - 1.14 - `download `_ - `link `_ - - `download `_ - - 3489.37 - - 3489.35 - * - mobilenet_v2_1.0 |rocket| + - `download `_ + * - mobilenet_v2_1.0 |rocket| - 71.78 - 71.0 + - 2434 + - 2434 - 224x224x3 - 3.49 - 0.62 - `download `_ - `link `_ - - `download `_ - - 2443.67 - - 2443.68 - * - mobilenet_v2_1.4 + - `download `_ + * - mobilenet_v2_1.4 - 74.18 - 73.18 + - 1669 + - 1669 - 224x224x3 - 6.09 - 1.18 - `download `_ - `link `_ - - `download `_ - - 1676.77 - - 1676.7 - * - mobilenet_v3 + - `download `_ + * - mobilenet_v3 - 72.21 - 71.73 + - 2415 + - 2415 - 224x224x3 - 4.07 - 2 - `download `_ - `link `_ - - `download `_ - - 2488.59 - - 2488.52 - * - mobilenet_v3_large_minimalistic + - `download `_ + 
* - mobilenet_v3_large_minimalistic - 72.11 - - 70.96 + - 70.61 + - 3518 + - 3518 - 224x224x3 - 3.91 - 0.42 - `download `_ - `link `_ - - `download `_ - - 3484.95 - - 3485.62 - * - regnetx_1.6gf + - `download `_ + * - regnetx_1.6gf - 77.05 - 76.75 + - 2321 + - 2321 - 224x224x3 - 9.17 - 3.22 - `download `_ - `link `_ - - `download `_ - - 2321.66 - - 2321.6 - * - regnetx_800mf + - `download `_ + * - regnetx_800mf - 75.16 - 74.84 + - 3506 + - 3506 - 224x224x3 - 7.24 - 1.6 - `download `_ - `link `_ - - `download `_ - - 3506.03 - - 3506.02 - * - repvgg_a1 + - `download `_ + * - repvgg_a1 - 74.4 - 72.4 + - 2545 + - 2545 - 224x224x3 - 12.79 - 4.7 - `download `_ - `link `_ - - `download `_ - - 2545.65 - - 2545.64 - * - repvgg_a2 + - `download `_ + * - repvgg_a2 - 76.52 - 74.52 + - 911 + - 911 - 224x224x3 - 25.5 - 10.2 - `download `_ - `link `_ - - `download `_ - - 911.79 - - 911.784 - * - resmlp12_relu + - `download `_ + * - resmlp12_relu - 75.26 - 74.32 + - 1429 + - 1429 - 224x224x3 - 15.77 - 6.04 - `download `_ - `link `_ - - `download `_ - - 1430.06 - - 1429.99 - * - resnet_v1_18 + - `download `_ + * - resnet_v1_18 - 71.26 - 71.06 + - 2533 + - 2533 - 224x224x3 - 11.68 - 3.64 - `download `_ - `link `_ - - `download `_ - - 2533.72 - - 2533.78 - * - resnet_v1_34 + - `download `_ + * - resnet_v1_34 - 72.7 - 72.14 + - 1346 + - 1346 - 224x224x3 - 21.79 - 7.34 - `download `_ - `link `_ - - `download `_ - - 1346.63 - - 1346.62 - * - resnet_v1_50 |rocket| |star| + - `download `_ + * - resnet_v1_50 |rocket| |star| - 75.12 - 74.47 + - 1394 + - 1394 - 224x224x3 - 25.53 - 6.98 - `download `_ - `link `_ - - `download `_ - - 1331.76 - - 1331.76 - * - resnext26_32x4d + - `download `_ + * - resnext26_32x4d - 76.18 - 75.78 + - 1630 + - 1630 - 224x224x3 - 15.37 - 4.96 - `download `_ - `link `_ - - `download `_ - - 1630.58 - - 1630.58 - * - resnext50_32x4d + - `download `_ + * - resnext50_32x4d - 79.31 - 78.21 + - 398 + - 398 - 224x224x3 - 24.99 - 8.48 - `download `_ - `link `_ - - 
`download `_ - - 398.117 - - 398.05 - * - squeezenet_v1.1 + - `download `_ + * - squeezenet_v1.1 - 59.85 - 59.4 + - 3035 + - 3035 - 224x224x3 - 1.24 - 0.78 - `download `_ - `link `_ - - `download `_ - - 3035.18 - - 3035.17 - * - vit_base_bn + - `download `_ + * - vit_base_bn |rocket| - 79.98 - 78.58 + - 36 + - 127 - 224x224x3 - 86.5 - - 34.25 + - 35.188 - `download `_ - `link `_ - - `download `_ - - 34.5985 - - 126.352 - * - vit_small_bn + - `download `_ + * - vit_small_bn - 78.12 - 77.02 + - 113 + - 533 - 224x224x3 - 21.12 - 8.62 - `download `_ - `link `_ - - `download `_ - - 120.661 - - 559.253 - * - vit_tiny_bn + - `download `_ + * - vit_tiny_bn - 68.95 - - 66.75 + - 67.15 + - 184 + - 967 - 224x224x3 - 5.73 - 2.2 - `download `_ - `link `_ - - `download `_ - - 204.19 - - 1092.91 + - `download `_ diff --git a/docs/public_models/HAILO15H_Depth_Estimation.rst b/docs/public_models/HAILO8/HAILO8_depth_estimation.rst similarity index 55% rename from docs/public_models/HAILO15H_Depth_Estimation.rst rename to docs/public_models/HAILO8/HAILO8_depth_estimation.rst index 3f0ab8c3..93a8dcb2 100644 --- a/docs/public_models/HAILO15H_Depth_Estimation.rst +++ b/docs/public_models/HAILO8/HAILO8_depth_estimation.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Depth Estimation`_ + + .. _Depth Estimation: Depth Estimation @@ -17,39 +28,39 @@ NYU ^^^ .. 
list-table:: - :widths: 34 7 7 11 9 8 8 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - RMSE - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - fast_depth |star| + - Compiled + * - fast_depth |star| - 0.6 - 0.62 + - 1739 + - 1746 - 224x224x3 - 1.35 - 0.74 - `download `_ - `link `_ - - `download `_ - - 1334.6 - - 1334.6 - * - scdepthv3 + - `download `_ + * - scdepthv3 - 0.48 - 0.51 + - 777 + - 777 - 256x320x3 - 14.8 - 10.7 - `download `_ - `link `_ - - `download `_ - - 194.529 - - 399.198 + - `download `_ diff --git a/docs/public_models/HAILO8/HAILO8_face_attribute.rst b/docs/public_models/HAILO8/HAILO8_face_attribute.rst new file mode 100644 index 00000000..eda30aa1 --- /dev/null +++ b/docs/public_models/HAILO8/HAILO8_face_attribute.rst @@ -0,0 +1,55 @@ + +Public Pre-Trained Models +========================= + +.. |rocket| image:: images/rocket.png + :width: 18 + +.. |star| image:: images/star.png + :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Face Attribute`_ + + +.. _Face Attribute: + +Face Attribute +-------------- + +CELEBA +^^^^^^ + +.. 
list-table:: + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 + :header-rows: 1 + + * - Network Name + - Mean Accuracy + - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) + - Input Resolution (HxWxC) + - Params (M) + - OPS (G) + - Pretrained + - Source + - Compiled + * - face_attr_resnet_v1_18 + - 81.19 + - 81.09 + - 2929 + - 2929 + - 218x178x3 + - 11.74 + - 3 + - `download `_ + - `link `_ + - `download `_ diff --git a/docs/public_models/HAILO8_Face_Detection.rst b/docs/public_models/HAILO8/HAILO8_face_detection.rst similarity index 63% rename from docs/public_models/HAILO8_Face_Detection.rst rename to docs/public_models/HAILO8/HAILO8_face_detection.rst index 4492f923..46959fb3 100644 --- a/docs/public_models/HAILO8_Face_Detection.rst +++ b/docs/public_models/HAILO8/HAILO8_face_detection.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Face Detection`_ + + .. _Face Detection: Face Detection @@ -17,72 +28,72 @@ WiderFace ^^^^^^^^^ .. 
list-table:: - :widths: 24 7 12 11 9 8 8 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mAP - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - lightface_slim |star| + - Compiled + * - lightface_slim |star| - 39.7 - 39.22 + - 4206 + - 4206 - 240x320x3 - 0.26 - 0.16 - `download `_ - `link `_ - - `download `_ - - 4206.18 - - 4206.06 - * - retinaface_mobilenet_v1 |star| + - `download `_ + * - retinaface_mobilenet_v1 |star| - 81.27 - 81.17 + - 104 + - 104 - 736x1280x3 - 3.49 - 25.14 - `download `_ - `link `_ - - `download `_ - - 104.626 - - 104.624 - * - scrfd_10g + - `download `_ + * - scrfd_10g - 82.13 - 82.03 + - 303 + - 303 - 640x640x3 - 4.23 - 26.74 - `download `_ - `link `_ - - `download `_ - - 292.285 - - 292.278 - * - scrfd_2.5g + - `download `_ + * - scrfd_2.5g - 76.59 - 76.32 + - 733 + - 733 - 640x640x3 - 0.82 - 6.88 - `download `_ - `link `_ - - `download `_ - - 694.727 - - 694.64 - * - scrfd_500m + - `download `_ + * - scrfd_500m - 68.98 - 68.88 + - 831 + - 831 - 640x640x3 - 0.63 - 1.5 - `download `_ - `link `_ - - `download `_ - - 831.076 - - 830.989 + - `download `_ diff --git a/docs/public_models/HAILO15H_Face_Recognition.rst b/docs/public_models/HAILO8/HAILO8_face_recognition.rst similarity index 56% rename from docs/public_models/HAILO15H_Face_Recognition.rst rename to docs/public_models/HAILO8/HAILO8_face_recognition.rst index 275067f2..b557b86f 100644 --- a/docs/public_models/HAILO15H_Face_Recognition.rst +++ b/docs/public_models/HAILO8/HAILO8_face_recognition.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. 
|star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Face Recognition`_ + + .. _Face Recognition: Face Recognition @@ -17,39 +28,39 @@ LFW ^^^ .. list-table:: - :widths: 12 7 12 14 9 8 10 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - lfw verification accuracy - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - arcface_mobilefacenet + - Compiled + * - arcface_mobilefacenet - 99.43 - 99.41 + - 3458 + - 3458 - 112x112x3 - 2.04 - 0.88 - `download `_ - `link `_ - - `download `_ - - 1924.66 - - 1924.66 - * - arcface_r50 + - `download `_ + * - arcface_r50 - 99.72 - 99.71 + - 108 + - 393 - 112x112x3 - 31.0 - 12.6 - `download `_ - `link `_ - - `download `_ - - 154.533 - - 381.773 + - `download `_ diff --git a/docs/public_models/HAILO8/HAILO8_facial_landmark_detection.rst b/docs/public_models/HAILO8/HAILO8_facial_landmark_detection.rst new file mode 100644 index 00000000..c7346266 --- /dev/null +++ b/docs/public_models/HAILO8/HAILO8_facial_landmark_detection.rst @@ -0,0 +1,55 @@ + +Public Pre-Trained Models +========================= + +.. |rocket| image:: images/rocket.png + :width: 18 + +.. |star| image:: images/star.png + :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
+ +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Facial Landmark Detection`_ + + +.. _Facial Landmark Detection: + +Facial Landmark Detection +------------------------- + +AFLW2k3d +^^^^^^^^ + +.. list-table:: + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 + :header-rows: 1 + + * - Network Name + - NME + - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) + - Input Resolution (HxWxC) + - Params (M) + - OPS (G) + - Pretrained + - Source + - Compiled + * - tddfa_mobilenet_v1 |star| + - 3.68 + - 4.05 + - 10084 + - 10084 + - 120x120x3 + - 3.26 + - 0.36 + - `download `_ + - `link `_ + - `download `_ diff --git a/docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst b/docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst new file mode 100644 index 00000000..5f263026 --- /dev/null +++ b/docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst @@ -0,0 +1,50 @@ + +Public Pre-Trained Models +========================= + +.. |rocket| image:: images/rocket.png + :width: 18 + +.. |star| image:: images/star.png + :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Hand Landmark detection`_ + + +.. _Hand Landmark detection: + +Hand Landmark detection +----------------------- + +Hand Landmark +^^^^^^^^^^^^^ + +.. 
list-table:: + :header-rows: 1 + + * - Network Name + - FPS (Batch Size=1) + - FPS (Batch Size=8) + - Input Resolution (HxWxC) + - Params (M) + - OPS (G) + - Pretrained + - Source + - Compiled + * - hand_landmark_lite + - 2672 + - 2672 + - 224x224x3 + - 1.01 + - 0.3 + - `download `_ + - `link `_ + - `download `_ diff --git a/docs/public_models/HAILO15H_Image_Denoising.rst b/docs/public_models/HAILO8/HAILO8_image_denoising.rst similarity index 58% rename from docs/public_models/HAILO15H_Image_Denoising.rst rename to docs/public_models/HAILO8/HAILO8_image_denoising.rst index 0d5a5192..b1093ef8 100644 --- a/docs/public_models/HAILO15H_Image_Denoising.rst +++ b/docs/public_models/HAILO8/HAILO8_image_denoising.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Image Denoising`_ + + .. _Image Denoising: Image Denoising @@ -17,58 +28,58 @@ BSD68 ^^^^^ .. list-table:: - :widths: 30 7 11 14 9 8 12 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - PSNR - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - dncnn3 + - Compiled + * - dncnn3 - 31.46 - 31.26 + - 60 + - 60 - 321x481x1 - 0.66 - 205.26 - `download `_ - `link `_ - - `download `_ - - 44.813 - - 44.813 + - `download `_ CBSD68 ^^^^^^ .. 
list-table:: - :widths: 30 7 11 14 9 8 12 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - PSNR - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - dncnn_color_blind + - Compiled + * - dncnn_color_blind - 33.87 - 32.97 + - 60 + - 60 - 321x481x3 - 0.66 - 205.97 - `download `_ - `link `_ - - `download `_ - - 44.8138 - - 44.8138 + - `download `_ diff --git a/docs/public_models/HAILO15H_Instance_Segmentation.rst b/docs/public_models/HAILO8/HAILO8_instance_segmentation.rst similarity index 67% rename from docs/public_models/HAILO15H_Instance_Segmentation.rst rename to docs/public_models/HAILO8/HAILO8_instance_segmentation.rst index 8135b768..03a5282a 100644 --- a/docs/public_models/HAILO15H_Instance_Segmentation.rst +++ b/docs/public_models/HAILO8/HAILO8_instance_segmentation.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Instance Segmentation`_ + + .. _Instance Segmentation: Instance Segmentation @@ -17,116 +28,116 @@ COCO ^^^^ .. 
list-table:: - :widths: 34 7 7 11 9 8 8 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mAP - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - yolact_regnetx_1.6gf + - Compiled + * - yolact_regnetx_1.6gf - 27.57 - 27.27 + - 47 + - 84 - 512x512x3 - 30.09 - 125.34 - `download `_ - `link `_ - - `download `_ - - 46.7838 - - 70.2961 - * - yolact_regnetx_800mf + - `download `_ + * - yolact_regnetx_800mf - 25.61 - 25.5 + - 45 + - 75 - 512x512x3 - 28.3 - 116.75 - `download `_ - `link `_ - - `download `_ - - 57.6 - - 84.9004 - * - yolov5l_seg + - `download `_ + * - yolov5l_seg - 39.78 - 39.09 + - 32 + - 53 - 640x640x3 - 47.89 - 147.88 - `download `_ - `link `_ - - `download `_ - - 33.0779 - - 46.4715 - * - yolov5m_seg + - `download `_ + * - yolov5m_seg - 37.05 - 36.32 + - 61 + - 102 - 640x640x3 - 32.60 - 70.94 - `download `_ - `link `_ - - `download `_ - - 62.4293 - - 94.3386 - * - yolov5n_seg |star| + - `download `_ + * - yolov5n_seg |star| - 23.35 - 22.75 + - 184 + - 184 - 640x640x3 - 1.99 - 7.1 - `download `_ - `link `_ - - `download `_ - - 174.461 - - 175.657 - * - yolov5s_seg + - `download `_ + * - yolov5s_seg - 31.57 - - 30.49 + - 30.8 + - 92 + - 92 - 640x640x3 - 7.61 - 26.42 - `download `_ - `link `_ - - `download `_ - - 116.583 - - 161.778 - * - yolov8m_seg + - `download `_ + * - yolov8m_seg - 40.6 - - 39.88 + - 39.85 + - 45 + - 86 - 640x640x3 - 27.3 - 110.2 - `download `_ - `link `_ - - `download `_ - - 42.0778 - - 66.1054 - * - yolov8n_seg + - `download `_ + * - yolov8n_seg - 30.32 - 29.68 + - 173 + - 417 - 640x640x3 - 3.4 - 12.04 - `download `_ - `link `_ - - `download `_ - - 182.371 - - 452.741 - * - yolov8s_seg + - `download `_ + * - yolov8s_seg - 36.63 - - 36.03 + - 36.13 + - 103 + - 204 - 640x640x3 - 11.8 - 42.6 - `download `_ - `link `_ - - `download `_ - - 88.1516 - - 149.641 + - 
`download `_ diff --git a/docs/public_models/HAILO15M_Low_Light_Enhancement.rst b/docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst similarity index 54% rename from docs/public_models/HAILO15M_Low_Light_Enhancement.rst rename to docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst index d4b87cc1..f9d104ad 100644 --- a/docs/public_models/HAILO15M_Low_Light_Enhancement.rst +++ b/docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Low Light Enhancement`_ + + .. _Low Light Enhancement: Low Light Enhancement @@ -17,39 +28,39 @@ LOL ^^^ .. 
list-table:: - :widths: 30 7 11 14 9 8 12 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - PSNR - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - zero_dce + - Compiled + * - zero_dce - 16.23 - 16.24 + - 118 + - 118 - 400x600x3 - 0.21 - 38.2 - `download `_ - `link `_ - - `download `_ - - 71.5373 - - 79.9602 - * - zero_dce_pp + - `download `_ + * - zero_dce_pp - 15.95 - 15.82 + - 101 + - 101 - 400x600x3 - 0.02 - 4.84 - `download `_ - `link `_ - - `download `_ - - 52.7195 - - 58.1534 + - `download `_ diff --git a/docs/public_models/HAILO15H_Object_Detection.rst b/docs/public_models/HAILO8/HAILO8_object_detection.rst similarity index 66% rename from docs/public_models/HAILO15H_Object_Detection.rst rename to docs/public_models/HAILO8/HAILO8_object_detection.rst index 9f9807a2..70dc035c 100644 --- a/docs/public_models/HAILO15H_Object_Detection.rst +++ b/docs/public_models/HAILO8/HAILO8_object_detection.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Object Detection`_ + + .. _Object Detection: Object Detection @@ -17,487 +28,531 @@ COCO ^^^^ .. 
list-table:: - :widths: 33 8 7 12 8 8 8 7 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mAP - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - centernet_resnet_v1_18_postprocess + - Compiled + * - centernet_resnet_v1_18_postprocess - 26.3 - 23.31 + - 441 + - 441 - 512x512x3 - 14.22 - 31.21 - `download `_ - `link `_ - - `download `_ - - 121.699 - - 199.138 - * - centernet_resnet_v1_50_postprocess + - `download `_ + * - centernet_resnet_v1_50_postprocess - 31.78 - 29.23 + - 75 + - 150 - 512x512x3 - 30.07 - 56.92 - `download `_ - `link `_ - - `download `_ - - 80.3504 - - 124.306 - * - damoyolo_tinynasL20_T + - `download `_ + * - damoyolo_tinynasL20_T - 42.8 - - 41.7 + - 42.3 + - 130 + - 324 - 640x640x3 - 11.35 - 18.02 - `download `_ - `link `_ - - `download `_ - - 142.061 - - 283.447 - * - damoyolo_tinynasL25_S + - `download `_ + * - damoyolo_tinynasL25_S - 46.53 - - 46.04 + - 45.34 + - 228 + - 228 - 640x640x3 - 16.25 - 37.64 - `download `_ - `link `_ - - `download `_ - - 82.9805 - - 150.72 - * - damoyolo_tinynasL35_M + - `download `_ + * - damoyolo_tinynasL35_M - 49.7 - - 47.9 + - 47.7 + - 54 + - 126 - 640x640x3 - 33.98 - 61.64 - `download `_ - `link `_ - - `download `_ - - 56.0805 - - 104.66 - * - detr_resnet_v1_18_bn + - `download `_ + * - detr_resnet_v1_18_bn - 33.91 - - 30.36 + - 30.91 + - 28 + - 75 - 800x800x3 - 32.42 - - 58.97 + - 61.87 - `download `_ - `link `_ - - `download `_ - - 23.815 - - 46.5381 - * - nanodet_repvgg |star| + - `download `_ + * - efficientdet_lite0 + - 27.32 + - 26.49 + - 87 + - 243 + - 320x320x3 + - 3.56 + - 1.94 + - `download `_ + - `link `_ + - `download `_ + * - efficientdet_lite1 + - 32.27 + - 31.72 + - 61 + - 166 + - 384x384x3 + - 4.73 + - 4 + - `download `_ + - `link `_ + - `download `_ + * - efficientdet_lite2 + - 35.95 + - 34.67 + - 42 + - 82 + - 448x448x3 + 
- 5.93 + - 6.84 + - `download `_ + - `link `_ + - `download `_ + * - nanodet_repvgg |star| - 29.3 - 28.53 + - 820 + - 820 - 416x416x3 - 6.74 - 11.28 - - `download `_ + - `download `_ - `link `_ - - `download `_ - - 738.252 - - 738.252 - * - nanodet_repvgg_a12 + - `download `_ + * - nanodet_repvgg_a12 - 33.73 - - 31.33 + - 32.13 + - 400 + - 400 - 640x640x3 - 5.13 - 28.23 - - `download `_ + - `download `_ - `link `_ - - `download `_ - - 156.324 - - 255.834 - * - nanodet_repvgg_a1_640 + - `download `_ + * - nanodet_repvgg_a1_640 - 33.28 - 32.88 + - 280 + - 280 - 640x640x3 - 10.79 - 42.8 - - `download `_ + - `download `_ - `link `_ - - `download `_ - - 197.231 - - 197.231 - * - ssd_mobilenet_v1 |rocket| |star| + - `download `_ + * - ssd_mobilenet_v1 |rocket| |star| - 23.19 - 22.29 + - 1015 + - 1015 - 300x300x3 - 6.79 - 2.5 - `download `_ - `link `_ - - `download `_ - - 1123.47 - - 1123.62 - * - ssd_mobilenet_v2 + - `download `_ + * - ssd_mobilenet_v2 - 24.15 - - 22.94 + - 22.95 + - 142 + - 360 - 300x300x3 - 4.46 - 1.52 - `download `_ - `link `_ - - `download `_ - - 180.893 - - 372.195 - * - tiny_yolov3 + - `download `_ + * - tiny_yolov3 - 14.66 - 14.41 + - 1044 + - 1044 - 416x416x3 - 8.85 - 5.58 - `download `_ - `link `_ - - `download `_ - - 1046.5 - - 1046.5 - * - tiny_yolov4 + - `download `_ + * - tiny_yolov4 - 19.18 - 17.73 + - 1337 + - 1337 - 416x416x3 - 6.05 - 6.92 - `download `_ - `link `_ - - `download `_ - - 907.697 - - 907.697 - * - yolov3 |star| + - `download `_ + * - yolov3 |star| - 38.42 - 38.37 + - 33 + - 46 - 608x608x3 - 68.79 - 158.10 - `download `_ - `link `_ - - `download `_ - - 33.9913 - - 45.0514 - * - yolov3_416 + - `download `_ + * - yolov3_416 - 37.73 - 37.53 + - 45 + - 97 - 416x416x3 - 61.92 - 65.94 - `download `_ - `link `_ - - `download `_ - - 50.8084 - - 79.9575 - * - yolov3_gluon |rocket| |star| + - `download `_ + * - yolov3_gluon |star| - 37.28 - 35.64 + - 37 + - 68 - 608x608x3 - 68.79 - 158.1 - `download `_ - `link `_ - - `download `_ - - 
33.2606 - - 45.0436 - * - yolov3_gluon_416 |star| + - `download `_ + * - yolov3_gluon_416 |star| - 36.27 - 34.92 + - 47 + - 98 - 416x416x3 - 61.92 - 65.94 - `download `_ - `link `_ - - `download `_ - - 60.0707 - - 114.062 - * - yolov4_leaky |star| + - `download `_ + * - yolov4_leaky |star| - 42.37 - 41.08 + - 44 + - 88 - 512x512x3 - 64.33 - 91.04 - `download `_ - `link `_ - - `download `_ - - 44.5305 - - 68.2979 - * - yolov5m + - `download `_ + * - yolov5m - 42.59 - 41.19 + - 156 + - 156 - 640x640x3 - 21.78 - 52.17 - `download `_ - `link `_ - - `download `_ - - 76.0775 - - 123.094 - * - yolov5m6_6.1 + - `download `_ + * - yolov5m6_6.1 - 50.67 - 48.97 + - 25 + - 38 - 1280x1280x3 - 35.70 - 200.04 - `download `_ - `link `_ - - `download `_ - - 24.7172 - - 32.6838 - * - yolov5m_6.1 + - `download `_ + * - yolov5m_6.1 - 44.8 - 43.36 + - 81 + - 150 - 640x640x3 - 21.17 - 48.96 - `download `_ - `link `_ - - `download `_ - - 78.6189 - - 125.754 - * - yolov5m_wo_spp |rocket| + - `download `_ + * - yolov5m_wo_spp |rocket| - 43.06 - - 40.76 + - 41.06 + - 217.983 + - 217.98 - 640x640x3 - 22.67 - 52.88 - `download `_ - `link `_ - - `download `_ - - 90.4388 - - 147.557 - * - yolov5s |star| + - `download `_ + * - yolov5s |star| - 35.33 - 33.98 + - 379 + - 379 - 640x640x3 - 7.46 - 17.44 - `download `_ - `link `_ - - `download `_ - - 158.054 - - 275.213 - * - yolov5s_c3tr + - `download `_ + * - yolov5s_c3tr - 37.13 - 35.63 + - 133 + - 288 - 640x640x3 - 10.29 - 17.02 - `download `_ - `link `_ - - `download `_ - - 126.247 - - 253.666 - * - yolov5xs_wo_spp + - `download `_ + * - yolov5xs_wo_spp - 33.18 - 32.2 + - 168 + - 427 - 512x512x3 - 7.85 - 11.36 - `download `_ - `link `_ - - `download `_ - - 239.945 - - 475.362 - * - yolov5xs_wo_spp_nms_core + - `download `_ + * - yolov5xs_wo_spp_nms_core - 32.57 - - 31.06 + - 30.86 + - 100 + - 100 - 512x512x3 - 7.85 - 11.36 - `download `_ - `link `_ - - `download `_ - - 239.95 - - 239.95 - * - yolov6n + - `download `_ + * - yolov6n - 34.28 - - 
32.18 + - 32.28 + - 1251 + - 1251 - 640x640x3 - 4.32 - 11.12 - `download `_ - `link `_ - - `download `_ - - 228.947 - - 452.785 - * - yolov6n_0.2.1 + - `download `_ + * - yolov6n_0.2.1 - 35.16 - - 33.66 + - 33.87 + - 805 + - 805 - 640x640x3 - 4.33 - 11.06 - `download `_ - `link `_ - - `download `_ - - 242.407 - - 505.88 - * - yolov7 + - `download `_ + * - yolov7 - 50.59 - 47.89 + - 45 + - 80 - 640x640x3 - 36.91 - 104.51 - `download `_ - `link `_ - - `download `_ - - 43.2622 - - 62.5084 - * - yolov7_tiny + - `download `_ + * - yolov7_tiny - 37.07 - - 35.97 + - 36.07 + - 373 + - 373 - 640x640x3 - 6.22 - 13.74 - `download `_ - `link `_ - - `download `_ - - 170.697 - - 296.923 - * - yolov7e6 + - `download `_ + * - yolov7e6 - 55.37 - 53.47 + - 6 + - 9 - 1280x1280x3 - 97.20 - 515.12 - `download `_ - `link `_ - - `download `_ - - 9.17825 - - 9.17825 - * - yolov8l + - `download `_ + * - yolov8l - 52.44 - 51.78 + - 29 + - 50 - 640x640x3 - 43.7 - 165.3 - `download `_ - `link `_ - - `download `_ - - 26.9025 - - 39.9644 - * - yolov8m + - `download `_ + * - yolov8m - 49.91 - 49.11 + - 59 + - 110 - 640x640x3 - 25.9 - 78.93 - `download `_ - `link `_ - - `download `_ - - 53.0796 - - 87.9441 - * - yolov8n + - `download `_ + * - yolov8n - 37.02 - 36.32 + - 1024 + - 1024 - 640x640x3 - 3.2 - 8.74 - `download `_ - `link `_ - - `download `_ - - 215.045 - - 452.741 - * - yolov8s + - `download `_ + * - yolov8s - 44.58 - 43.98 + - 396 + - 396 - 640x640x3 - 11.2 - 28.6 - `download `_ - `link `_ - - `download `_ - - 113.704 - - 202.772 - * - yolov8x + - `download `_ + * - yolov8x - 53.45 - 52.75 + - 18 + - 29 - 640x640x3 - 68.2 - 258 - `download `_ - `link `_ - - `download `_ - - 18.5412 - - 25.4315 - * - yolox_l_leaky |star| + - `download `_ + * - yolov9c + - 52.8 + - 50.7 + - 36 + - 67 + - 640x640x3 + - 25.3 + - 102.1 + - `download `_ + - `link `_ + - `download `_ + * - yolox_l_leaky |star| - 48.69 - - 46.71 + - 46.59 + - 32 + - 54 - 640x640x3 - 54.17 - 155.3 - `download `_ - `link `_ - - 
`download `_ - - 29.9008 - - 42.1534 - * - yolox_s_leaky + - `download `_ + * - yolox_s_leaky - 38.12 - 37.27 + - 250 + - 250 - 640x640x3 - 8.96 - 26.74 - `download `_ - `link `_ - - `download `_ - - 127.238 - - 207.861 - * - yolox_s_wide_leaky + - `download `_ + * - yolox_s_wide_leaky - 42.4 - 40.97 + - 73 + - 131 - 640x640x3 - 20.12 - 59.46 - `download `_ - `link `_ - - `download `_ - - 77.0694 - - 111.198 - * - yolox_tiny + - `download `_ + * - yolox_tiny - 32.64 - 31.39 + - 226 + - 635 - 416x416x3 - 5.05 - 6.44 - `download `_ - `link `_ - - `download `_ - - 257.942 - - 564.028 + - `download `_ VisDrone ^^^^^^^^ .. list-table:: - :widths: 31 7 9 12 9 8 9 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mAP - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - ssd_mobilenet_v1_visdrone |star| + - Compiled + * - ssd_mobilenet_v1_visdrone |star| - 2.37 - 2.22 + - 1212 + - 1212 - 300x300x3 - 5.64 - 2.3 - `download `_ - `link `_ - - `download `_ - - 319.08 - - 814.14 + - `download `_ diff --git a/docs/public_models/HAILO8_Person_Attribute.rst b/docs/public_models/HAILO8/HAILO8_person_attribute.rst similarity index 50% rename from docs/public_models/HAILO8_Person_Attribute.rst rename to docs/public_models/HAILO8/HAILO8_person_attribute.rst index a181d71d..1ae38aef 100644 --- a/docs/public_models/HAILO8_Person_Attribute.rst +++ b/docs/public_models/HAILO8/HAILO8_person_attribute.rst @@ -2,11 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
+ +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Person Attribute`_ + + .. _Person Attribute: Person Attribute @@ -16,28 +28,28 @@ PETA ^^^^ .. list-table:: - :widths: 24 14 12 14 9 8 10 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - Mean Accuracy - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - person_attr_resnet_v1_18 + - Compiled + * - person_attr_resnet_v1_18 - 82.5 - 82.61 + - 2523 + - 2523 - 224x224x3 - 11.19 - 3.64 - `download `_ - `link `_ - - `download `_ - - 2533.86 - - 2533.77 + - `download `_ diff --git a/docs/public_models/HAILO8_Person_Re_ID.rst b/docs/public_models/HAILO8/HAILO8_person_re_id.rst similarity index 55% rename from docs/public_models/HAILO8_Person_Re_ID.rst rename to docs/public_models/HAILO8/HAILO8_person_re_id.rst index 4d36f078..56dbe68b 100644 --- a/docs/public_models/HAILO8_Person_Re_ID.rst +++ b/docs/public_models/HAILO8/HAILO8_person_re_id.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Person Re-ID`_ + + .. 
_Person Re-ID: Person Re-ID @@ -17,39 +28,39 @@ Market1501 ^^^^^^^^^^ .. list-table:: - :widths: 28 8 9 13 9 8 8 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - rank1 - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - osnet_x1_0 + - Compiled + * - osnet_x1_0 - 94.43 - 93.63 + - 157 + - 539 - 256x128x3 - 2.19 - 1.98 - `download `_ - `link `_ - - `download `_ - - 159.273 - - 545.392 - * - repvgg_a0_person_reid_512 |star| + - `download `_ + * - repvgg_a0_person_reid_512 |star| - 89.9 - 89.3 + - 5204 + - 5204 - 256x128x3 - 7.68 - 1.78 - `download `_ - `link `_ - - `download `_ - - 5204.97 - - 5204.94 + - `download `_ diff --git a/docs/public_models/HAILO8/HAILO8_pose_estimation.rst b/docs/public_models/HAILO8/HAILO8_pose_estimation.rst new file mode 100644 index 00000000..4156adc7 --- /dev/null +++ b/docs/public_models/HAILO8/HAILO8_pose_estimation.rst @@ -0,0 +1,99 @@ + +Public Pre-Trained Models +========================= + +.. |rocket| image:: images/rocket.png + :width: 18 + +.. |star| image:: images/star.png + :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Pose Estimation`_ + + +.. _Pose Estimation: + +Pose Estimation +--------------- + +COCO +^^^^ + +.. 
list-table:: + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 + :header-rows: 1 + + * - Network Name + - mAP + - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) + - Input Resolution (HxWxC) + - Params (M) + - OPS (G) + - Pretrained + - Source + - Compiled + * - centerpose_regnetx_1.6gf_fpn |star| + - 53.54 + - 53.53 + - 132 + - 132 + - 640x640x3 + - 14.28 + - 64.58 + - `download `_ + - `link `_ + - `download `_ + * - centerpose_regnetx_800mf + - 44.07 + - 43.07 + - 132 + - 132 + - 512x512x3 + - 12.31 + - 86.12 + - `download `_ + - `link `_ + - `download `_ + * - centerpose_repvgg_a0 |star| + - 39.17 + - 37.17 + - 512 + - 512 + - 416x416x3 + - 11.71 + - 28.27 + - `download `_ + - `link `_ + - `download `_ + * - yolov8m_pose + - 64.26 + - 61.66 + - 54 + - 108 + - 640x640x3 + - 26.4 + - 81.02 + - `download `_ + - `link `_ + - `download `_ + * - yolov8s_pose + - 59.2 + - 55.6 + - 125 + - 265 + - 640x640x3 + - 11.6 + - 30.2 + - `download `_ + - `link `_ + - `download `_ diff --git a/docs/public_models/HAILO8_Semantic_Segmentation.rst b/docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst similarity index 67% rename from docs/public_models/HAILO8_Semantic_Segmentation.rst rename to docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst index db73191e..8b3b039f 100644 --- a/docs/public_models/HAILO8_Semantic_Segmentation.rst +++ b/docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
+ +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Semantic Segmentation`_ + + .. _Semantic Segmentation: Semantic Segmentation @@ -17,121 +28,121 @@ Cityscapes ^^^^^^^^^^ .. list-table:: - :widths: 31 7 9 12 9 8 9 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mIoU - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - fcn8_resnet_v1_18 |star| + - Compiled + * - fcn8_resnet_v1_18 |star| - 69.41 - 69.21 + - 42 + - 42 - 1024x1920x3 - 11.20 - 142.82 - `download `_ - `link `_ - - `download `_ - - 42.6506 - - 42.6503 - * - segformer_b0_bn + - `download `_ + * - segformer_b0_bn - 69.81 - 68.01 + - 8 + - 17 - 512x1024x3 - 3.72 - - 27.2 + - 35.76 - `download `_ - `link `_ - - `download `_ - - 8.33 - - 11.729 - * - stdc1 |rocket| + - `download `_ + * - stdc1 |rocket| - 74.57 - 73.92 + - 52 + - 52 - 1024x1920x3 - 8.27 - 126.47 - `download `_ - `link `_ - - `download `_ - - 59.6441 - - 59.644 + - `download `_ Oxford-IIIT Pet ^^^^^^^^^^^^^^^ .. list-table:: - :widths: 31 7 9 12 9 8 9 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mIoU - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - unet_mobilenet_v2 + - Compiled + * - unet_mobilenet_v2 - 77.32 - 77.02 + - 445 + - 445 - 256x256x3 - 10.08 - 28.88 - `download `_ - `link `_ - - `download `_ - - 445.763 - - 445.762 + - `download `_ Pascal VOC ^^^^^^^^^^ .. 
list-table:: - :widths: 36 7 9 12 9 8 9 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mIoU - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - deeplab_v3_mobilenet_v2 + - Compiled + * - deeplab_v3_mobilenet_v2 - 76.05 - 74.8 + - 122 + - 122 - 513x513x3 - 2.10 - 17.65 - `download `_ - `link `_ - - `download `_ - - 89.4331 - - 89.432 - * - deeplab_v3_mobilenet_v2_wo_dilation + - `download `_ + * - deeplab_v3_mobilenet_v2_wo_dilation - 71.46 - 71.26 + - 262 + - 262 - 513x513x3 - 2.10 - 3.21 - `download `_ - `link `_ - - `download `_ - - 262.235 - - 262.228 + - `download `_ diff --git a/docs/public_models/HAILO8L_Person_Re_ID.rst b/docs/public_models/HAILO8/HAILO8_single_person_pose_estimation.rst similarity index 56% rename from docs/public_models/HAILO8L_Person_Re_ID.rst rename to docs/public_models/HAILO8/HAILO8_single_person_pose_estimation.rst index 984d43f7..e3e16181 100644 --- a/docs/public_models/HAILO8L_Person_Re_ID.rst +++ b/docs/public_models/HAILO8/HAILO8_single_person_pose_estimation.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Single Person Pose Estimation`_ + + .. _Single Person Pose Estimation: Single Person Pose Estimation @@ -17,50 +28,39 @@ COCO ^^^^ .. 
list-table:: - :widths: 24 8 9 18 9 8 9 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - AP - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - mspn_regnetx_800mf |star| + - Compiled + * - mspn_regnetx_800mf |star| - 70.8 - 70.3 + - 1716 + - 1725 - 256x192x3 - 7.17 - 2.94 - `download `_ - `link `_ - - `download `_ - - 176.8 - - 534.372 - * - vit_pose_small - - 74.16 - - 71.6 - - 256x192x3 - - 24.29 - - 17.17 - - `download `_ - - `link `_ - - `download `_ - - 25.6655 - - 91.1646 - * - vit_pose_small_bn + - `download `_ + * - vit_pose_small_bn - 72.01 - 70.81 + - 66 + - 267 - 256x192x3 - 24.32 - 17.17 - `download `_ - `link `_ - - `download `_ - - 58.664 - - 199.924 + - `download `_ diff --git a/docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst b/docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst new file mode 100644 index 00000000..d920e3eb --- /dev/null +++ b/docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst @@ -0,0 +1,55 @@ + +Public Pre-Trained Models +========================= + +.. |rocket| image:: images/rocket.png + :width: 18 + +.. |star| image:: images/star.png + :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Stereo Depth Estimation`_ + + +.. _Stereo Depth Estimation: + +Stereo Depth Estimation +----------------------- + +N/A +^^^ + +.. 
list-table:: + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 + :header-rows: 1 + + * - Network Name + - EPE + - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) + - Input Resolution (HxWxC) + - Params (M) + - OPS (G) + - Pretrained + - Source + - Compiled + * - stereonet + - 91.79 + - 89.14 + - 5 + - 3 + - 368x1232x3, 368x1232x3 + - 5.91 + - 126.28 + - `download `_ + - `link `_ + - `download `_ diff --git a/docs/public_models/HAILO15H_Super_Resolution.rst b/docs/public_models/HAILO8/HAILO8_super_resolution.rst similarity index 59% rename from docs/public_models/HAILO15H_Super_Resolution.rst rename to docs/public_models/HAILO8/HAILO8_super_resolution.rst index 18d42b6a..145b6ba7 100644 --- a/docs/public_models/HAILO15H_Super_Resolution.rst +++ b/docs/public_models/HAILO8/HAILO8_super_resolution.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Super Resolution`_ + + .. _Super Resolution: Super Resolution @@ -17,50 +28,50 @@ BSD100 ^^^^^^ .. 
list-table:: - :widths: 32 8 7 11 9 8 8 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - PSNR - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - espcn_x2 + - Compiled + * - espcn_x2 - 31.4 - 30.3 + - 1164 + - 1164 - 156x240x1 - 0.02 - 1.6 - `download `_ - `link `_ - - `download `_ - - 1637.76 - - 1637.76 - * - espcn_x3 + - `download `_ + * - espcn_x3 - 28.41 - 28.06 + - 2218 + - 2218 - 104x160x1 - 0.02 - 0.76 - `download `_ - `link `_ - - `download `_ - - 1917.92 - - 1917.92 - * - espcn_x4 + - `download `_ + * - espcn_x4 - 26.83 - 26.58 + - 2189 + - 2189 - 78x120x1 - 0.02 - 0.46 - `download `_ - `link `_ - - `download `_ - - 1893.66 - - 1893.66 + - `download `_ diff --git a/docs/public_models/HAILO8_Zero_shot_Classification.rst b/docs/public_models/HAILO8/HAILO8_zero_shot_classification.rst similarity index 50% rename from docs/public_models/HAILO8_Zero_shot_Classification.rst rename to docs/public_models/HAILO8/HAILO8_zero_shot_classification.rst index 12bb70b6..3a1615b0 100644 --- a/docs/public_models/HAILO8_Zero_shot_Classification.rst +++ b/docs/public_models/HAILO8/HAILO8_zero_shot_classification.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Zero-shot Classification`_ + + .. 
_Zero-shot Classification: Zero-shot Classification @@ -17,28 +28,28 @@ CIFAR100 ^^^^^^^^ .. list-table:: - :widths: 30 7 11 14 9 8 12 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - Accuracy (top1) - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - clip_resnet_50 + - Compiled + * - clip_resnet_50 - 42.07 - 38.57 + - 88 + - 366 - 224x224x3 - 38.72 - 11.62 - `download `_ - `link `_ - - `download `_ - - 94.6622 - - 383.319 + - `download `_ diff --git a/docs/public_models/HAILO8L_Classification.rst b/docs/public_models/HAILO8L/HAILO8l_classificaion.rst similarity index 72% rename from docs/public_models/HAILO8L_Classification.rst rename to docs/public_models/HAILO8L/HAILO8l_classificaion.rst index 5eb01aef..eb5196cd 100644 --- a/docs/public_models/HAILO8L_Classification.rst +++ b/docs/public_models/HAILO8L/HAILO8l_classificaion.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Classification`_ + + .. _Classification: Classification @@ -17,347 +28,347 @@ ImageNet ^^^^^^^^ .. 
list-table:: - :widths: 31 9 7 11 9 8 8 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - Accuracy (top1) - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - efficientnet_l + - Compiled + * - efficientnet_l - 80.46 - 79.36 + - 56 + - 109 - 300x300x3 - 10.55 - 19.4 - `download `_ - `link `_ - - `download `_ - - 56.71 - - 109.825 - * - efficientnet_lite0 + - `download `_ + * - efficientnet_lite0 - 74.99 - 73.81 + - 202 + - 595 - 224x224x3 - 4.63 - 0.78 - `download `_ - `link `_ - - `download `_ - - 200.791 - - 592.842 - * - efficientnet_lite1 + - `download `_ + * - efficientnet_lite1 - 76.68 - 76.21 + - 148 + - 466 - 240x240x3 - 5.39 - 1.22 - `download `_ - `link `_ - - `download `_ - - 149.46 - - 473.42 - * - efficientnet_lite2 + - `download `_ + * - efficientnet_lite2 - 77.45 - 76.74 + - 106 + - 270 - 260x260x3 - 6.06 - 1.74 - `download `_ - `link `_ - - `download `_ - - 104.729 - - 269.654 - * - efficientnet_lite3 + - `download `_ + * - efficientnet_lite3 - 79.29 - - 78.33 + - 78.42 + - 83 + - 201 - 280x280x3 - 8.16 - 2.8 - `download `_ - `link `_ - - `download `_ - - 82.263 - - 202.658 - * - efficientnet_lite4 + - `download `_ + * - efficientnet_lite4 - 80.79 - 79.99 + - 60 + - 139 - 300x300x3 - 12.95 - 5.10 - `download `_ - `link `_ - - `download `_ - - 59.644 - - 139.63 - * - efficientnet_m |rocket| + - `download `_ + * - efficientnet_m |rocket| - 78.91 - 78.63 + - 113 + - 303 - 240x240x3 - 6.87 - 7.32 - `download `_ - `link `_ - - `download `_ - - 114.462 - - 305.483 - * - efficientnet_s + - `download `_ + * - efficientnet_s - 77.64 - 77.32 + - 158 + - 441 - 224x224x3 - 5.41 - 4.72 - `download `_ - `link `_ - - `download `_ - - 161.139 - - 443.031 - * - hardnet39ds + - `download `_ + * - hardnet39ds - 73.43 - 72.92 + - 247 + - 766 - 224x224x3 - 3.48 - 0.86 - `download `_ - `link `_ - - 
`download `_ - - 240.189 - - 762.636 - * - hardnet68 + - `download `_ + * - hardnet68 - 75.47 - 75.04 + - 90 + - 203 - 224x224x3 - 17.56 - 8.5 - `download `_ - `link `_ - - `download `_ - - 90.4991 - - 208.453 - * - inception_v1 + - `download `_ + * - inception_v1 - 69.74 - 69.54 + - 230 + - 559 - 224x224x3 - 6.62 - 3 - `download `_ - `link `_ - - `download `_ - - 200.725 - - 517.308 - * - mobilenet_v1 + - `download `_ + * - mobilenet_v1 - 70.97 - 70.26 + - 1866 + - 1866 - 224x224x3 - 4.22 - 1.14 - `download `_ - `link `_ - - `download `_ - - 1867.24 - - 1867.24 - * - mobilenet_v2_1.0 |rocket| + - `download `_ + * - mobilenet_v2_1.0 |rocket| - 71.78 - 71.0 + - 1738 + - 1738 - 224x224x3 - 3.49 - 0.62 - `download `_ - `link `_ - - `download `_ - - 280.588 - - 769.309 - * - mobilenet_v2_1.4 + - `download `_ + * - mobilenet_v2_1.4 - 74.18 - 73.18 + - 185 + - 590 - 224x224x3 - 6.09 - 1.18 - `download `_ - `link `_ - - `download `_ - - 192.125 - - 604.039 - * - mobilenet_v3 + - `download `_ + * - mobilenet_v3 - 72.21 - 71.73 + - 220 + - 790 - 224x224x3 - 4.07 - 2 - `download `_ - `link `_ - - `download `_ - - 224.057 - - 793.835 - * - mobilenet_v3_large_minimalistic + - `download `_ + * - mobilenet_v3_large_minimalistic - 72.11 - - 70.96 + - 70.61 + - 378 + - 1154 - 224x224x3 - 3.91 - 0.42 - `download `_ - `link `_ - - `download `_ - - 378.25 - - 1154.23 - * - regnetx_1.6gf + - `download `_ + * - regnetx_1.6gf - 77.05 - 76.75 + - 223 + - 655 - 224x224x3 - 9.17 - 3.22 - `download `_ - `link `_ - - `download `_ - - 231.978 - - 666.948 - * - regnetx_800mf + - `download `_ + * - regnetx_800mf - 75.16 - 74.84 + - 280 + - 933 - 224x224x3 - 7.24 - 1.6 - `download `_ - `link `_ - - `download `_ - - 3506.03 - - 3506.02 - * - repvgg_a1 + - `download `_ + * - repvgg_a1 - 74.4 - 72.4 + - 233 + - 666 - 224x224x3 - 12.79 - 4.7 - `download `_ - `link `_ - - `download `_ - - 231.257 - - 664.813 - * - repvgg_a2 + - `download `_ + * - repvgg_a2 - 76.52 - 74.52 + - 121 + - 305 - 224x224x3 
- 25.5 - 10.2 - `download `_ - `link `_ - - `download `_ - - 136.261 - - 329.01 - * - resmlp12_relu + - `download `_ + * - resmlp12_relu - 75.26 - 74.32 + - 45 + - 191 - 224x224x3 - 15.77 - 6.04 - `download `_ - `link `_ - - `download `_ - - 45.157 - - 191.254 - * - resnet_v1_18 + - `download `_ + * - resnet_v1_18 - 71.26 - 71.06 + - 915 + - 915 - 224x224x3 - 11.68 - 3.64 - `download `_ - `link `_ - - `download `_ - - 915.657 - - 915.653 - * - resnet_v1_34 + - `download `_ + * - resnet_v1_34 - 72.7 - 72.14 + - 131 + - 412 - 224x224x3 - 21.79 - 7.34 - `download `_ - `link `_ - - `download `_ - - 156.259 - - 414.778 - * - resnet_v1_50 |rocket| |star| + - `download `_ + * - resnet_v1_50 |rocket| |star| - 75.12 - 74.47 + - 120 + - 426 - 224x224x3 - 25.53 - 6.98 - `download `_ - `link `_ - - `download `_ - - 136.806 - - 503.725 - * - resnext26_32x4d + - `download `_ + * - resnext26_32x4d - 76.18 - 75.78 + - 194 + - 495 - 224x224x3 - 15.37 - 4.96 - `download `_ - `link `_ - - `download `_ - - 174.067 - - 474.646 - * - resnext50_32x4d + - `download `_ + * - resnext50_32x4d - 79.31 - 78.21 + - 104 + - 287 - 224x224x3 - 24.99 - 8.48 - `download `_ - `link `_ - - `download `_ - - 107.693 - - 285.32 - * - squeezenet_v1.1 + - `download `_ + * - squeezenet_v1.1 - 59.85 - 59.4 + - 1730 + - 1730 - 224x224x3 - 1.24 - 0.78 - `download `_ - `link `_ - - `download `_ - - 1726.48 - - 1730.74 - * - vit_base_bn + - `download `_ + * - vit_base_bn - 79.98 - 78.58 + - 29 + - 79 - 224x224x3 - 86.5 - - 34.25 + - 35.188 - `download `_ - `link `_ - - `download `_ - - 28.056 - - 77.86 - * - vit_small_bn + - `download `_ + * - vit_small_bn - 78.12 - 77.02 + - 86 + - 304 - 224x224x3 - 21.12 - 8.62 - `download `_ - `link `_ - - `download `_ - - 83.197 - - 310.282 - * - vit_tiny_bn + - `download `_ + * - vit_tiny_bn - 68.95 - - 66.75 + - 67.15 + - 126 + - 555 - 224x224x3 - 5.73 - 2.2 - `download `_ - `link `_ - - `download `_ - - 138.727 - - 596.574 + - `download `_ diff --git 
a/docs/public_models/HAILO8_Depth_Estimation.rst b/docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst similarity index 55% rename from docs/public_models/HAILO8_Depth_Estimation.rst rename to docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst index ca25db26..a44650a0 100644 --- a/docs/public_models/HAILO8_Depth_Estimation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Depth Estimation`_ + + .. _Depth Estimation: Depth Estimation @@ -17,39 +28,39 @@ NYU ^^^ .. 
list-table:: - :widths: 34 7 7 11 9 8 8 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - RMSE - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - fast_depth |star| + - Compiled + * - fast_depth |star| - 0.6 - 0.62 + - 299 + - 299 - 224x224x3 - 1.35 - 0.74 - `download `_ - `link `_ - - `download `_ - - 1819.24 - - 1820.23 - * - scdepthv3 + - `download `_ + * - scdepthv3 - 0.48 - 0.51 + - 114 + - 238 - 256x320x3 - 14.8 - 10.7 - `download `_ - `link `_ - - `download `_ - - 777.772 - - 777.764 + - `download `_ diff --git a/docs/public_models/HAILO8L/HAILO8l_face_attribute.rst b/docs/public_models/HAILO8L/HAILO8l_face_attribute.rst new file mode 100644 index 00000000..a2bbd86e --- /dev/null +++ b/docs/public_models/HAILO8L/HAILO8l_face_attribute.rst @@ -0,0 +1,55 @@ + +Public Pre-Trained Models +========================= + +.. |rocket| image:: images/rocket.png + :width: 18 + +.. |star| image:: images/star.png + :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Face Attribute`_ + + +.. _Face Attribute: + +Face Attribute +-------------- + +CELEBA +^^^^^^ + +.. 
list-table:: + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 + :header-rows: 1 + + * - Network Name + - Mean Accuracy + - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) + - Input Resolution (HxWxC) + - Params (M) + - OPS (G) + - Pretrained + - Source + - Compiled + * - face_attr_resnet_v1_18 + - 81.19 + - 81.09 + - 670 + - 670 + - 218x178x3 + - 11.74 + - 3 + - `download `_ + - `link `_ + - `download `_ diff --git a/docs/public_models/HAILO8L_Face_Detection.rst b/docs/public_models/HAILO8L/HAILO8l_face_detection.rst similarity index 63% rename from docs/public_models/HAILO8L_Face_Detection.rst rename to docs/public_models/HAILO8L/HAILO8l_face_detection.rst index 3a13bc93..16ad2f0e 100644 --- a/docs/public_models/HAILO8L_Face_Detection.rst +++ b/docs/public_models/HAILO8L/HAILO8l_face_detection.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Face Detection`_ + + .. _Face Detection: Face Detection @@ -17,72 +28,72 @@ WiderFace ^^^^^^^^^ .. 
list-table:: - :widths: 24 7 12 11 9 8 8 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mAP - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - lightface_slim |star| + - Compiled + * - lightface_slim |star| - 39.7 - 39.22 + - 1249 + - 1249 - 240x320x3 - 0.26 - 0.16 - `download `_ - `link `_ - - `download `_ - - 1,249.25 - - 1,250.17 - * - retinaface_mobilenet_v1 |star| + - `download `_ + * - retinaface_mobilenet_v1 |star| - 81.27 - 81.17 + - 45 + - 59 - 736x1280x3 - 3.49 - 25.14 - `download `_ - `link `_ - - `download `_ - - 45.849 - - 59.705 - * - scrfd_10g + - `download `_ + * - scrfd_10g - 82.13 - 82.03 + - 93 + - 146 - 640x640x3 - 4.23 - 26.74 - `download `_ - `link `_ - - `download `_ - - 93.2965 - - 145.538 - * - scrfd_2.5g + - `download `_ + * - scrfd_2.5g - 76.59 - 76.32 + - 195 + - 333 - 640x640x3 - 0.82 - 6.88 - `download `_ - `link `_ - - `download `_ - - 184.055 - - 324.389 - * - scrfd_500m + - `download `_ + * - scrfd_500m - 68.98 - 68.88 + - 206 + - 391 - 640x640x3 - 0.63 - 1.5 - `download `_ - `link `_ - - `download `_ - - 191.591 - - 358.24 + - `download `_ diff --git a/docs/public_models/HAILO15M_Face_Recognition.rst b/docs/public_models/HAILO8L/HAILO8l_face_recognition.rst similarity index 56% rename from docs/public_models/HAILO15M_Face_Recognition.rst rename to docs/public_models/HAILO8L/HAILO8l_face_recognition.rst index 8d0a386d..1313f75f 100644 --- a/docs/public_models/HAILO15M_Face_Recognition.rst +++ b/docs/public_models/HAILO8L/HAILO8l_face_recognition.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. 
|star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Face Recognition`_ + + .. _Face Recognition: Face Recognition @@ -17,39 +28,39 @@ LFW ^^^ .. list-table:: - :widths: 12 7 12 14 9 8 10 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - lfw verification accuracy - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - arcface_mobilefacenet + - Compiled + * - arcface_mobilefacenet - 99.43 - 99.41 + - 328 + - 1103 - 112x112x3 - 2.04 - 0.88 - `download `_ - `link `_ - - `download `_ - - 444.238 - - 1190.37 - * - arcface_r50 + - `download `_ + * - arcface_r50 - 99.72 - 99.71 + - 80 + - 206 - 112x112x3 - 31.0 - 12.6 - `download `_ - `link `_ - - `download `_ - - 112.901 - - 234.881 + - `download `_ diff --git a/docs/public_models/HAILO8L/HAILO8l_facial_landmark_detection.rst b/docs/public_models/HAILO8L/HAILO8l_facial_landmark_detection.rst new file mode 100644 index 00000000..a1b187b4 --- /dev/null +++ b/docs/public_models/HAILO8L/HAILO8l_facial_landmark_detection.rst @@ -0,0 +1,55 @@ + +Public Pre-Trained Models +========================= + +.. |rocket| image:: images/rocket.png + :width: 18 + +.. |star| image:: images/star.png + :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
+ +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Facial Landmark Detection`_ + + +.. _Facial Landmark Detection: + +Facial Landmark Detection +------------------------- + +AFLW2k3d +^^^^^^^^ + +.. list-table:: + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 + :header-rows: 1 + + * - Network Name + - NME + - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) + - Input Resolution (HxWxC) + - Params (M) + - OPS (G) + - Pretrained + - Source + - Compiled + * - tddfa_mobilenet_v1 |star| + - 3.68 + - 4.05 + - 5401 + - 5401 + - 120x120x3 + - 3.26 + - 0.36 + - `download `_ + - `link `_ + - `download `_ diff --git a/docs/public_models/HAILO8L/HAILO8l_hand_landmark_detection.rst b/docs/public_models/HAILO8L/HAILO8l_hand_landmark_detection.rst new file mode 100644 index 00000000..e2aa3fe0 --- /dev/null +++ b/docs/public_models/HAILO8L/HAILO8l_hand_landmark_detection.rst @@ -0,0 +1,50 @@ + +Public Pre-Trained Models +========================= + +.. |rocket| image:: images/rocket.png + :width: 18 + +.. |star| image:: images/star.png + :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Hand Landmark detection`_ + + +.. _Hand Landmark detection: + +Hand Landmark detection +----------------------- + +Hand Landmark +^^^^^^^^^^^^^ + +.. 
list-table:: + :header-rows: 1 + + * - Network Name + - FPS (Batch Size=1) + - FPS (Batch Size=8) + - Input Resolution (HxWxC) + - Params (M) + - OPS (G) + - Pretrained + - Source + - Compiled + * - hand_landmark_lite + - 292 + - 980 + - 224x224x3 + - 1.01 + - 0.3 + - `download `_ + - `link `_ + - `download `_ diff --git a/docs/public_models/HAILO15M_Image_Denoising.rst b/docs/public_models/HAILO8L/HAILO8l_image_denoising.rst similarity index 58% rename from docs/public_models/HAILO15M_Image_Denoising.rst rename to docs/public_models/HAILO8L/HAILO8l_image_denoising.rst index 5a6f523b..0cc48160 100644 --- a/docs/public_models/HAILO15M_Image_Denoising.rst +++ b/docs/public_models/HAILO8L/HAILO8l_image_denoising.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Image Denoising`_ + + .. _Image Denoising: Image Denoising @@ -17,58 +28,58 @@ BSD68 ^^^^^ .. list-table:: - :widths: 30 7 11 14 9 8 12 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - PSNR - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - dncnn3 + - Compiled + * - dncnn3 - 31.46 - 31.26 + - 29 + - 29 - 321x481x1 - 0.66 - 205.26 - `download `_ - `link `_ - - `download `_ - - 20.5436 - - 21.0863 + - `download `_ CBSD68 ^^^^^^ .. 
list-table:: - :widths: 30 7 11 14 9 8 12 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - PSNR - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - dncnn_color_blind + - Compiled + * - dncnn_color_blind - 33.87 - 32.97 + - 29 + - 29 - 321x481x3 - 0.66 - 205.97 - `download `_ - `link `_ - - `download `_ - - 20.5436 - - 21.0838 + - `download `_ diff --git a/docs/public_models/HAILO15M_Instance_Segmentation.rst b/docs/public_models/HAILO8L/HAILO8l_instance_segmentation.rst similarity index 67% rename from docs/public_models/HAILO15M_Instance_Segmentation.rst rename to docs/public_models/HAILO8L/HAILO8l_instance_segmentation.rst index 733f0bd5..b77661fb 100644 --- a/docs/public_models/HAILO15M_Instance_Segmentation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_instance_segmentation.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Instance Segmentation`_ + + .. _Instance Segmentation: Instance Segmentation @@ -17,116 +28,116 @@ COCO ^^^^ .. 
list-table:: - :widths: 34 7 7 11 9 8 8 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mAP - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - yolact_regnetx_1.6gf + - Compiled + * - yolact_regnetx_1.6gf - 27.57 - 27.27 + - 31 + - 45 - 512x512x3 - 30.09 - 125.34 - `download `_ - `link `_ - - `download `_ - - 34.3425 - - 43.6273 - * - yolact_regnetx_800mf + - `download `_ + * - yolact_regnetx_800mf - 25.61 - 25.5 + - 33 + - 48 - 512x512x3 - 28.3 - 116.75 - `download `_ - `link `_ - - `download `_ - - 40.7949 - - 50.4228 - * - yolov5l_seg + - `download `_ + * - yolov5l_seg - 39.78 - 39.09 + - 18 + - 23 - 640x640x3 - 47.89 - 147.88 - `download `_ - `link `_ - - `download `_ - - 20.3549 - - 24.6877 - * - yolov5m_seg + - `download `_ + * - yolov5m_seg - 37.05 - 36.32 + - 40 + - 60 - 640x640x3 - 32.60 - 70.94 - `download `_ - `link `_ - - `download `_ - - 43.6168 - - 56.6371 - * - yolov5n_seg |star| + - `download `_ + * - yolov5n_seg |star| - 23.35 - 22.75 + - 122 + - 171 - 640x640x3 - 1.99 - 7.1 - `download `_ - `link `_ - - `download `_ - - 148.879 - - 174.184 - * - yolov5s_seg + - `download `_ + * - yolov5s_seg - 31.57 - - 30.49 + - 30.8 + - 77 + - 115 - 640x640x3 - 7.61 - 26.42 - `download `_ - `link `_ - - `download `_ - - 82.509 - - 113.995 - * - yolov8m_seg + - `download `_ + * - yolov8m_seg - 40.6 - - 39.88 + - 39.85 + - 29 + - 43 - 640x640x3 - 27.3 - 110.2 - `download `_ - `link `_ - - `download `_ - - 29.2613 - - 39.2299 - * - yolov8n_seg + - `download `_ + * - yolov8n_seg - 30.32 - 29.68 + - 119 + - 237 - 640x640x3 - 3.4 - 12.04 - `download `_ - `link `_ - - `download `_ - - 132.766 - - 227.347 - * - yolov8s_seg + - `download `_ + * - yolov8s_seg - 36.63 - - 36.03 + - 36.13 + - 60 + - 99 - 640x640x3 - 11.8 - 42.6 - `download `_ - `link `_ - - `download `_ - - 63.7904 - - 90.8048 + - 
`download `_ diff --git a/docs/public_models/HAILO8L_Low_Light_Enhancement.rst b/docs/public_models/HAILO8L/HAILO8l_low_light_enhancement.rst similarity index 54% rename from docs/public_models/HAILO8L_Low_Light_Enhancement.rst rename to docs/public_models/HAILO8L/HAILO8l_low_light_enhancement.rst index e56bf19c..3a6352b0 100644 --- a/docs/public_models/HAILO8L_Low_Light_Enhancement.rst +++ b/docs/public_models/HAILO8L/HAILO8l_low_light_enhancement.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Low Light Enhancement`_ + + .. _Low Light Enhancement: Low Light Enhancement @@ -17,39 +28,39 @@ LOL ^^^ .. 
list-table:: - :widths: 30 7 11 14 9 8 12 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - PSNR - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - zero_dce + - Compiled + * - zero_dce - 16.23 - 16.24 + - 69 + - 59 - 400x600x3 - 0.21 - 38.2 - `download `_ - `link `_ - - `download `_ - - 65.1753 - - 55.9742 - * - zero_dce_pp + - `download `_ + * - zero_dce_pp - 15.95 - 15.82 + - 34 + - 36 - 400x600x3 - 0.02 - 4.84 - `download `_ - `link `_ - - `download `_ - - 34.0537 - - 22.3908 + - `download `_ diff --git a/docs/public_models/HAILO15M_Object_Detection.rst b/docs/public_models/HAILO8L/HAILO8l_object_detection.rst similarity index 66% rename from docs/public_models/HAILO15M_Object_Detection.rst rename to docs/public_models/HAILO8L/HAILO8l_object_detection.rst index b8a7cc39..ef5828e0 100644 --- a/docs/public_models/HAILO15M_Object_Detection.rst +++ b/docs/public_models/HAILO8L/HAILO8l_object_detection.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Object Detection`_ + + .. _Object Detection: Object Detection @@ -17,487 +28,531 @@ COCO ^^^^ .. 
list-table:: - :widths: 33 8 7 12 8 8 8 7 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mAP - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - centernet_resnet_v1_18_postprocess + - Compiled + * - centernet_resnet_v1_18_postprocess - 26.3 - 23.31 + - 85 + - 127 - 512x512x3 - 14.22 - 31.21 - `download `_ - `link `_ - - `download `_ - - 86.5256 - - 116.281 - * - centernet_resnet_v1_50_postprocess + - `download `_ + * - centernet_resnet_v1_50_postprocess - 31.78 - 29.23 + - 52 + - 80 - 512x512x3 - 30.07 - 56.92 - `download `_ - `link `_ - - `download `_ - - 53.3531 - - 71.9656 - * - damoyolo_tinynasL20_T + - `download `_ + * - damoyolo_tinynasL20_T - 42.8 - - 41.7 + - 42.3 + - 92 + - 182 - 640x640x3 - 11.35 - 18.02 - `download `_ - `link `_ - - `download `_ - - 100.067 - - 169.343 - * - damoyolo_tinynasL25_S + - `download `_ + * - damoyolo_tinynasL25_S - 46.53 - - 46.04 + - 45.34 + - 80 + - 157 - 640x640x3 - 16.25 - 37.64 - `download `_ - `link `_ - - `download `_ - - 60.8048 - - 97.3912 - * - damoyolo_tinynasL35_M + - `download `_ + * - damoyolo_tinynasL35_M - 49.7 - - 47.9 + - 47.7 + - 36 + - 65 - 640x640x3 - 33.98 - 61.64 - `download `_ - `link `_ - - `download `_ - - 40.5352 - - 63.23 - * - detr_resnet_v1_18_bn + - `download `_ + * - detr_resnet_v1_18_bn - 33.91 - - 30.36 + - 30.91 + - 15 + - 32 - 800x800x3 - 32.42 - - 58.97 + - 61.87 - `download `_ - `link `_ - - `download `_ - - 18.9067 - - 30.5393 - * - nanodet_repvgg |star| + - `download `_ + * - efficientdet_lite0 + - 27.32 + - 26.49 + - 71 + - 174 + - 320x320x3 + - 3.56 + - 1.94 + - `download `_ + - `link `_ + - `download `_ + * - efficientdet_lite1 + - 32.27 + - 31.72 + - 44 + - 93 + - 384x384x3 + - 4.73 + - 4 + - `download `_ + - `link `_ + - `download `_ + * - efficientdet_lite2 + - 35.95 + - 34.67 + - 25 + - 47 + - 448x448x3 + - 
5.93 + - 6.84 + - `download `_ + - `link `_ + - `download `_ + * - nanodet_repvgg |star| - 29.3 - 28.53 + - 176 + - 338 - 416x416x3 - 6.74 - 11.28 - - `download `_ + - `download `_ - `link `_ - - `download `_ - - 192.699 - - 311.163 - * - nanodet_repvgg_a12 + - `download `_ + * - nanodet_repvgg_a12 - 33.73 - - 31.33 + - 32.13 + - 108 + - 174 - 640x640x3 - 5.13 - 28.23 - - `download `_ + - `download `_ - `link `_ - - `download `_ - - 109.333 - - 159.891 - * - nanodet_repvgg_a1_640 + - `download `_ + * - nanodet_repvgg_a1_640 - 33.28 - 32.88 + - 82 + - 123 - 640x640x3 - 10.79 - 42.8 - - `download `_ + - `download `_ - `link `_ - - `download `_ - - 85.5934 - - 117.759 - * - ssd_mobilenet_v1 |rocket| |star| + - `download `_ + * - ssd_mobilenet_v1 |rocket| |star| - 23.19 - 22.29 + - 147 + - 297 - 300x300x3 - 6.79 - 2.5 - `download `_ - `link `_ - - `download `_ - - 182.389 - - 354.052 - * - ssd_mobilenet_v2 + - `download `_ + * - ssd_mobilenet_v2 - 24.15 - - 22.94 + - 22.95 + - 97 + - 219 - 300x300x3 - 4.46 - 1.52 - `download `_ - `link `_ - - `download `_ - - 136.849 - - 257.357 - * - tiny_yolov3 + - `download `_ + * - tiny_yolov3 |rocket| - 14.66 - 14.41 + - 623 + - 623 - 416x416x3 - 8.85 - 5.58 - `download `_ - `link `_ - - `download `_ - - 269.057 - - 455.898 - * - tiny_yolov4 + - `download `_ + * - tiny_yolov4 - 19.18 - 17.73 + - 474 + - 474 - 416x416x3 - 6.05 - 6.92 - `download `_ - `link `_ - - `download `_ - - 273.062 - - 430.406 - * - yolov3 |star| + - `download `_ + * - yolov3 |star| - 38.42 - 38.37 + - 15 + - 19 - 608x608x3 - 68.79 - 158.10 - `download `_ - `link `_ - - `download `_ - - 20.7228 - - 24.7059 - * - yolov3_416 + - `download `_ + * - yolov3_416 - 37.73 - 37.53 + - 25 + - 38 - 416x416x3 - 61.92 - 65.94 - `download `_ - `link `_ - - `download `_ - - 35.1757 - - 50.8807 - * - yolov3_gluon |rocket| |star| + - `download `_ + * - yolov3_gluon |star| - 37.28 - 35.64 + - 14 + - 19 - 608x608x3 - 68.79 - 158.1 - `download `_ - `link `_ - - `download `_ - - 
22.0821 - - 27.6056 - * - yolov3_gluon_416 |star| + - `download `_ + * - yolov3_gluon_416 |star| - 36.27 - 34.92 + - 25 + - 38 - 416x416x3 - 61.92 - 65.94 - `download `_ - `link `_ - - `download `_ - - 34.5337 - - 50.1277 - * - yolov4_leaky |star| + - `download `_ + * - yolov4_leaky |star| - 42.37 - 41.08 + - 24 + - 38 - 512x512x3 - 64.33 - 91.04 - `download `_ - `link `_ - - `download `_ - - 29.1715 - - 39.9617 - * - yolov5m + - `download `_ + * - yolov5m - 42.59 - 41.19 + - 46 + - 75 - 640x640x3 - 21.78 - 52.17 - `download `_ - `link `_ - - `download `_ - - 53.809 - - 75.5971 - * - yolov5m6_6.1 + - `download `_ + * - yolov5m6_6.1 - 50.67 - 48.97 + - 14 + - 18 - 1280x1280x3 - 35.70 - 200.04 - `download `_ - `link `_ - - `download `_ - - 16.3575 - - 18.8832 - * - yolov5m_6.1 + - `download `_ + * - yolov5m_6.1 - 44.8 - 43.36 + - 54 + - 83 - 640x640x3 - 21.17 - 48.96 - `download `_ - `link `_ - - `download `_ - - 53.8092 - - 75.5973 - * - yolov5m_wo_spp |rocket| + - `download `_ + * - yolov5m_wo_spp - 43.06 - - 40.76 + - 41.06 + - 50.931 + - 81.035 - 640x640x3 - 22.67 - 52.88 - `download `_ - `link `_ - - `download `_ - - 67.6233 - - 94.4975 - * - yolov5s |star| + - `download `_ + * - yolov5s |star| - 35.33 - 33.98 + - 93 + - 168 - 640x640x3 - 7.46 - 17.44 - `download `_ - `link `_ - - `download `_ - - 117.162 - - 180.261 - * - yolov5s_c3tr + - `download `_ + * - yolov5s_c3tr - 37.13 - 35.63 + - 84 + - 148 - 640x640x3 - 10.29 - 17.02 - `download `_ - `link `_ - - `download `_ - - 100.892 - - 167.908 - * - yolov5xs_wo_spp + - `download `_ + * - yolov5xs_wo_spp - 33.18 - 32.2 + - 131 + - 309 - 512x512x3 - 7.85 - 11.36 - `download `_ - `link `_ - - `download `_ - - 178.885 - - 319.815 - * - yolov5xs_wo_spp_nms_core + - `download `_ + * - yolov5xs_wo_spp_nms_core - 32.57 - - 31.06 + - 30.86 + - 130 + - 309 - 512x512x3 - 7.85 - 11.36 - `download `_ - `link `_ - - `download `_ - - 178.878 - - 319.704 - * - yolov6n + - `download `_ + * - yolov6n - 34.28 - - 32.18 + - 32.28 
+ - 162 + - 341 - 640x640x3 - 4.32 - 11.12 - `download `_ - `link `_ - - `download `_ - - 168.961 - - 285.641 - * - yolov6n_0.2.1 + - `download `_ + * - yolov6n_0.2.1 - 35.16 - - 33.66 + - 33.87 + - 158 + - 346 - 640x640x3 - 4.33 - 11.06 - `download `_ - `link `_ - - `download `_ - - 181.87 - - 320.321 - * - yolov7 + - `download `_ + * - yolov7 - 50.59 - 47.89 + - 24 + - 36 - 640x640x3 - 36.91 - 104.51 - `download `_ - `link `_ - - `download `_ - - 28.8124 - - 37.7975 - * - yolov7_tiny + - `download `_ + * - yolov7_tiny - 37.07 - - 35.97 + - 36.07 + - 121 + - 200 - 640x640x3 - 6.22 - 13.74 - `download `_ - `link `_ - - `download `_ - - 128.247 - - 193.335 - * - yolov7e6 + - `download `_ + * - yolov7e6 - 55.37 - 53.47 + - 4 + - 5 - 1280x1280x3 - 97.20 - 515.12 - `download `_ - `link `_ - - `download `_ - - 6.17829 - - 6.53669 - * - yolov8l + - `download `_ + * - yolov8l - 52.44 - 51.78 + - 19 + - 27 - 640x640x3 - 43.7 - 165.3 - `download `_ - `link `_ - - `download `_ - - 18.7227 - - 23.979 - * - yolov8m + - `download `_ + * - yolov8m - 49.91 - 49.11 + - 38 + - 61 - 640x640x3 - 25.9 - 78.93 - `download `_ - `link `_ - - `download `_ - - 39.082 - - 55.9632 - * - yolov8n + - `download `_ + * - yolov8n - 37.02 - 36.32 + - 144 + - 300 - 640x640x3 - 3.2 - 8.74 - `download `_ - `link `_ - - `download `_ - - 163.514 - - 311.781 - * - yolov8s + - `download `_ + * - yolov8s - 44.58 - 43.98 + - 87 + - 153 - 640x640x3 - 11.2 - 28.6 - `download `_ - `link `_ - - `download `_ - - 82.4367 - - 129.367 - * - yolov8x + - `download `_ + * - yolov8x - 53.45 - 52.75 + - 10 + - 13 - 640x640x3 - 68.2 - 258 - `download `_ - `link `_ - - `download `_ - - 10.9973 - - 13.0795 - * - yolox_l_leaky |star| + - `download `_ + * - yolov9c + - 52.8 + - 50.7 + - None + - None + - 640x640x3 + - 25.3 + - 102.1 + - `download `_ + - `link `_ + - `download `_ + * - yolox_l_leaky |star| - 48.69 - - 46.71 + - 46.59 + - 19 + - 27 - 640x640x3 - 54.17 - 155.3 - `download `_ - `link `_ - - `download `_ - - 
17.9951 - - 21.8031 - * - yolox_s_leaky + - `download `_ + * - yolox_s_leaky - 38.12 - 37.27 + - 79 + - 137 - 640x640x3 - 8.96 - 26.74 - `download `_ - `link `_ - - `download `_ - - 87.0709 - - 129.368 - * - yolox_s_wide_leaky + - `download `_ + * - yolox_s_wide_leaky - 42.4 - 40.97 + - 48 + - 71 - 640x640x3 - 20.12 - 59.46 - `download `_ - `link `_ - - `download `_ - - 53.5291 - - 70.4935 - * - yolox_tiny + - `download `_ + * - yolox_tiny - 32.64 - 31.39 + - 152 + - 346 - 416x416x3 - 5.05 - 6.44 - `download `_ - `link `_ - - `download `_ - - 182.502 - - 362.678 + - `download `_ VisDrone ^^^^^^^^ .. list-table:: - :widths: 31 7 9 12 9 8 9 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mAP - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - ssd_mobilenet_v1_visdrone |star| + - Compiled + * - ssd_mobilenet_v1_visdrone |star| - 2.37 - 2.22 + - 200 + - 482 - 300x300x3 - 5.64 - 2.3 - `download `_ - `link `_ - - `download `_ - - 244.12 - - 527.101 + - `download `_ diff --git a/docs/public_models/HAILO8L_Person_Attribute.rst b/docs/public_models/HAILO8L/HAILO8l_person_attribute.rst similarity index 50% rename from docs/public_models/HAILO8L_Person_Attribute.rst rename to docs/public_models/HAILO8L/HAILO8l_person_attribute.rst index f46f25be..6d95e49b 100644 --- a/docs/public_models/HAILO8L_Person_Attribute.rst +++ b/docs/public_models/HAILO8L/HAILO8l_person_attribute.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
+ +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Person Attribute`_ + + .. _Person Attribute: Person Attribute @@ -17,28 +28,28 @@ PETA ^^^^ .. list-table:: - :widths: 24 14 12 14 9 8 10 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - Mean Accuracy - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - person_attr_resnet_v1_18 + - Compiled + * - person_attr_resnet_v1_18 - 82.5 - 82.61 + - 1062 + - 1062 - 224x224x3 - 11.19 - 3.64 - `download `_ - `link `_ - - `download `_ - - 1062.53 - - 1062.86 + - `download `_ diff --git a/docs/public_models/HAILO15H_Person_Re_ID.rst b/docs/public_models/HAILO8L/HAILO8l_person_re_id.rst similarity index 54% rename from docs/public_models/HAILO15H_Person_Re_ID.rst rename to docs/public_models/HAILO8L/HAILO8l_person_re_id.rst index 5973e862..4197365b 100644 --- a/docs/public_models/HAILO15H_Person_Re_ID.rst +++ b/docs/public_models/HAILO8L/HAILO8l_person_re_id.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Person Re-ID`_ + + .. 
_Person Re-ID: Person Re-ID @@ -17,39 +28,39 @@ Market1501 ^^^^^^^^^^ .. list-table:: - :widths: 28 8 9 13 9 8 8 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - rank1 - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - osnet_x1_0 + - Compiled + * - osnet_x1_0 - 94.43 - 93.63 + - 107 + - 317 - 256x128x3 - 2.19 - 1.98 - `download `_ - `link `_ - - `download `_ - - 161.073 - - 391.232 - * - repvgg_a0_person_reid_512 |star| + - `download `_ + * - repvgg_a0_person_reid_512 |star| - 89.9 - 89.3 + - 3526 + - 3526 - 256x128x3 - 7.68 - 1.78 - `download `_ - `link `_ - - `download `_ - - 5082.74 - - 5082.74 + - `download `_ diff --git a/docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst b/docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst new file mode 100644 index 00000000..c0b6b65f --- /dev/null +++ b/docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst @@ -0,0 +1,99 @@ + +Public Pre-Trained Models +========================= + +.. |rocket| image:: images/rocket.png + :width: 18 + +.. |star| image:: images/star.png + :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Pose Estimation`_ + + +.. _Pose Estimation: + +Pose Estimation +--------------- + +COCO +^^^^ + +.. 
list-table:: + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 + :header-rows: 1 + + * - Network Name + - mAP + - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) + - Input Resolution (HxWxC) + - Params (M) + - OPS (G) + - Pretrained + - Source + - Compiled + * - centerpose_regnetx_1.6gf_fpn |star| + - 53.54 + - 53.53 + - 42 + - 64 + - 640x640x3 + - 14.28 + - 64.58 + - `download `_ + - `link `_ + - `download `_ + * - centerpose_regnetx_800mf + - 44.07 + - 43.07 + - 67 + - 96 + - 512x512x3 + - 12.31 + - 86.12 + - `download `_ + - `link `_ + - `download `_ + * - centerpose_repvgg_a0 |star| + - 39.17 + - 37.17 + - 96 + - 161 + - 416x416x3 + - 11.71 + - 28.27 + - `download `_ + - `link `_ + - `download `_ + * - yolov8m_pose + - 64.26 + - 61.66 + - 36 + - 58 + - 640x640x3 + - 26.4 + - 81.02 + - `download `_ + - `link `_ + - `download `_ + * - yolov8s_pose + - 59.2 + - 55.6 + - 82 + - 141 + - 640x640x3 + - 11.6 + - 30.2 + - `download `_ + - `link `_ + - `download `_ diff --git a/docs/public_models/HAILO8L_Semantic_Segmentation.rst b/docs/public_models/HAILO8L/HAILO8l_semantic_segmentation.rst similarity index 60% rename from docs/public_models/HAILO8L_Semantic_Segmentation.rst rename to docs/public_models/HAILO8L/HAILO8l_semantic_segmentation.rst index 9edc8a8e..867e5a9d 100644 --- a/docs/public_models/HAILO8L_Semantic_Segmentation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_semantic_segmentation.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
+ +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Semantic Segmentation`_ + + .. _Semantic Segmentation: Semantic Segmentation @@ -17,110 +28,121 @@ Cityscapes ^^^^^^^^^^ .. list-table:: - :widths: 31 7 9 12 9 8 9 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mIoU - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - fcn8_resnet_v1_18 |star| + - Compiled + * - fcn8_resnet_v1_18 |star| - 69.41 - 69.21 + - 15 + - 17 - 1024x1920x3 - 11.20 - 142.82 - `download `_ - `link `_ - - `download `_ - - 15.899 - - 17.624 - * - stdc1 |rocket| + - `download `_ + * - segformer_b0_bn + - 69.81 + - 68.01 + - None + - None + - 512x1024x3 + - 3.72 + - 35.76 + - `download `_ + - `link `_ + - `download `_ + * - stdc1 - 74.57 - 73.92 + - 13 + - 18 - 1024x1920x3 - 8.27 - 126.47 - `download `_ - `link `_ - - `download `_ - - 59.6441 - - 59.644 + - `download `_ Oxford-IIIT Pet ^^^^^^^^^^^^^^^ .. list-table:: - :widths: 31 7 9 12 9 8 9 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mIoU - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - unet_mobilenet_v2 + - Compiled + * - unet_mobilenet_v2 - 77.32 - 77.02 + - 100 + - 194 - 256x256x3 - 10.08 - 28.88 - `download `_ - `link `_ - - `download `_ - - 100.629 - - 195.658 + - `download `_ Pascal VOC ^^^^^^^^^^ .. 
list-table:: - :widths: 36 7 9 12 9 8 9 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - mIoU - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - deeplab_v3_mobilenet_v2 + - Compiled + * - deeplab_v3_mobilenet_v2 |rocket| - 76.05 - 74.8 + - 35 + - 45 - 513x513x3 - 2.10 - 17.65 - `download `_ - `link `_ - - `download `_ - - 35.32 - - 43.0 - * - deeplab_v3_mobilenet_v2_wo_dilation + - `download `_ + * - deeplab_v3_mobilenet_v2_wo_dilation - 71.46 - 71.26 + - 59 + - 112 - 513x513x3 - 2.10 - 3.21 - `download `_ - `link `_ - - `download `_ - - 57.664 - - 111.422 + - `download `_ diff --git a/docs/public_models/HAILO8L_Single_Person_Pose_Estimation.rst b/docs/public_models/HAILO8L/HAILO8l_single_person_pose_estimation.rst similarity index 56% rename from docs/public_models/HAILO8L_Single_Person_Pose_Estimation.rst rename to docs/public_models/HAILO8L/HAILO8l_single_person_pose_estimation.rst index 984d43f7..dd2da21f 100644 --- a/docs/public_models/HAILO8L_Single_Person_Pose_Estimation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_single_person_pose_estimation.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Single Person Pose Estimation`_ + + .. 
_Single Person Pose Estimation: Single Person Pose Estimation @@ -17,50 +28,39 @@ COCO ^^^^ .. list-table:: - :widths: 24 8 9 18 9 8 9 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - AP - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - mspn_regnetx_800mf |star| + - Compiled + * - mspn_regnetx_800mf |star| - 70.8 - 70.3 + - 173 + - 457 - 256x192x3 - 7.17 - 2.94 - `download `_ - `link `_ - - `download `_ - - 176.8 - - 534.372 - * - vit_pose_small - - 74.16 - - 71.6 - - 256x192x3 - - 24.29 - - 17.17 - - `download `_ - - `link `_ - - `download `_ - - 25.6655 - - 91.1646 - * - vit_pose_small_bn + - `download `_ + * - vit_pose_small_bn - 72.01 - 70.81 + - 52 + - 169 - 256x192x3 - 24.32 - 17.17 - `download `_ - `link `_ - - `download `_ - - 58.664 - - 199.924 + - `download `_ diff --git a/docs/public_models/HAILO8L/HAILO8l_stereo_depth_estimation.rst b/docs/public_models/HAILO8L/HAILO8l_stereo_depth_estimation.rst new file mode 100644 index 00000000..eff62cfc --- /dev/null +++ b/docs/public_models/HAILO8L/HAILO8l_stereo_depth_estimation.rst @@ -0,0 +1,55 @@ + +Public Pre-Trained Models +========================= + +.. |rocket| image:: images/rocket.png + :width: 18 + +.. |star| image:: images/star.png + :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Stereo Depth Estimation`_ + + +.. _Stereo Depth Estimation: + +Stereo Depth Estimation +----------------------- + +N/A +^^^ + +.. 
list-table:: + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 + :header-rows: 1 + + * - Network Name + - EPE + - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) + - Input Resolution (HxWxC) + - Params (M) + - OPS (G) + - Pretrained + - Source + - Compiled + * - stereonet + - 91.79 + - 89.14 + - None + - None + - 368x1232x3, 368x1232x3 + - 5.91 + - 126.28 + - `download `_ + - `link `_ + - `download `_ diff --git a/docs/public_models/HAILO15M_Super_Resolution.rst b/docs/public_models/HAILO8L/HAILO8l_super_resolution.rst similarity index 58% rename from docs/public_models/HAILO15M_Super_Resolution.rst rename to docs/public_models/HAILO8L/HAILO8l_super_resolution.rst index 70e4a512..698ef27a 100644 --- a/docs/public_models/HAILO15M_Super_Resolution.rst +++ b/docs/public_models/HAILO8L/HAILO8l_super_resolution.rst @@ -2,12 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Super Resolution`_ + + .. _Super Resolution: Super Resolution @@ -17,50 +28,50 @@ BSD100 ^^^^^^ .. 
list-table:: - :widths: 32 8 7 11 9 8 8 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - PSNR - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - espcn_x2 + - Compiled + * - espcn_x2 - 31.4 - 30.3 + - 1164 + - 1164 - 156x240x1 - 0.02 - 1.6 - `download `_ - `link `_ - - `download `_ - - 1746.31 - - 1745.98 - * - espcn_x3 + - `download `_ + * - espcn_x3 - 28.41 - 28.06 + - 2217 + - 2217 - 104x160x1 - 0.02 - 0.76 - `download `_ - `link `_ - - `download `_ - - 1925.37 - - 1924.98 - * - espcn_x4 + - `download `_ + * - espcn_x4 - 26.83 - 26.58 + - 2189 + - 2189 - 78x120x1 - 0.02 - 0.46 - `download `_ - `link `_ - - `download `_ - - 1908.98 - - 1908.56 + - `download `_ diff --git a/docs/public_models/HAILO8L_Zero_shot_Classification.rst b/docs/public_models/HAILO8L/HAILO8l_zero_shot_classification.rst similarity index 50% rename from docs/public_models/HAILO8L_Zero_shot_Classification.rst rename to docs/public_models/HAILO8L/HAILO8l_zero_shot_classification.rst index db95eca7..bcc6283d 100644 --- a/docs/public_models/HAILO8L_Zero_shot_Classification.rst +++ b/docs/public_models/HAILO8L/HAILO8l_zero_shot_classification.rst @@ -2,11 +2,23 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: ../images/rocket.png +.. |rocket| image:: images/rocket.png :width: 18 -.. |star| image:: ../images/star.png +.. |star| image:: images/star.png :width: 18 + +Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. + +* Network available in `Hailo Benchmark `_ are marked with |rocket| +* Networks available in `TAPPAS `_ are marked with |star| +* Benchmark, TAPPAS and Recommended networks run in performance mode +* All models were compiled using Hailo Dataflow Compiler v3.27.0 +* Supported tasks: + + * `Zero-shot Classification`_ + + .. 
_Zero-shot Classification: Zero-shot Classification @@ -16,28 +28,28 @@ CIFAR100 ^^^^^^^^ .. list-table:: - :widths: 30 7 11 14 9 8 12 8 7 7 7 + :widths: 31 9 7 11 9 8 8 8 7 7 7 7 :header-rows: 1 * - Network Name - Accuracy (top1) - Quantized + - FPS (Batch Size=1) + - FPS (Batch Size=8) - Input Resolution (HxWxC) - Params (M) - OPS (G) - Pretrained - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - clip_resnet_50 + - Compiled + * - clip_resnet_50 - 42.07 - 38.57 + - 66 + - 179 - 224x224x3 - 38.72 - 11.62 - `download `_ - `link `_ - - `download `_ - - 63.5303 - - 195.19 + - `download `_ diff --git a/docs/public_models/HAILO8L_Face_Attribute.rst b/docs/public_models/HAILO8L_Face_Attribute.rst deleted file mode 100644 index de7ff821..00000000 --- a/docs/public_models/HAILO8L_Face_Attribute.rst +++ /dev/null @@ -1,44 +0,0 @@ - -Public Pre-Trained Models -========================= - -.. |rocket| image:: ../images/rocket.png - :width: 18 - -.. |star| image:: ../images/star.png - :width: 18 - -.. _Face Attribute: - -Face Attribute --------------- - -CELEBA -^^^^^^ - -.. list-table:: - :widths: 30 7 11 14 9 8 12 8 7 7 7 - :header-rows: 1 - - * - Network Name - - Mean Accuracy - - Quantized - - Input Resolution (HxWxC) - - Params (M) - - OPS (G) - - Pretrained - - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - face_attr_resnet_v1_18 - - 81.19 - - 81.09 - - 218x178x3 - - 11.74 - - 3 - - `download `_ - - `link `_ - - `download `_ - - 670.684 - - 670.88 diff --git a/docs/public_models/HAILO8L_Facial_Landmark_Detection.rst b/docs/public_models/HAILO8L_Facial_Landmark_Detection.rst deleted file mode 100644 index 57ec5d0d..00000000 --- a/docs/public_models/HAILO8L_Facial_Landmark_Detection.rst +++ /dev/null @@ -1,44 +0,0 @@ - -Public Pre-Trained Models -========================= - -.. |rocket| image:: ../images/rocket.png - :width: 18 - -.. |star| image:: ../images/star.png - :width: 18 - -.. 
_Facial Landmark Detection: - -Facial Landmark Detection -------------------------- - -AFLW2k3d -^^^^^^^^ - -.. list-table:: - :widths: 28 8 8 16 9 8 8 8 7 7 7 - :header-rows: 1 - - * - Network Name - - NME - - Quantized - - Input Resolution (HxWxC) - - Params (M) - - OPS (G) - - Pretrained - - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - tddfa_mobilenet_v1 |star| - - 3.68 - - 4.05 - - 120x120x3 - - 3.26 - - 0.36 - - `download `_ - - `link `_ - - `download `_ - - 5,397.45 - - 5401.79 diff --git a/docs/public_models/HAILO8L_Hand_Landmark_detection.rst b/docs/public_models/HAILO8L_Hand_Landmark_detection.rst deleted file mode 100644 index 0833404b..00000000 --- a/docs/public_models/HAILO8L_Hand_Landmark_detection.rst +++ /dev/null @@ -1,38 +0,0 @@ - -Public Pre-Trained Models -========================= - -.. |rocket| image:: ../images/rocket.png - :width: 18 - -.. |star| image:: ../images/star.png - :width: 18 -.. _Hand Landmark detection: - -Hand Landmark detection ------------------------ - -Hand Landmark -^^^^^^^^^^^^^ - -.. list-table:: - :header-rows: 1 - - * - Network Name - - Input Resolution (HxWxC) - - Params (M) - - OPS (G) - - Pretrained - - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - hand_landmark_lite - - 224x224x3 - - 1.01 - - 0.3 - - `download `_ - - `link `_ - - `download `_ - - 292.054 - - 980.211 diff --git a/docs/public_models/HAILO8L_Pose_Estimation.rst b/docs/public_models/HAILO8L_Pose_Estimation.rst deleted file mode 100644 index 71980ade..00000000 --- a/docs/public_models/HAILO8L_Pose_Estimation.rst +++ /dev/null @@ -1,66 +0,0 @@ - -Public Pre-Trained Models -========================= - -.. |rocket| image:: ../images/rocket.png - :width: 18 - -.. |star| image:: ../images/star.png - :width: 18 - -.. _Pose Estimation: - -Pose Estimation ---------------- - -COCO -^^^^ - -.. 
list-table:: - :widths: 24 8 9 18 9 8 9 8 7 7 7 - :header-rows: 1 - - * - Network Name - - AP - - Quantized - - Input Resolution (HxWxC) - - Params (M) - - OPS (G) - - Pretrained - - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - centerpose_regnetx_1.6gf_fpn |star| - - 53.54 - - 52.84 - - 640x640x3 - - 14.28 - - 64.58 - - `download `_ - - `link `_ - - `download `_ - - 43.449 - - 66.09 - * - centerpose_regnetx_800mf - - 44.07 - - 42.97 - - 512x512x3 - - 12.31 - - 86.12 - - `download `_ - - `link `_ - - `download `_ - - 66.641 - - 96.0218 - * - centerpose_repvgg_a0 |star| - - 39.17 - - 37.17 - - 416x416x3 - - 11.71 - - 28.27 - - `download `_ - - `link `_ - - `download `_ - - 82.301 - - 148.736 diff --git a/docs/public_models/HAILO8_Face_Attribute.rst b/docs/public_models/HAILO8_Face_Attribute.rst deleted file mode 100644 index 417529a1..00000000 --- a/docs/public_models/HAILO8_Face_Attribute.rst +++ /dev/null @@ -1,43 +0,0 @@ - -Public Pre-Trained Models -========================= - -.. |rocket| image:: ../images/rocket.png - :width: 18 - -.. |star| image:: ../images/star.png - :width: 18 -.. _Face Attribute: - -Face Attribute --------------- - -CELEBA -^^^^^^ - -.. list-table:: - :widths: 30 7 11 14 9 8 12 8 7 7 7 - :header-rows: 1 - - * - Network Name - - Mean Accuracy - - Quantized - - Input Resolution (HxWxC) - - Params (M) - - OPS (G) - - Pretrained - - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - face_attr_resnet_v1_18 - - 81.19 - - 81.09 - - 218x178x3 - - 11.74 - - 3 - - `download `_ - - `link `_ - - `download `_ - - 2928.63 - - 2929.11 diff --git a/docs/public_models/HAILO8_Facial_Landmark_Detection.rst b/docs/public_models/HAILO8_Facial_Landmark_Detection.rst deleted file mode 100644 index 2ade5ffd..00000000 --- a/docs/public_models/HAILO8_Facial_Landmark_Detection.rst +++ /dev/null @@ -1,44 +0,0 @@ - -Public Pre-Trained Models -========================= - -.. 
|rocket| image:: ../images/rocket.png - :width: 18 - -.. |star| image:: ../images/star.png - :width: 18 - -.. _Facial Landmark Detection: - -Facial Landmark Detection -------------------------- - -AFLW2k3d -^^^^^^^^ - -.. list-table:: - :widths: 28 8 8 16 9 8 8 8 7 7 7 - :header-rows: 1 - - * - Network Name - - NME - - Quantized - - Input Resolution (HxWxC) - - Params (M) - - OPS (G) - - Pretrained - - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - tddfa_mobilenet_v1 |star| - - 3.68 - - 4.05 - - 120x120x3 - - 3.26 - - 0.36 - - `download `_ - - `link `_ - - `download `_ - - 10077.0 - - 10084.0 diff --git a/docs/public_models/HAILO8_Hand_Landmark_detection.rst b/docs/public_models/HAILO8_Hand_Landmark_detection.rst deleted file mode 100644 index 67fe078a..00000000 --- a/docs/public_models/HAILO8_Hand_Landmark_detection.rst +++ /dev/null @@ -1,38 +0,0 @@ - -Public Pre-Trained Models -========================= - -.. |rocket| image:: ../images/rocket.png - :width: 18 - -.. |star| image:: ../images/star.png - :width: 18 -.. _Hand Landmark detection: - -Hand Landmark detection ------------------------ - -Hand Landmark -^^^^^^^^^^^^^ - -.. list-table:: - :header-rows: 1 - - * - Network Name - - Input Resolution (HxWxC) - - Params (M) - - OPS (G) - - Pretrained - - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - hand_landmark_lite - - 224x224x3 - - 1.01 - - 0.3 - - `download `_ - - `link `_ - - `download `_ - - 2,672.09 - - 2,671.97 diff --git a/docs/public_models/HAILO8_Pose_Estimation.rst b/docs/public_models/HAILO8_Pose_Estimation.rst deleted file mode 100644 index 3608cee3..00000000 --- a/docs/public_models/HAILO8_Pose_Estimation.rst +++ /dev/null @@ -1,66 +0,0 @@ - -Public Pre-Trained Models -========================= - -.. |rocket| image:: ../images/rocket.png - :width: 18 - -.. |star| image:: ../images/star.png - :width: 18 - -.. _Pose Estimation: - -Pose Estimation ---------------- - -COCO -^^^^ - -.. 
list-table:: - :widths: 24 8 9 18 9 8 9 8 7 7 7 - :header-rows: 1 - - * - Network Name - - AP - - Quantized - - Input Resolution (HxWxC) - - Params (M) - - OPS (G) - - Pretrained - - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - centerpose_regnetx_1.6gf_fpn |star| - - 53.54 - - 52.84 - - 640x640x3 - - 14.28 - - 64.58 - - `download `_ - - `link `_ - - `download `_ - - 132.748 - - 132.746 - * - centerpose_regnetx_800mf - - 44.07 - - 42.97 - - 512x512x3 - - 12.31 - - 86.12 - - `download `_ - - `link `_ - - `download `_ - - 132.415 - - 132.408 - * - centerpose_repvgg_a0 |star| - - 39.17 - - 37.17 - - 416x416x3 - - 11.71 - - 28.27 - - `download `_ - - `link `_ - - `download `_ - - 512.87 - - 512.863 diff --git a/docs/public_models/HAILO8_Single_Person_Pose_Estimation.rst b/docs/public_models/HAILO8_Single_Person_Pose_Estimation.rst deleted file mode 100644 index 6067aebd..00000000 --- a/docs/public_models/HAILO8_Single_Person_Pose_Estimation.rst +++ /dev/null @@ -1,66 +0,0 @@ - -Public Pre-Trained Models -========================= - -.. |rocket| image:: ../images/rocket.png - :width: 18 - -.. |star| image:: ../images/star.png - :width: 18 - -.. _Single Person Pose Estimation: - -Single Person Pose Estimation ------------------------------ - -COCO -^^^^ - -.. 
list-table:: - :widths: 24 8 9 18 9 8 9 8 7 7 7 - :header-rows: 1 - - * - Network Name - - AP - - Quantized - - Input Resolution (HxWxC) - - Params (M) - - OPS (G) - - Pretrained - - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - mspn_regnetx_800mf |star| - - 70.8 - - 70.3 - - 256x192x3 - - 7.17 - - 2.94 - - `download `_ - - `link `_ - - `download `_ - - 1843.36 - - 1840.82 - * - vit_pose_small - - 74.16 - - 71.6 - - 256x192x3 - - 24.29 - - 17.17 - - `download `_ - - `link `_ - - `download `_ - - 32.9208 - - 154.658 - * - vit_pose_small_bn - - 72.01 - - 70.81 - - 256x192x3 - - 24.32 - - 17.17 - - `download `_ - - `link `_ - - `download `_ - - 60.9302 - - 247.454 diff --git a/docs/public_models/HAILO8_Stereo_Depth_Estimation.rst b/docs/public_models/HAILO8_Stereo_Depth_Estimation.rst deleted file mode 100644 index 6ff5db77..00000000 --- a/docs/public_models/HAILO8_Stereo_Depth_Estimation.rst +++ /dev/null @@ -1,43 +0,0 @@ - -Public Pre-Trained Models -========================= - -.. |rocket| image:: ../images/rocket.png - :width: 18 - -.. |star| image:: ../images/star.png - :width: 18 -.. _Stereo Depth Estimation: - -Stereo Depth Estimation ------------------------ - -N/A -^^^ - -.. 
list-table:: - :widths: 30 7 11 14 9 8 12 8 7 7 7 - :header-rows: 1 - - * - Network Name - - EPE - - Quantized - - Input Resolution (HxWxC) - - Params (M) - - OPS (G) - - Pretrained - - Source - - Compiled - - FPS (Batch Size=1) - - FPS (Batch Size=8) - * - stereonet - - 91.79 - - 89.14 - - 368X1232X3, 368X1232X3 - - 5.91 - - 126.28 - - `download `_ - - `link `_ - - `download `_ - - 3.93173 - - 2.66557 diff --git a/hailo_model_zoo/base_parsers.py b/hailo_model_zoo/base_parsers.py new file mode 100644 index 00000000..b1477486 --- /dev/null +++ b/hailo_model_zoo/base_parsers.py @@ -0,0 +1,170 @@ +import argparse +from pathlib import Path + +from hailo_model_zoo.utils.cli_utils import OneResizeValueAction, add_model_name_arg +from hailo_model_zoo.utils.completions import ( + ALLS_COMPLETE, + CKPT_COMPLETE, + FILE_COMPLETE, + HAR_COMPLETE, + HEF_COMPLETE, + TFRECORD_COMPLETE, + YAML_COMPLETE, +) +from hailo_model_zoo.utils.constants import DEVICE_NAMES, TARGETS + + +def make_parsing_base(): + parsing_base_parser = argparse.ArgumentParser(add_help=False) + config_group = parsing_base_parser.add_mutually_exclusive_group() + add_model_name_arg(config_group, optional=True) + config_group.add_argument( + "--yaml", + type=str, + default=None, + dest="yaml_path", + help=("Path to YAML for network configuration." "By default using the default configuration"), + ).complete = YAML_COMPLETE + parsing_base_parser.add_argument( + "--ckpt", + type=str, + default=None, + dest="ckpt_path", + help=("Path to onnx or ckpt to use for parsing." " By default using the model cache location"), + ).complete = CKPT_COMPLETE + parsing_base_parser.add_argument( + "--hw-arch", + type=str, + default="hailo8", + metavar="", + choices=["hailo8", "hailo8l", "hailo15h", "hailo15m"], + help="Which hw arch to run: hailo8 / hailo8l/ hailo15h/ hailo15m. 
By default using hailo8.", + ) + parsing_base_parser.add_argument( + '--start-node-names', + type=str, + default='', + nargs='+', + help='List of names of the first nodes to parse.\nExample: --start-node-names ...') + parsing_base_parser.add_argument( + '--end-node-names', + type=str, + default='', + nargs='+', + help='List of nodes that indicate the parsing end. The order determines the order of the outputs.' + '\nExample: --end-node-names ...') + parsing_base_parser.set_defaults(results_dir=Path("./")) + return parsing_base_parser + + +def make_optimization_base(): + optimization_base_parser = argparse.ArgumentParser(add_help=False) + optimization_base_parser.add_argument( + "--har", type=str, default=None, help="Use external har file", dest="har_path" + ).complete = HAR_COMPLETE + optimization_base_parser.add_argument( + "--calib-path", + type=Path, + help="Path to external tfrecord for calibration or a directory containing \ + images in jpg or png format", + ).complete = TFRECORD_COMPLETE + optimization_base_parser.add_argument( + "--model-script", + type=str, + default=None, + dest="model_script_path", + help="Path to model script to use. 
By default using the model script specified" + " in the network YAML configuration", + ).complete = ALLS_COMPLETE + optimization_base_parser.add_argument( + "--performance", + action="store_true", + help="Enable flag for benchmark performance", + ) + optimization_base_parser.add_argument( + "--resize", + type=int, + nargs="+", + action=OneResizeValueAction, + help="Add input resize from given [h,w]", + ) + optimization_base_parser.add_argument( + "--input-conversion", + type=str, + choices=["nv12_to_rgb", "yuy2_to_rgb", "rgbx_to_rgb"], + help="Add input conversion from given type", + ) + optimization_base_parser.add_argument( + '--classes', + type=int, + metavar='', + help='Number of classes for NMS configuration') + return optimization_base_parser + + +def make_hef_base(): + hef_base_parser = argparse.ArgumentParser(add_help=False) + hef_base_parser.add_argument( + "--hef", type=str, default=None, help="Use external HEF files", dest="hef_path" + ).complete = HEF_COMPLETE + return hef_base_parser + + +def make_profiling_base(): + profile_base_parser = argparse.ArgumentParser(add_help=False) + return profile_base_parser + + +def make_evaluation_base(): + evaluation_base_parser = argparse.ArgumentParser(add_help=False) + targets = TARGETS + devices = ", ".join(DEVICE_NAMES) + evaluation_base_parser.add_argument( + "--target", + type=str, + choices=targets, + metavar="", + default="full_precision", + help="Which target to run: full_precision (GPU) / emulator (GPU) / hardware (PCIe).\n" + f"A specific device may be specified. Available devices: {devices}", + ) + + evaluation_base_parser.add_argument( + "--batch-size", + type=int, + help="Batch size for INFERENCE (evaluation and pre-quant stats collection) only " + "(feel free to increase to whatever your GPU can handle). " + " the quant-aware optimizers s.a. 
QFT & IBC use the calibration batch size parameter from the ALLS", + ) + + evaluation_base_parser.add_argument( + "--data-count", + type=int, + default=None, + dest="eval_num_examples", + help="Maximum number of images to use for evaluation", + ) + + evaluation_base_parser.add_argument( + "--visualize", + action="store_true", + dest="visualize_results", + help="Run visualization without evaluation. The default value is False", + ) + evaluation_base_parser.add_argument( + "--video-outpath", + help="Make a video from the visualizations and save it to this path", + ).complete = FILE_COMPLETE + evaluation_base_parser.add_argument( + "--data-path", + type=Path, + help="Path to external tfrecord for evaluation. In case you use --visualize \ + you can give a directory of images in jpg or png format", + ).complete = TFRECORD_COMPLETE + evaluation_base_parser.set_defaults( + print_num_examples=1e9, + visualize_results=False, + use_lite_inference=False, + use_service=False, + ) + return evaluation_base_parser diff --git a/hailo_model_zoo/cfg/alls/generic/damoyolo_tinynasL20_T.alls b/hailo_model_zoo/cfg/alls/generic/damoyolo_tinynasL20_T.alls index 96cc5330..1b16179e 100644 --- a/hailo_model_zoo/cfg/alls/generic/damoyolo_tinynasL20_T.alls +++ b/hailo_model_zoo/cfg/alls/generic/damoyolo_tinynasL20_T.alls @@ -1,2 +1,2 @@ pre_quantization_optimization(equalization, policy=disabled) -post_quantization_optimization(finetune, policy=enabled, loss_layer_names=[conv53, conv54, conv65, conv66, conv67, conv77, conv51, conv63, conv75], loss_factors=[1, 1, 1, 1, 1, 1, 52, 16, 4]) \ No newline at end of file +post_quantization_optimization(finetune, policy=enabled, learning_rate=0.0001, loss_layer_names=[conv53, conv54, conv65, conv66, conv67, conv77, conv51, conv63, conv75], loss_factors=[1, 1, 1, 1, 1, 1, 52, 16, 4]) diff --git a/hailo_model_zoo/cfg/alls/generic/detr_resnet_v1_18_bn.alls b/hailo_model_zoo/cfg/alls/generic/detr_resnet_v1_18_bn.alls index fdc7189f..958b09af 100644 --- 
a/hailo_model_zoo/cfg/alls/generic/detr_resnet_v1_18_bn.alls +++ b/hailo_model_zoo/cfg/alls/generic/detr_resnet_v1_18_bn.alls @@ -4,7 +4,8 @@ pre_quantization_optimization(equalization, policy=enabled) model_optimization_flavor(optimization_level=0, compression_level=0) # model_optimization_config commands -model_optimization_config(calibration,batch_size=8, calibset_size=32) +model_optimization_config(negative_exponent, layers={*}, rank=0) +model_optimization_config(calibration,batch_size=8, calibset_size=32) model_optimization_config(checker_cfg, policy=enabled, batch_size=1) quantization_param([detr_resnet_v1_18_bn/conv116, detr_resnet_v1_18_bn/conv115], precision_mode=a16_w16) quantization_param([detr_resnet_v1_18_bn/conv116, detr_resnet_v1_18_bn/conv115], bias_mode=single_scale_decomposition) @@ -14,45 +15,6 @@ quantization_param({ew_sub*}, activation_fit=disabled) quantization_param({output_layer*}, precision_mode=a16_w16) change_output_activation(detr_resnet_v1_18_bn/conv116, linear) - -resources_param(strategy=greedy, max_control_utilization=0.75, max_compute_utilization=0.75, max_memory_utilization=0.75) - -context_switch_param(mode=enabled) -context_0 = context([input_layer1]) -context_1 = context([conv18, const_input1]) -context_2 = context([matmul1, matmul2, conv25, const_input12, const_input18, const_input19]) -context_3 = context([matmul7, matmul8, conv43, const_input20, const_input21, const_input22, const_input23, const_input24, const_input2, const_input3, const_input4]) -context_4 = context([matmul13, matmul14, const_input6, matmul17, matmul18, matmul21, matmul22, const_input5, const_input6, const_input7, const_input8, const_input9, const_input13, const_input14, const_input15]) -context_5 = context([matmul25, matmul26, conv94, matmul29, matmul30, matmul33, matmul34, const_input10, const_input16, const_input11, const_input17]) - - -reshape1 = format_conversion(format_conversion1, reshape_1xw0_to_hxw) -const_input_reshape1 = 
format_conversion(const_input1, reshape_1xw0_to_hxw) -const_input_reshape2 = format_conversion(const_input2, reshape_1xw0_to_hxw) -const_input_reshape3 = format_conversion(const_input3, reshape_1xw0_to_hxw) -const_input_reshape4 = format_conversion(const_input4, reshape_1xw0_to_hxw) -const_input_reshape5 = format_conversion(const_input5, reshape_1xw0_to_hxw) -const_input_reshape6 = format_conversion(const_input6, reshape_1xw0_to_hxw) -const_input_reshape7 = format_conversion(const_input7, reshape_1xw0_to_hxw) -const_input_reshape8 = format_conversion(const_input8, reshape_1xw0_to_hxw) -const_input_reshape9 = format_conversion(const_input9, reshape_1xw0_to_hxw) -const_input_reshape10 = format_conversion(const_input10, reshape_1xw0_to_hxw) -const_input_reshape11 = format_conversion(const_input11, reshape_1xw0_to_hxw) -const_input_reshape12 = format_conversion(const_input12, reshape_1xw0_to_hxw) -const_input_reshape13 = format_conversion(const_input13, reshape_1xw0_to_hxw) -const_input_reshape14 = format_conversion(const_input14, reshape_1xw0_to_hxw) -const_input_reshape15 = format_conversion(const_input15, reshape_1xw0_to_hxw) -const_input_reshape16 = format_conversion(const_input16, reshape_1xw0_to_hxw) -const_input_reshape17 = format_conversion(const_input17, reshape_1xw0_to_hxw) -const_input_reshape18 = format_conversion(const_input18, reshape_1xw0_to_hxw) -const_input_reshape19 = format_conversion(const_input19, reshape_1xw0_to_hxw) -const_input_reshape20 = format_conversion(const_input20, reshape_1xw0_to_hxw) -const_input_reshape21 = format_conversion(const_input21, reshape_1xw0_to_hxw) -const_input_reshape22 = format_conversion(const_input22, reshape_1xw0_to_hxw) -const_input_reshape23 = format_conversion(const_input23, reshape_1xw0_to_hxw) -const_input_reshape24 = format_conversion(const_input24, reshape_1xw0_to_hxw) - -reshape2 = format_conversion(conv113, spatial_flatten, 1, 100) -reshape3 = format_conversion(conv116, spatial_flatten, 1, 100) - - 
+allocator_param(enable_partial_row_buffers=disabled) +context_switch_param(slotter_chances=600) +allocator_param(timeout=43200) diff --git a/hailo_model_zoo/cfg/alls/generic/efficientdet_lite0.alls b/hailo_model_zoo/cfg/alls/generic/efficientdet_lite0.alls index 03bc5d53..4faa766e 100644 --- a/hailo_model_zoo/cfg/alls/generic/efficientdet_lite0.alls +++ b/hailo_model_zoo/cfg/alls/generic/efficientdet_lite0.alls @@ -1,5 +1,5 @@ normalization1 = normalization([127.0, 127.0, 127.0], [128.0, 128.0, 128.0]) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/efficientdet/efficientdet_lite0/pretrained/2023-04-25/efficientdet_lite0_nms_config.json", ssd, engine=cpu) +nms_postprocess("../../postprocess_config/efficientdet_lite0_nms_config.json", ssd, engine=cpu) change_output_activation(efficientdet_lite0/conv66, sigmoid) change_output_activation(efficientdet_lite0/conv75, sigmoid) change_output_activation(efficientdet_lite0/conv84, sigmoid) diff --git a/hailo_model_zoo/cfg/alls/generic/efficientdet_lite1.alls b/hailo_model_zoo/cfg/alls/generic/efficientdet_lite1.alls index 38c054cb..47c3f2a2 100644 --- a/hailo_model_zoo/cfg/alls/generic/efficientdet_lite1.alls +++ b/hailo_model_zoo/cfg/alls/generic/efficientdet_lite1.alls @@ -1,5 +1,5 @@ normalization1 = normalization([127.0, 127.0, 127.0], [128.0, 128.0, 128.0]) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/efficientdet/efficientdet_lite1/pretrained/2023-04-25/efficientdet_lite1_nms_config.json", ssd, engine=cpu) +nms_postprocess("../../postprocess_config/efficientdet_lite1_nms_config.json", ssd, engine=cpu) change_output_activation(efficientdet_lite1/conv84, sigmoid) change_output_activation(efficientdet_lite1/conv93, sigmoid) change_output_activation(efficientdet_lite1/conv102, sigmoid) diff --git a/hailo_model_zoo/cfg/alls/generic/efficientdet_lite2.alls b/hailo_model_zoo/cfg/alls/generic/efficientdet_lite2.alls index a40940a2..17d72d85 100644 --- 
a/hailo_model_zoo/cfg/alls/generic/efficientdet_lite2.alls +++ b/hailo_model_zoo/cfg/alls/generic/efficientdet_lite2.alls @@ -1,5 +1,5 @@ normalization1 = normalization([127.0, 127.0, 127.0], [128.0, 128.0, 128.0]) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/efficientdet/efficientdet_lite2/pretrained/2023-04-25/efficientdet_lite2_nms_config.json", ssd, engine=cpu) +nms_postprocess("../../postprocess_config/efficientdet_lite2_nms_config.json", ssd, engine=cpu) change_output_activation(efficientdet_lite2/conv92, sigmoid) change_output_activation(efficientdet_lite2/conv101, sigmoid) change_output_activation(efficientdet_lite2/conv110, sigmoid) diff --git a/hailo_model_zoo/cfg/alls/generic/fast_sam_s.alls b/hailo_model_zoo/cfg/alls/generic/fast_sam_s.alls new file mode 100644 index 00000000..f8638a8d --- /dev/null +++ b/hailo_model_zoo/cfg/alls/generic/fast_sam_s.alls @@ -0,0 +1,15 @@ +normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0]) +change_output_activation(conv46, sigmoid) +change_output_activation(conv62, sigmoid) +change_output_activation(conv75, sigmoid) + +quantization_param(fast_sam_s/conv37, precision_mode=a16_w16) +quantization_param(fast_sam_s/conv38, precision_mode=a16_w16) +quantization_param(fast_sam_s/conv39, precision_mode=a16_w16) +quantization_param(fast_sam_s/deconv1, precision_mode=a16_w16) +quantization_param(fast_sam_s/conv41, precision_mode=a16_w16) +quantization_param(fast_sam_s/conv42, precision_mode=a16_w16) +quantization_param(fast_sam_s/conv44, precision_mode=a16_w16) +quantization_param(fast_sam_s/conv45, precision_mode=a16_w16) +quantization_param(fast_sam_s/conv47, precision_mode=a16_w16) +quantization_param(fast_sam_s/conv48, precision_mode=a16_w16) diff --git a/hailo_model_zoo/cfg/alls/generic/nanodet_repvgg.alls b/hailo_model_zoo/cfg/alls/generic/nanodet_repvgg.alls index db0d6e87..dded77ca 100644 --- a/hailo_model_zoo/cfg/alls/generic/nanodet_repvgg.alls +++ 
b/hailo_model_zoo/cfg/alls/generic/nanodet_repvgg.alls @@ -1,3 +1,4 @@ normalization1 = normalization([103.53, 116.28, 123.675], [57.375, 57.12, 58.395]) model_optimization_config(calibration, batch_size=4, calibset_size=64) input_conversion = input_conversion(bgr_to_rgb) +nms_postprocess("../../postprocess_config/nanodet_nms_config.json", meta_arch=yolov8, engine=cpu) \ No newline at end of file diff --git a/hailo_model_zoo/cfg/alls/generic/nanodet_repvgg_a12.alls b/hailo_model_zoo/cfg/alls/generic/nanodet_repvgg_a12.alls index 1e7a7465..d8211e39 100644 --- a/hailo_model_zoo/cfg/alls/generic/nanodet_repvgg_a12.alls +++ b/hailo_model_zoo/cfg/alls/generic/nanodet_repvgg_a12.alls @@ -1,2 +1,5 @@ input_conversion = input_conversion(bgr_to_rgb) normalization1 = normalization([0, 0, 0], [1, 1, 1]) +nms_postprocess("../../postprocess_config/nms_config_nanodet_repvgg_a12.json", meta_arch=yolox, engine=cpu) +model_optimization_flavor(compression_level=0) +post_quantization_optimization(finetune, policy=enabled, dataset_size=1024, epochs=8, learning_rate=0.0001) diff --git a/hailo_model_zoo/cfg/alls/generic/nanodet_repvgg_a1_640.alls b/hailo_model_zoo/cfg/alls/generic/nanodet_repvgg_a1_640.alls index 6a2db652..2aa69c06 100644 --- a/hailo_model_zoo/cfg/alls/generic/nanodet_repvgg_a1_640.alls +++ b/hailo_model_zoo/cfg/alls/generic/nanodet_repvgg_a1_640.alls @@ -1,2 +1,3 @@ normalization1 = normalization([103.53, 116.28, 123.675], [57.375, 57.12, 58.395]) input_conversion = input_conversion(bgr_to_rgb) +nms_postprocess("../../postprocess_config/nanodet_repvgg_a1_640_nms_config.json", meta_arch=yolov8, engine=cpu) diff --git a/hailo_model_zoo/cfg/alls/generic/ssd_mobilenet_v1.alls b/hailo_model_zoo/cfg/alls/generic/ssd_mobilenet_v1.alls index fda9d997..4bef85b1 100644 --- a/hailo_model_zoo/cfg/alls/generic/ssd_mobilenet_v1.alls +++ b/hailo_model_zoo/cfg/alls/generic/ssd_mobilenet_v1.alls @@ -1,5 +1,5 @@ normalization1 = normalization([127.5, 127.5, 127.5], [127.5, 127.5, 
127.5]) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/ssd/ssd_mobilenet_v1/pretrained/2023-07-18/mobilenet_ssd_nms_postprocess_config.json", ssd, engine=nn_core) +nms_postprocess("../../postprocess_config/mobilenet_ssd_nms_postprocess_config.json", ssd, engine=nn_core) quantization_param({conv*}, bias_mode=single_scale_decomposition) post_quantization_optimization(finetune, policy=enabled, dataset_size=4000, epochs=8, learning_rate=0.0001, loss_layer_names=[bbox_decoder13, conv14, bbox_decoder17, conv18, bbox_decoder21, conv22, bbox_decoder25, conv26, bbox_decoder29, conv30, bbox_decoder33, conv34, conv32, conv24, conv28, conv12, conv20, conv16], diff --git a/hailo_model_zoo/cfg/alls/generic/ssd_mobilenet_v1_no_alls.alls b/hailo_model_zoo/cfg/alls/generic/ssd_mobilenet_v1_no_alls.alls index c95b225c..c2f33f34 100644 --- a/hailo_model_zoo/cfg/alls/generic/ssd_mobilenet_v1_no_alls.alls +++ b/hailo_model_zoo/cfg/alls/generic/ssd_mobilenet_v1_no_alls.alls @@ -1,3 +1,3 @@ normalization1 = normalization([127.5, 127.5, 127.5], [127.5, 127.5, 127.5]) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/ssd/ssd_mobilenet_v1/pretrained/2021-07-11/mobilenet_ssd_nms_postprocess_config.json", ssd, engine=nn_core) +nms_postprocess("../../postprocess_config/mobilenet_ssd_nms_postprocess_config.json", ssd, engine=nn_core) post_quantization_optimization(finetune, policy=enabled, loss_factors=[0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 1.0, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1], dataset_size=4000, epochs=4, learning_rate=0.0001, loss_types=[l2, l2, l2, l2, l2, l2, l2, l2, l2, l2, l2, l2, l2, l2, l2, l2, l2, l2], loss_layer_names=[bbox_decoder13, conv14, bbox_decoder17, conv18, bbox_decoder21, conv22, bbox_decoder25, conv26, bbox_decoder29, conv30, bbox_decoder33, conv34, conv32, conv24, conv28, conv12, conv20, conv16]) diff --git a/hailo_model_zoo/cfg/alls/generic/ssd_mobilenet_v1_visdrone.alls 
b/hailo_model_zoo/cfg/alls/generic/ssd_mobilenet_v1_visdrone.alls index 25c87d1a..da0b400b 100644 --- a/hailo_model_zoo/cfg/alls/generic/ssd_mobilenet_v1_visdrone.alls +++ b/hailo_model_zoo/cfg/alls/generic/ssd_mobilenet_v1_visdrone.alls @@ -1,5 +1,5 @@ normalization1 = normalization([127.5, 127.5, 127.5], [127.5, 127.5, 127.5]) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-Visdrone/ssd/ssd_mobilenet_v1_visdrone/pretrained/2023-07-18/mobilenet_ssd_nms_visdrone_postprocess_config.json", ssd, engine=nn_core) +nms_postprocess("../../postprocess_config/mobilenet_ssd_nms_visdrone_postprocess_config.json", ssd, engine=nn_core) model_optimization_config(calibration, batch_size=8, calibset_size=32) quantization_param({conv*}, bias_mode=single_scale_decomposition) post_quantization_optimization(finetune, policy=disabled) diff --git a/hailo_model_zoo/cfg/alls/generic/ssd_mobilenet_v2.alls b/hailo_model_zoo/cfg/alls/generic/ssd_mobilenet_v2.alls index 95a221a2..c5308a34 100644 --- a/hailo_model_zoo/cfg/alls/generic/ssd_mobilenet_v2.alls +++ b/hailo_model_zoo/cfg/alls/generic/ssd_mobilenet_v2.alls @@ -1,5 +1,5 @@ normalization1 = normalization([127.5, 127.5, 127.5], [127.5, 127.5, 127.5]) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/ssd/ssd_mobilenet_v2/pretrained/2023-03-16/mobilenet_v2_ssd_nms_postprocess_config.json", ssd, engine=nn_core) +nms_postprocess("../../postprocess_config/mobilenet_v2_ssd_nms_postprocess_config.json", ssd, engine=nn_core) model_optimization_config(calibration, batch_size=8, calibset_size=32) post_quantization_optimization(finetune, policy=disabled) post_quantization_optimization(bias_correction, policy=enabled) diff --git a/hailo_model_zoo/cfg/alls/generic/yolov5m.alls b/hailo_model_zoo/cfg/alls/generic/yolov5m.alls index f0c9eee1..abe7546c 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolov5m.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolov5m.alls @@ -1,6 +1,6 @@ normalization1 = normalization([0.0, 
0.0, 0.0], [255.0, 255.0, 255.0]) change_output_activation(sigmoid) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolov5m_spp/pretrained/2023-04-25/yolov5m_nms_config.json", yolov5, engine=cpu) +nms_postprocess("../../postprocess_config/yolov5m_spp_nms_config.json", yolov5, engine=cpu) model_optimization_config(calibration, batch_size=4, calibset_size=64) quantization_param(conv45, precision_mode=a8_w4) quantization_param(conv46, precision_mode=a8_w4) diff --git a/hailo_model_zoo/cfg/alls/generic/yolov5m6_6.1.alls b/hailo_model_zoo/cfg/alls/generic/yolov5m6_6.1.alls index e4d7eb59..8ff44d3b 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolov5m6_6.1.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolov5m6_6.1.alls @@ -1,5 +1,5 @@ normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0]) change_output_activation(sigmoid) -nms_postprocess("$HMZ_DATA/ models_files/ObjectDetection/Detection-COCO/yolo/yolov5m6_6.1/pretrained/2023-04-25/yolov5m6_nms_config.json", yolov5, engine=cpu) +nms_postprocess("../../postprocess_config/yolov5m6_nms_config.json", yolov5, engine=cpu) model_optimization_config(calibration, batch_size=1, calibset_size=64) post_quantization_optimization(finetune, policy=enabled, learning_rate=0.0001, epochs=4, dataset_size=4000, loss_factors=[1.0, 1.0, 1.0, 1.0], loss_types=[l2rel, l2rel, l2rel, l2rel], loss_layer_names=[conv107, conv99, conv90, conv81]) diff --git a/hailo_model_zoo/cfg/alls/generic/yolov5m_6.1.alls b/hailo_model_zoo/cfg/alls/generic/yolov5m_6.1.alls index 9e33d5ac..191667f4 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolov5m_6.1.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolov5m_6.1.alls @@ -2,5 +2,5 @@ yolov5m_6_1/normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0 change_output_activation(yolov5m_6_1/conv82, sigmoid) change_output_activation(yolov5m_6_1/conv74, sigmoid) change_output_activation(yolov5m_6_1/conv65, sigmoid) 
-nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolov5m_6.1/pretrained/2023-04-25/yolov5m_6.1_nms_config.json", yolov5, engine=cpu) +nms_postprocess("../../postprocess_config/yolov5m_6.1_nms_config.json", yolov5, engine=cpu) model_optimization_config(calibration, batch_size=4, calibset_size=64) diff --git a/hailo_model_zoo/cfg/alls/generic/yolov5m_vehicles.alls b/hailo_model_zoo/cfg/alls/generic/yolov5m_vehicles.alls index 0162b727..f7cb6c4a 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolov5m_vehicles.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolov5m_vehicles.alls @@ -12,4 +12,4 @@ quantization_param(conv91, precision_mode=a8_w4) pre_quantization_optimization(weights_clipping, layers=[conv75], mode=manual, clipping_values=[-32.52, 32.52]) pre_quantization_optimization(weights_clipping, layers=[conv85], mode=manual, clipping_values=[-26.61, 26.61]) post_quantization_optimization(finetune, policy=enabled, learning_rate=0.0001, epochs=4, dataset_size=4000, loss_factors=[1.0, 1.0, 1.0], loss_types=[l2rel, l2rel, l2rel], loss_layer_names=[conv94, conv85, conv75]) -nms_postprocess("$HMZ_DATA/models_files/HailoNets/LPR/vehicle_detector/yolov5m_vehicles/2023-04-25/yolov5m_vehicles_nms_config.json", yolov5, engine=cpu) +nms_postprocess("../../postprocess_config/yolov5m_vehicles_nms_config.json", yolov5, engine=cpu) diff --git a/hailo_model_zoo/cfg/alls/generic/yolov5m_vehicles_nv12.alls b/hailo_model_zoo/cfg/alls/generic/yolov5m_vehicles_nv12.alls index 68f5f030..d15be0ec 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolov5m_vehicles_nv12.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolov5m_vehicles_nv12.alls @@ -14,4 +14,4 @@ pre_quantization_optimization(weights_clipping, layers=[conv85], mode=manual, cl post_quantization_optimization(finetune, policy=enabled, learning_rate=0.0001, epochs=4, dataset_size=4000, loss_factors=[1.0, 1.0, 1.0], loss_types=[l2rel, l2rel, l2rel], loss_layer_names=[conv94, conv85, conv75]) yuv_to_rgb1 = 
input_conversion(yuv_to_rgb) reshape = input_conversion(input_layer1, nv12_to_hailo_yuv, emulator_support=True) -nms_postprocess("$HMZ_DATA/models_files/HailoNets/LPR/vehicle_detector/yolov5m_vehicles/2023-04-25/yolov5m_vehicles_nms_config.json", yolov5, engine=cpu) +nms_postprocess("../../postprocess_config/yolov5m_vehicles_nms_config.json", yolov5, engine=cpu) diff --git a/hailo_model_zoo/cfg/alls/generic/yolov5m_vehicles_yuy2.alls b/hailo_model_zoo/cfg/alls/generic/yolov5m_vehicles_yuy2.alls index 05f530be..1da00853 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolov5m_vehicles_yuy2.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolov5m_vehicles_yuy2.alls @@ -14,4 +14,4 @@ pre_quantization_optimization(weights_clipping, layers=[conv85], mode=manual, cl post_quantization_optimization(finetune, policy=enabled, learning_rate=0.0001, epochs=4, dataset_size=4000, loss_factors=[1.0, 1.0, 1.0], loss_types=[l2rel, l2rel, l2rel], loss_layer_names=[conv94, conv85, conv75]) yuv_to_rgb1 = input_conversion(yuv_to_rgb) reshape_yuy2 = input_conversion(input_layer1, yuy2_to_hailo_yuv, emulator_support=True) -nms_postprocess("$HMZ_DATA/models_files/HailoNets/LPR/vehicle_detector/yolov5m_vehicles/2023-04-25/yolov5m_vehicles_nms_config.json", yolov5, engine=cpu) +nms_postprocess("../../postprocess_config/yolov5m_vehicles_nms_config.json", yolov5, engine=cpu) diff --git a/hailo_model_zoo/cfg/alls/generic/yolov5m_wo_spp.alls b/hailo_model_zoo/cfg/alls/generic/yolov5m_wo_spp.alls index 86caadf5..43cbc425 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolov5m_wo_spp.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolov5m_wo_spp.alls @@ -1,6 +1,6 @@ normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0]) change_output_activation(sigmoid) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolov5m/pretrained/2023-04-25/yolov5m_nms_config.json", yolov5, engine=cpu) +nms_postprocess("../../postprocess_config/yolov5m_nms_config.json", yolov5, engine=cpu) 
model_optimization_config(calibration, batch_size=4, calibset_size=64) quantization_param(conv45, precision_mode=a8_w4) quantization_param(conv46, precision_mode=a8_w4) diff --git a/hailo_model_zoo/cfg/alls/generic/yolov5m_wo_spp_60p.alls b/hailo_model_zoo/cfg/alls/generic/yolov5m_wo_spp_60p.alls index 241ea388..1b2d461e 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolov5m_wo_spp_60p.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolov5m_wo_spp_60p.alls @@ -1,6 +1,6 @@ normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0]) change_output_activation(sigmoid) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolov5m/pretrained/2023-04-25/yolov5m_nms_config.json", yolov5, engine=cpu) +nms_postprocess("../../postprocess_config/yolov5m_nms_config.json", yolov5, engine=cpu) model_optimization_config(calibration, batch_size=4, calibset_size=64) post_quantization_optimization(bias_correction, policy=disabled) post_quantization_optimization(finetune, policy=enabled, meta_arch=yolo, dataset_size=4096, batch_size=4, epochs=5, learning_rate=0.0002, loss_layer_names=[conv92, conv93, conv82, conv84, conv72, conv74], loss_types=[l2rel, l2rel, l2rel, l2rel, l2rel, l2rel]) diff --git a/hailo_model_zoo/cfg/alls/generic/yolov5m_wo_spp_yuy2.alls b/hailo_model_zoo/cfg/alls/generic/yolov5m_wo_spp_yuy2.alls index 381e090e..60ba2823 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolov5m_wo_spp_yuy2.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolov5m_wo_spp_yuy2.alls @@ -3,7 +3,7 @@ change_output_activation(sigmoid) yuv_to_rgb1 = input_conversion(input_layer1, yuv_to_rgb) resize_input1 = resize(input_layer1, resize_shapes=[720, 1280]) reshape_yuy2 = input_conversion(input_layer1, yuy2_to_hailo_yuv, emulator_support=True) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolov5m/pretrained/2023-04-25/yolov5m_nms_config.json", yolov5, engine=cpu) +nms_postprocess("../../postprocess_config/yolov5m_nms_config.json", yolov5, 
engine=cpu) model_optimization_config(calibration, batch_size=4, calibset_size=64) quantization_param(conv45, precision_mode=a8_w4) diff --git a/hailo_model_zoo/cfg/alls/generic/yolov5s.alls b/hailo_model_zoo/cfg/alls/generic/yolov5s.alls index ef3b2a42..89133f6c 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolov5s.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolov5s.alls @@ -1,4 +1,4 @@ normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0]) change_output_activation(sigmoid) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolov5s_spp/pretrained/2023-04-25/yolov5s_nms_config.json", yolov5, engine=cpu) +nms_postprocess("../../postprocess_config/yolov5s_nms_config.json", yolov5, engine=cpu) model_optimization_config(calibration, batch_size=4) \ No newline at end of file diff --git a/hailo_model_zoo/cfg/alls/generic/yolov5s_c3tr.alls b/hailo_model_zoo/cfg/alls/generic/yolov5s_c3tr.alls index 0519e0e7..d05a95ff 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolov5s_c3tr.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolov5s_c3tr.alls @@ -1,4 +1,4 @@ normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0]) change_output_activation(sigmoid) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolov5s_c3tr/pretrained/2023-04-25/yolov5s_c3tr_nms_config.json", yolov5, engine=cpu) +nms_postprocess("../../postprocess_config/yolov5s_c3tr_nms_config.json", yolov5, engine=cpu) model_optimization_config(calibration, batch_size=4, calibset_size=64) diff --git a/hailo_model_zoo/cfg/alls/generic/yolov5s_personface.alls b/hailo_model_zoo/cfg/alls/generic/yolov5s_personface.alls index 7549f150..a0afb5c0 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolov5s_personface.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolov5s_personface.alls @@ -1,4 +1,4 @@ normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0]) change_output_activation(sigmoid) 
-nms_postprocess("$HMZ_DATA/models_files/HailoNets/MCPReID/personface_detector/yolov5s_personface/2023-04-25/yolov5s_personface.json", yolov5, engine=cpu) +nms_postprocess("../../postprocess_config/yolov5s_personface.json", yolov5, engine=cpu) post_quantization_optimization(finetune, policy=enabled, learning_rate=0.0001, epochs=4, dataset_size=4000, loss_factors=[1.0, 1.0, 1.0], loss_types=[l2rel, l2rel, l2rel], loss_layer_names=[conv70, conv63, conv55]) diff --git a/hailo_model_zoo/cfg/alls/generic/yolov5s_personface_nv12.alls b/hailo_model_zoo/cfg/alls/generic/yolov5s_personface_nv12.alls index ae2e6046..d3bbd5ca 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolov5s_personface_nv12.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolov5s_personface_nv12.alls @@ -3,4 +3,4 @@ change_output_activation(sigmoid) post_quantization_optimization(finetune, policy=enabled, learning_rate=0.0001, epochs=4, dataset_size=4000, loss_factors=[1.0, 1.0, 1.0], loss_types=[l2rel, l2rel, l2rel], loss_layer_names=[conv70, conv63, conv55]) yuv_to_rgb1 = input_conversion(yuv_to_rgb) reshape = input_conversion(input_layer1, nv12_to_hailo_yuv, emulator_support = True) -nms_postprocess("$HMZ_DATA/models_files/HailoNets/MCPReID/personface_detector/yolov5s_personface/2023-04-25/yolov5s_personface.json", yolov5, engine=cpu) +nms_postprocess("../../postprocess_config/yolov5s_personface.json", yolov5, engine=cpu) diff --git a/hailo_model_zoo/cfg/alls/generic/yolov5s_personface_rgbx.alls b/hailo_model_zoo/cfg/alls/generic/yolov5s_personface_rgbx.alls index 9d959fa7..8281aa68 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolov5s_personface_rgbx.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolov5s_personface_rgbx.alls @@ -2,4 +2,4 @@ normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0]) change_output_activation(sigmoid) post_quantization_optimization(finetune, policy=enabled, learning_rate=0.0001, epochs=4, dataset_size=4000, loss_factors=[1.0, 1.0, 1.0], loss_types=[l2rel, l2rel, 
l2rel], loss_layer_names=[conv70, conv63, conv55]) reshape_rgb = input_conversion(input_layer1, tf_rgbx_to_hailo_rgb, emulator_support=True) -nms_postprocess("$HMZ_DATA/models_files/HailoNets/MCPReID/personface_detector/yolov5s_personface/2023-04-25/yolov5s_personface.json", yolov5, engine=cpu) +nms_postprocess("../../postprocess_config/yolov5s_personface.json", yolov5, engine=cpu) diff --git a/hailo_model_zoo/cfg/alls/generic/yolov5xs_wo_spp.alls b/hailo_model_zoo/cfg/alls/generic/yolov5xs_wo_spp.alls index 366cc2e9..933a004f 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolov5xs_wo_spp.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolov5xs_wo_spp.alls @@ -1,5 +1,5 @@ normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0]) change_output_activation(sigmoid) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolov5xs/pretrained/2023-04-25/yolov5xs_nms_config.json", yolov5, engine=cpu) +nms_postprocess("../../postprocess_config/yolov5xs_nms_config.json", yolov5, engine=cpu) model_optimization_config(calibration, batch_size=4, calibset_size=64) post_quantization_optimization(finetune, policy=enabled, learning_rate=0.0002, epochs=8, dataset_size=2048) diff --git a/hailo_model_zoo/cfg/alls/generic/yolov5xs_wo_spp_nms_core.alls b/hailo_model_zoo/cfg/alls/generic/yolov5xs_wo_spp_nms_core.alls index ae5059ba..b25952e7 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolov5xs_wo_spp_nms_core.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolov5xs_wo_spp_nms_core.alls @@ -1,5 +1,5 @@ normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0]) change_output_activation(sigmoid) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolov5xs/pretrained/2022-05-10/yolov5xs_wo_spp_nms_config.json", yolov5) +nms_postprocess("../../postprocess_config/yolov5xs_wo_spp_nms_config.json", yolov5) model_optimization_config(calibration, batch_size=2, calibset_size=64) post_quantization_optimization(finetune, policy=enabled, 
learning_rate=0.0001, epochs=4, dataset_size=4000, loss_types=[l2rel, l2rel, l2rel], loss_layer_names=[conv69, conv61, conv53]) diff --git a/hailo_model_zoo/cfg/alls/generic/yolov6n.alls b/hailo_model_zoo/cfg/alls/generic/yolov6n.alls index 80d63eac..fd72455c 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolov6n.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolov6n.alls @@ -1,4 +1,3 @@ +nms_postprocess("../../postprocess_config/nms_config_yolov6n.json", yolox, engine=cpu) model_optimization_config(calibration, batch_size=2, calibset_size=64) - post_quantization_optimization(finetune, policy=enabled, loss_factors=[0.125, 2, 0.25, 0.125, 2, 0.25, 0.125, 2, 0.25, 1, 1, 1], dataset_size=4000, epochs=8, learning_rate=1e-5, loss_layer_names=[conv36, conv37, conv38, conv47, conv48, conv49, conv57, conv58, conv59, conv33, conv43, conv54], loss_types=[l2, l2, l2, l2, l2, l2, l2, l2, l2, l2rel, l2rel, l2rel]) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolov6n/pretrained/2023-05-31/nms_config_yolov6n.json", yolox, engine=cpu) \ No newline at end of file diff --git a/hailo_model_zoo/cfg/alls/generic/yolov7.alls b/hailo_model_zoo/cfg/alls/generic/yolov7.alls index 9ce7086d..1087fa8a 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolov7.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolov7.alls @@ -1,6 +1,6 @@ normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0]) change_output_activation(sigmoid) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolov7/pretrained/2023-04-25/yolov7_nms_config.json", yolov5, engine=cpu) +nms_postprocess("../../postprocess_config/yolov7_nms_config.json", yolov5, engine=cpu) model_optimization_config(calibration, batch_size=2, calibset_size=512) pre_quantization_optimization(activation_clipping, layers={*}, mode=percentile, clipping_values=[0, 99.99]) pre_quantization_optimization(activation_clipping, layers=[yolov7/maxpool1, yolov7/maxpool2, yolov7/maxpool3, yolov7/maxpool4, 
yolov7/maxpool5, yolov7/maxpool6, yolov7/maxpool7, yolov7/maxpool8, yolov7/maxpool8], mode=disabled) diff --git a/hailo_model_zoo/cfg/alls/generic/yolov7_tiny.alls b/hailo_model_zoo/cfg/alls/generic/yolov7_tiny.alls index d78471c2..ea3f1fe7 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolov7_tiny.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolov7_tiny.alls @@ -1,3 +1,3 @@ normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0]) change_output_activation(sigmoid) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolov7_tiny/pretrained/2023-04-25/yolov7_tiny_nms_config.json", yolov5, engine=cpu) +nms_postprocess("../../postprocess_config/yolov7_tiny_nms_config.json", yolov5, engine=cpu) diff --git a/hailo_model_zoo/cfg/alls/generic/yolov7e6.alls b/hailo_model_zoo/cfg/alls/generic/yolov7e6.alls index 5be31f09..36761191 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolov7e6.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolov7e6.alls @@ -1,6 +1,6 @@ normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0]) change_output_activation(sigmoid) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolov7e6/pretrained/2023-04-25/yolov7e6_nms_config.json", yolov5, engine=cpu) +nms_postprocess("../../postprocess_config/yolov7e6_nms_config.json", yolov5, engine=cpu) model_optimization_config(calibration, batch_size=1, calibset_size=64) post_quantization_optimization(finetune, policy=enabled, batch_size=1) model_optimization_config(checker_cfg, policy=disabled) \ No newline at end of file diff --git a/hailo_model_zoo/cfg/alls/generic/yolov8l.alls b/hailo_model_zoo/cfg/alls/generic/yolov8l.alls index 6da896fc..508b3de5 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolov8l.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolov8l.alls @@ -7,4 +7,4 @@ model_optimization_flavor(compression_level=0) change_output_activation(conv74, sigmoid) change_output_activation(conv89, sigmoid) change_output_activation(conv103, 
sigmoid) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolov8l/2023-02-02/yolov8l_nms_config.json", meta_arch=yolov8, engine=cpu) +nms_postprocess("../../postprocess_config/yolov8l_nms_config.json", meta_arch=yolov8, engine=cpu) diff --git a/hailo_model_zoo/cfg/alls/generic/yolov8m.alls b/hailo_model_zoo/cfg/alls/generic/yolov8m.alls index 9c288800..4e4b7906 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolov8m.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolov8m.alls @@ -3,5 +3,5 @@ model_optimization_config(calibration, batch_size=2) change_output_activation(conv58, sigmoid) change_output_activation(conv71, sigmoid) change_output_activation(conv83, sigmoid) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolov8m/2023-02-02/yolov8m_nms_config.json", meta_arch=yolov8, engine=cpu) +nms_postprocess("../../postprocess_config/yolov8m_nms_config.json", meta_arch=yolov8, engine=cpu) post_quantization_optimization(finetune, policy=enabled, learning_rate=0.000025) \ No newline at end of file diff --git a/hailo_model_zoo/cfg/alls/generic/yolov8m_pose.alls b/hailo_model_zoo/cfg/alls/generic/yolov8m_pose.alls index 29f39dfa..268ee68e 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolov8m_pose.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolov8m_pose.alls @@ -5,4 +5,4 @@ change_output_activation(conv60, sigmoid) quantization_param(output_layer3, precision_mode=a16_w16) quantization_param(output_layer6, precision_mode=a16_w16) quantization_param(output_layer9, precision_mode=a16_w16) -post_quantization_optimization(finetune, policy=enabled, epochs=10, learning_rate=0.00015, loss_layer_names=[output_layer1, output_layer2, output_layer3, output_layer4, output_layer5, output_layer6, output_layer7, output_layer8, output_layer9, conv83, conv67, conv51, conv89, conv72, conv58], loss_factors=[1, 1, 2, 1, 1, 2, 1, 1, 2, 2, 2, 2, 1, 1, 1]) \ No newline at end of file +post_quantization_optimization(finetune, policy=enabled, 
epochs=10, learning_rate=0.00015, loss_layer_names=[output_layer1, output_layer2, output_layer3, output_layer4, output_layer5, output_layer6, output_layer7, output_layer8, output_layer9, conv83, conv67, conv51, conv89, conv72, conv58], loss_factors=[1, 1, 2, 1, 1, 2, 1, 1, 2, 2, 2, 2, 1, 1, 1]) diff --git a/hailo_model_zoo/cfg/alls/generic/yolov8n.alls b/hailo_model_zoo/cfg/alls/generic/yolov8n.alls index b142d0ba..3a61b363 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolov8n.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolov8n.alls @@ -2,4 +2,4 @@ normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0]) change_output_activation(conv42, sigmoid) change_output_activation(conv53, sigmoid) change_output_activation(conv63, sigmoid) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolov8n/2023-01-30/yolov8n_nms_config.json", meta_arch=yolov8, engine=cpu) +nms_postprocess("../../postprocess_config/yolov8n_nms_config.json", meta_arch=yolov8, engine=cpu) diff --git a/hailo_model_zoo/cfg/alls/generic/yolov8s.alls b/hailo_model_zoo/cfg/alls/generic/yolov8s.alls index 13b5a60a..ba652ccc 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolov8s.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolov8s.alls @@ -2,4 +2,4 @@ normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0]) change_output_activation(conv42, sigmoid) change_output_activation(conv53, sigmoid) change_output_activation(conv63, sigmoid) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolov8s/2023-02-02/yolov8s_nms_config.json", meta_arch=yolov8, engine=cpu) +nms_postprocess("../../postprocess_config/yolov8s_nms_config.json", meta_arch=yolov8, engine=cpu) diff --git a/hailo_model_zoo/cfg/alls/generic/yolov8x.alls b/hailo_model_zoo/cfg/alls/generic/yolov8x.alls index 898ba300..b0dfef7c 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolov8x.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolov8x.alls @@ -7,4 +7,4 @@ 
model_optimization_flavor(compression_level=0) change_output_activation(conv74, sigmoid) change_output_activation(conv89, sigmoid) change_output_activation(conv103, sigmoid) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolov8x/2023-02-02/yolov8x_nms_config.json", meta_arch=yolov8, engine=cpu) +nms_postprocess("../../postprocess_config/yolov8x_nms_config.json", meta_arch=yolov8, engine=cpu) diff --git a/hailo_model_zoo/cfg/alls/generic/yolov9c.alls b/hailo_model_zoo/cfg/alls/generic/yolov9c.alls new file mode 100644 index 00000000..2cd00017 --- /dev/null +++ b/hailo_model_zoo/cfg/alls/generic/yolov9c.alls @@ -0,0 +1,6 @@ +normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0]) +change_output_activation(conv100, sigmoid) +change_output_activation(conv122, sigmoid) +change_output_activation(conv144, sigmoid) +model_optimization_config(calibration, batch_size=2) +model_optimization_flavor(compression_level=0, optimization_level=0) diff --git a/hailo_model_zoo/cfg/alls/generic/yolox_l_leaky.alls b/hailo_model_zoo/cfg/alls/generic/yolox_l_leaky.alls index 0ece39b5..ca182301 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolox_l_leaky.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolox_l_leaky.alls @@ -13,4 +13,4 @@ pre_quantization_optimization(weights_clipping, layers=[conv111], mode=manual, c post_quantization_optimization(finetune, policy=disabled) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolox_l_leaky/pretrained/2023-05-31/nms_config_yolox_l_leaky.json", yolox, engine=cpu) +nms_postprocess("../../postprocess_config/nms_config_yolox_l_leaky.json", yolox, engine=cpu) diff --git a/hailo_model_zoo/cfg/alls/generic/yolox_s_leaky.alls b/hailo_model_zoo/cfg/alls/generic/yolox_s_leaky.alls index 72cd7030..6f89c6d4 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolox_s_leaky.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolox_s_leaky.alls @@ -1,3 +1,3 @@ input_conversion = 
input_conversion(bgr_to_rgb) post_quantization_optimization(finetune, policy=disabled) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolox_s_leaky/pretrained/2023-05-31/nms_config_yolox_s_leaky.json", yolox, engine=cpu) \ No newline at end of file +nms_postprocess("../../postprocess_config/nms_config_yolox_s_leaky.json", yolox, engine=cpu) \ No newline at end of file diff --git a/hailo_model_zoo/cfg/alls/generic/yolox_s_wide_leaky.alls b/hailo_model_zoo/cfg/alls/generic/yolox_s_wide_leaky.alls index 751802f7..a33e278f 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolox_s_wide_leaky.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolox_s_wide_leaky.alls @@ -1,3 +1,3 @@ # post_quantization_optimization commands post_quantization_optimization(finetune, policy=disabled) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolox_s_wide_leaky/pretrained/2023-05-31/nms_config_yolox_s_wide_leaky.json", yolox, engine=cpu) \ No newline at end of file +nms_postprocess("../../postprocess_config/nms_config_yolox_s_wide_leaky.json", yolox, engine=cpu) \ No newline at end of file diff --git a/hailo_model_zoo/cfg/alls/generic/yolox_tiny.alls b/hailo_model_zoo/cfg/alls/generic/yolox_tiny.alls index bfe56992..cc184584 100644 --- a/hailo_model_zoo/cfg/alls/generic/yolox_tiny.alls +++ b/hailo_model_zoo/cfg/alls/generic/yolox_tiny.alls @@ -1,2 +1,2 @@ normalization1 = normalization([123.675, 116.28, 103.53], [58.395, 57.12, 57.375]) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolox/yolox_tiny/pretrained/2023-05-31/nms_config_yolox_tiny.json", yolox, engine=cpu) \ No newline at end of file +nms_postprocess("../../postprocess_config/nms_config_yolox_tiny.json", yolox, engine=cpu) \ No newline at end of file diff --git a/hailo_model_zoo/cfg/alls/hailo15h/base/resnet_v1_50.alls b/hailo_model_zoo/cfg/alls/hailo15h/base/resnet_v1_50.alls new file mode 100644 index 00000000..b33cc5fe --- /dev/null +++ 
b/hailo_model_zoo/cfg/alls/hailo15h/base/resnet_v1_50.alls @@ -0,0 +1,14 @@ +# normalization commands +normalization1 = normalization([123.68, 116.78, 103.94], [1.0, 1.0, 1.0]) + +# New quantization params +post_quantization_optimization(bias_correction, policy=enabled) +post_quantization_optimization(finetune, policy=disabled) + +quantization_param({*}, max_elementwise_feed_repeat=1) + +# model_optimization_config commands +model_optimization_config(calibration, batch_size=2, calibset_size=64) + +# allocation params +resources_param(max_utilization=0.9) diff --git a/hailo_model_zoo/cfg/alls/hailo15h/base/ssd_mobilenet_v1.alls b/hailo_model_zoo/cfg/alls/hailo15h/base/ssd_mobilenet_v1.alls new file mode 100644 index 00000000..a26ab5bd --- /dev/null +++ b/hailo_model_zoo/cfg/alls/hailo15h/base/ssd_mobilenet_v1.alls @@ -0,0 +1,12 @@ +post_quantization_optimization(finetune, policy=enabled, dataset_size=4000, epochs=8, learning_rate=0.0001, + loss_layer_names=[bbox_decoder13, conv14, bbox_decoder17, conv18, bbox_decoder21, conv22, bbox_decoder25, conv26, bbox_decoder29, conv30, bbox_decoder33, conv34, conv32, conv24, conv28, conv12, conv20, conv16], + loss_factors=[0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 1.0, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1], + loss_types=[l2, l2, l2, l2, l2, l2, l2, l2, l2, l2, l2, l2, l2, l2, l2, l2, l2, l2]) +model_optimization_flavor(compression_level=0) + +performance_param(compiler_optimization_level=max) + +# normalization commands +normalization1 = normalization([127.5, 127.5, 127.5], [127.5, 127.5, 127.5]) + +nms_postprocess(meta_arch=ssd) \ No newline at end of file diff --git a/hailo_model_zoo/cfg/alls/hailo15h/performance/deeplab_v3_mobilenet_v2.alls b/hailo_model_zoo/cfg/alls/hailo15h/performance/deeplab_v3_mobilenet_v2.alls new file mode 100644 index 00000000..bc4f883c --- /dev/null +++ b/hailo_model_zoo/cfg/alls/hailo15h/performance/deeplab_v3_mobilenet_v2.alls @@ -0,0 +1,6 @@ +normalization1 = normalization([127.5, 127.5, 
127.5], [127.5, 127.5, 127.5]) +model_optimization_config(calibration, batch_size=1, calibset_size=64) +pre_quantization_optimization(equalization, policy=disabled) +post_quantization_optimization(finetune, policy=disabled) +post_quantization_optimization(bias_correction, policy=enabled) +performance_param(optimization_level=max) diff --git a/hailo_model_zoo/cfg/alls/hailo15h/performance/mobilenet_v2_1.0.alls b/hailo_model_zoo/cfg/alls/hailo15h/performance/mobilenet_v2_1.0.alls new file mode 100644 index 00000000..6585a1ef --- /dev/null +++ b/hailo_model_zoo/cfg/alls/hailo15h/performance/mobilenet_v2_1.0.alls @@ -0,0 +1,4 @@ +normalization1 = normalization([127.5, 127.5, 127.5], [127.5, 127.5, 127.5]) +post_quantization_optimization(finetune, policy=disabled) +post_quantization_optimization(bias_correction, policy=enabled) +performance_param(optimization_level=max) diff --git a/hailo_model_zoo/cfg/alls/hailo15h/performance/regnetx_800mf.alls b/hailo_model_zoo/cfg/alls/hailo15h/performance/regnetx_800mf.alls new file mode 100644 index 00000000..ed2329f3 --- /dev/null +++ b/hailo_model_zoo/cfg/alls/hailo15h/performance/regnetx_800mf.alls @@ -0,0 +1,2 @@ +normalization1 = normalization([123.675, 116.28, 103.53], [58.395, 57.12, 57.375]) +performance_param(optimization_level=max) diff --git a/hailo_model_zoo/cfg/alls/hailo15h/performance/ssd_mobilenet_v1.alls b/hailo_model_zoo/cfg/alls/hailo15h/performance/ssd_mobilenet_v1.alls new file mode 100644 index 00000000..21ea8370 --- /dev/null +++ b/hailo_model_zoo/cfg/alls/hailo15h/performance/ssd_mobilenet_v1.alls @@ -0,0 +1,8 @@ +normalization1 = normalization([127.5, 127.5, 127.5], [127.5, 127.5, 127.5]) +nms_postprocess("../../../postprocess_config/mobilenet_ssd_nms_postprocess_config.json", ssd, engine=nn_core) +quantization_param({conv*}, bias_mode=single_scale_decomposition) +post_quantization_optimization(finetune, policy=enabled, dataset_size=4000, epochs=8, learning_rate=0.0001, + loss_layer_names=[bbox_decoder13, 
conv14, bbox_decoder17, conv18, bbox_decoder21, conv22, bbox_decoder25, conv26, bbox_decoder29, conv30, bbox_decoder33, conv34, conv32, conv24, conv28, conv12, conv20, conv16], + loss_factors=[0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 1.0, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1], + loss_types=[l2, l2, l2, l2, l2, l2, l2, l2, l2, l2, l2, l2, l2, l2, l2, l2, l2, l2]) +performance_param(optimization_level=max) diff --git a/hailo_model_zoo/cfg/alls/hailo15h/performance/yolov5m_wo_spp_60p.alls b/hailo_model_zoo/cfg/alls/hailo15h/performance/yolov5m_wo_spp_60p.alls new file mode 100644 index 00000000..471049cb --- /dev/null +++ b/hailo_model_zoo/cfg/alls/hailo15h/performance/yolov5m_wo_spp_60p.alls @@ -0,0 +1,31 @@ +normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0]) +change_output_activation(sigmoid) +nms_postprocess("../../../postprocess_config/yolov5m_nms_config.json", yolov5, engine=cpu) +model_optimization_config(calibration, batch_size=4, calibset_size=64) +post_quantization_optimization(bias_correction, policy=disabled) +post_quantization_optimization(finetune, policy=enabled, meta_arch=yolo, dataset_size=4096, batch_size=4, epochs=5, learning_rate=0.0002, loss_layer_names=[conv92, conv93, conv82, conv84, conv72, conv74], loss_types=[l2rel, l2rel, l2rel, l2rel, l2rel, l2rel]) +quantization_param(conv28, precision_mode=a8_w4) +quantization_param(conv32, precision_mode=a8_w4) +quantization_param(conv34, precision_mode=a8_w4) +quantization_param(conv36, precision_mode=a8_w4) +quantization_param(conv38, precision_mode=a8_w4) +quantization_param(conv40, precision_mode=a8_w4) +quantization_param(conv42, precision_mode=a8_w4) +quantization_param(conv45, precision_mode=a8_w4) +quantization_param(conv46, precision_mode=a8_w4) +quantization_param(conv50, precision_mode=a8_w4) +quantization_param(conv52, precision_mode=a8_w4) +quantization_param(conv54, precision_mode=a8_w4) +quantization_param(conv55, precision_mode=a8_w4) +quantization_param(conv59, 
precision_mode=a8_w4) +quantization_param(conv61, precision_mode=a8_w4) +quantization_param(conv73, precision_mode=a8_w4) +quantization_param(conv78, precision_mode=a8_w4) +quantization_param(conv80, precision_mode=a8_w4) +quantization_param(conv83, precision_mode=a8_w4) +quantization_param(conv85, precision_mode=a8_w4) +quantization_param(conv86, precision_mode=a8_w4) +quantization_param(conv88, precision_mode=a8_w4) +quantization_param(conv90, precision_mode=a8_w4) +quantization_param(conv92, precision_mode=a8_w4) +performance_param(optimization_level=max) diff --git a/hailo_model_zoo/cfg/alls/hailo8/base/nanodet_repvgg.alls b/hailo_model_zoo/cfg/alls/hailo8/base/nanodet_repvgg.alls index f4c47c06..95342d25 100644 --- a/hailo_model_zoo/cfg/alls/hailo8/base/nanodet_repvgg.alls +++ b/hailo_model_zoo/cfg/alls/hailo8/base/nanodet_repvgg.alls @@ -1,5 +1,6 @@ normalization1 = normalization([103.53, 116.28, 123.675], [57.375, 57.12, 58.395]) model_optimization_config(calibration, batch_size=4, calibset_size=64) input_conversion = input_conversion(bgr_to_rgb) +nms_postprocess("../../../postprocess_config/nanodet_nms_config.json", meta_arch=yolov8, engine=cpu) allocator_param(merge_min_layer_utilization=0.1) \ No newline at end of file diff --git a/hailo_model_zoo/cfg/alls/hailo8/base/nanodet_repvgg_a1_640.alls b/hailo_model_zoo/cfg/alls/hailo8/base/nanodet_repvgg_a1_640.alls index e688183c..9f0eae8b 100644 --- a/hailo_model_zoo/cfg/alls/hailo8/base/nanodet_repvgg_a1_640.alls +++ b/hailo_model_zoo/cfg/alls/hailo8/base/nanodet_repvgg_a1_640.alls @@ -1,4 +1,5 @@ normalization1 = normalization([103.53, 116.28, 123.675], [57.375, 57.12, 58.395]) input_conversion = input_conversion(bgr_to_rgb) allocator_param(merge_min_layer_utilization=0.1) +nms_postprocess("../../../postprocess_config/nanodet_repvgg_a1_640_nms_config.json", meta_arch=yolov8, engine=cpu) diff --git a/hailo_model_zoo/cfg/alls/hailo8/base/ssd_mobilenet_v1.alls 
b/hailo_model_zoo/cfg/alls/hailo8/base/ssd_mobilenet_v1.alls index fda9d997..fd93be49 100644 --- a/hailo_model_zoo/cfg/alls/hailo8/base/ssd_mobilenet_v1.alls +++ b/hailo_model_zoo/cfg/alls/hailo8/base/ssd_mobilenet_v1.alls @@ -1,5 +1,5 @@ normalization1 = normalization([127.5, 127.5, 127.5], [127.5, 127.5, 127.5]) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/ssd/ssd_mobilenet_v1/pretrained/2023-07-18/mobilenet_ssd_nms_postprocess_config.json", ssd, engine=nn_core) +nms_postprocess("../../../postprocess_config/mobilenet_ssd_nms_postprocess_config.json", ssd, engine=nn_core) quantization_param({conv*}, bias_mode=single_scale_decomposition) post_quantization_optimization(finetune, policy=enabled, dataset_size=4000, epochs=8, learning_rate=0.0001, loss_layer_names=[bbox_decoder13, conv14, bbox_decoder17, conv18, bbox_decoder21, conv22, bbox_decoder25, conv26, bbox_decoder29, conv30, bbox_decoder33, conv34, conv32, conv24, conv28, conv12, conv20, conv16], diff --git a/hailo_model_zoo/cfg/alls/hailo8/base/ssd_mobilenet_v1_no_alls.alls b/hailo_model_zoo/cfg/alls/hailo8/base/ssd_mobilenet_v1_no_alls.alls index f7045b04..79dd5ae2 100644 --- a/hailo_model_zoo/cfg/alls/hailo8/base/ssd_mobilenet_v1_no_alls.alls +++ b/hailo_model_zoo/cfg/alls/hailo8/base/ssd_mobilenet_v1_no_alls.alls @@ -1,4 +1,4 @@ normalization1 = normalization([127.5, 127.5, 127.5], [127.5, 127.5, 127.5]) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/ssd/ssd_mobilenet_v1/pretrained/2021-07-11/mobilenet_ssd_nms_postprocess_config.json", ssd, engine=nn_core) +nms_postprocess("../../../postprocess_config/mobilenet_ssd_nms_postprocess_config.json", ssd, engine=nn_core) context_switch_param(mode=disabled) post_quantization_optimization(finetune, policy=enabled, loss_factors=[0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 1.0, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1], dataset_size=4000, epochs=4, learning_rate=0.0001, loss_types=[l2, l2, l2, l2, l2, l2, l2, l2, l2, 
l2, l2, l2, l2, l2, l2, l2, l2, l2], loss_layer_names=[bbox_decoder13, conv14, bbox_decoder17, conv18, bbox_decoder21, conv22, bbox_decoder25, conv26, bbox_decoder29, conv30, bbox_decoder33, conv34, conv32, conv24, conv28, conv12, conv20, conv16]) diff --git a/hailo_model_zoo/cfg/alls/hailo8/base/ssd_mobilenet_v1_visdrone.alls b/hailo_model_zoo/cfg/alls/hailo8/base/ssd_mobilenet_v1_visdrone.alls index 25c87d1a..8f3a8064 100644 --- a/hailo_model_zoo/cfg/alls/hailo8/base/ssd_mobilenet_v1_visdrone.alls +++ b/hailo_model_zoo/cfg/alls/hailo8/base/ssd_mobilenet_v1_visdrone.alls @@ -1,5 +1,5 @@ normalization1 = normalization([127.5, 127.5, 127.5], [127.5, 127.5, 127.5]) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-Visdrone/ssd/ssd_mobilenet_v1_visdrone/pretrained/2023-07-18/mobilenet_ssd_nms_visdrone_postprocess_config.json", ssd, engine=nn_core) +nms_postprocess("../../../postprocess_config/mobilenet_ssd_nms_visdrone_postprocess_config.json", ssd, engine=nn_core) model_optimization_config(calibration, batch_size=8, calibset_size=32) quantization_param({conv*}, bias_mode=single_scale_decomposition) post_quantization_optimization(finetune, policy=disabled) diff --git a/hailo_model_zoo/cfg/alls/hailo8/base/yolov5m.alls b/hailo_model_zoo/cfg/alls/hailo8/base/yolov5m.alls index f0c9eee1..07b13523 100644 --- a/hailo_model_zoo/cfg/alls/hailo8/base/yolov5m.alls +++ b/hailo_model_zoo/cfg/alls/hailo8/base/yolov5m.alls @@ -1,6 +1,6 @@ normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0]) change_output_activation(sigmoid) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolov5m_spp/pretrained/2023-04-25/yolov5m_nms_config.json", yolov5, engine=cpu) +nms_postprocess("../../../postprocess_config/yolov5m_spp_nms_config.json", yolov5, engine=cpu) model_optimization_config(calibration, batch_size=4, calibset_size=64) quantization_param(conv45, precision_mode=a8_w4) quantization_param(conv46, precision_mode=a8_w4) diff 
--git a/hailo_model_zoo/cfg/alls/hailo8/base/yolov5m_wo_spp_yuy2.alls b/hailo_model_zoo/cfg/alls/hailo8/base/yolov5m_wo_spp_yuy2.alls index 38563596..7f000a9f 100644 --- a/hailo_model_zoo/cfg/alls/hailo8/base/yolov5m_wo_spp_yuy2.alls +++ b/hailo_model_zoo/cfg/alls/hailo8/base/yolov5m_wo_spp_yuy2.alls @@ -3,7 +3,7 @@ change_output_activation(sigmoid) yuv_to_rgb1 = input_conversion(input_layer1, yuv_to_rgb) resize_input1 = resize(input_layer1, resize_shapes=[720, 1280]) reshape_yuy2 = input_conversion(input_layer1, yuy2_to_hailo_yuv, emulator_support=True) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolov5m/pretrained/2023-04-25/yolov5m_nms_config.json", yolov5, engine=cpu) +nms_postprocess("../../../postprocess_config/yolov5m_nms_config.json", yolov5, engine=cpu) model_optimization_config(calibration, batch_size=4, calibset_size=64) quantization_param(conv45, precision_mode=a8_w4) diff --git a/hailo_model_zoo/cfg/alls/hailo8/base/yolov5s_personface_nv12.alls b/hailo_model_zoo/cfg/alls/hailo8/base/yolov5s_personface_nv12.alls index f6fbb99a..c96407ba 100644 --- a/hailo_model_zoo/cfg/alls/hailo8/base/yolov5s_personface_nv12.alls +++ b/hailo_model_zoo/cfg/alls/hailo8/base/yolov5s_personface_nv12.alls @@ -3,7 +3,7 @@ change_output_activation(sigmoid) post_quantization_optimization(finetune, policy=enabled, learning_rate=0.0001, epochs=4, dataset_size=4000, loss_factors=[1.0, 1.0, 1.0], loss_types=[l2rel, l2rel, l2rel], loss_layer_names=[conv70, conv63, conv55]) yuv_to_rgb1 = input_conversion(yuv_to_rgb) reshape = input_conversion(input_layer1, nv12_to_hailo_yuv, emulator_support = True) -nms_postprocess("$HMZ_DATA/models_files/HailoNets/MCPReID/personface_detector/yolov5s_personface/2023-04-25/yolov5s_personface.json", yolov5, engine=cpu) +nms_postprocess("../../../postprocess_config/yolov5s_personface.json", yolov5, engine=cpu) allocator_param(timeout=1000, cluster_timeout=1000, automatic_ddr=disabled) 
context_switch_param(mode=disabled) performance_param(fps=320) diff --git a/hailo_model_zoo/cfg/alls/hailo8/base/yolov5xs_wo_spp_nms_core.alls b/hailo_model_zoo/cfg/alls/hailo8/base/yolov5xs_wo_spp_nms_core.alls index bdce2470..b4d26c7d 100644 --- a/hailo_model_zoo/cfg/alls/hailo8/base/yolov5xs_wo_spp_nms_core.alls +++ b/hailo_model_zoo/cfg/alls/hailo8/base/yolov5xs_wo_spp_nms_core.alls @@ -1,6 +1,6 @@ normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0]) change_output_activation(sigmoid) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolov5xs/pretrained/2022-05-10/yolov5xs_wo_spp_nms_config.json", yolov5) +nms_postprocess("../../../postprocess_config/yolov5xs_wo_spp_nms_config.json", yolov5) model_optimization_config(calibration, batch_size=2, calibset_size=64) post_quantization_optimization(finetune, policy=enabled, learning_rate=0.0001, epochs=4, dataset_size=4000, loss_types=[l2rel, l2rel, l2rel], loss_layer_names=[conv69, conv61, conv53]) performance_param(fps=100) diff --git a/hailo_model_zoo/cfg/alls/hailo8/base/yolov6n.alls b/hailo_model_zoo/cfg/alls/hailo8/base/yolov6n.alls index 74209570..c5b59cb5 100644 --- a/hailo_model_zoo/cfg/alls/hailo8/base/yolov6n.alls +++ b/hailo_model_zoo/cfg/alls/hailo8/base/yolov6n.alls @@ -1,5 +1,3 @@ +nms_postprocess("../../../postprocess_config/nms_config_yolov6n.json", yolox, engine=cpu) model_optimization_config(calibration, batch_size=2, calibset_size=64) - post_quantization_optimization(finetune, policy=enabled, loss_factors=[0.125, 2, 0.25, 0.125, 2, 0.25, 0.125, 2, 0.25, 1, 1, 1], dataset_size=4000, epochs=8, learning_rate=1e-5, loss_layer_names=[conv36, conv37, conv38, conv47, conv48, conv49, conv57, conv58, conv59, conv33, conv43, conv54], loss_types=[l2, l2, l2, l2, l2, l2, l2, l2, l2, l2rel, l2rel, l2rel]) -context_switch_param(mode=disabled) -performance_param(optimization_level=max) diff --git a/hailo_model_zoo/cfg/alls/hailo8/performance/deeplab_v3_mobilenet_v2.alls 
b/hailo_model_zoo/cfg/alls/hailo8/performance/deeplab_v3_mobilenet_v2.alls new file mode 100644 index 00000000..bc4f883c --- /dev/null +++ b/hailo_model_zoo/cfg/alls/hailo8/performance/deeplab_v3_mobilenet_v2.alls @@ -0,0 +1,6 @@ +normalization1 = normalization([127.5, 127.5, 127.5], [127.5, 127.5, 127.5]) +model_optimization_config(calibration, batch_size=1, calibset_size=64) +pre_quantization_optimization(equalization, policy=disabled) +post_quantization_optimization(finetune, policy=disabled) +post_quantization_optimization(bias_correction, policy=enabled) +performance_param(optimization_level=max) diff --git a/hailo_model_zoo/cfg/alls/hailo8/performance/efficientnet_l.alls b/hailo_model_zoo/cfg/alls/hailo8/performance/efficientnet_l.alls new file mode 100644 index 00000000..079fbb64 --- /dev/null +++ b/hailo_model_zoo/cfg/alls/hailo8/performance/efficientnet_l.alls @@ -0,0 +1,3 @@ +normalization1 = normalization([127, 127, 127], [128, 128, 128]) +model_optimization_config(calibration) +performance_param(optimization_level=max) diff --git a/hailo_model_zoo/cfg/alls/hailo8/performance/efficientnet_m.alls b/hailo_model_zoo/cfg/alls/hailo8/performance/efficientnet_m.alls index 924e67e6..9e0f7068 100644 --- a/hailo_model_zoo/cfg/alls/hailo8/performance/efficientnet_m.alls +++ b/hailo_model_zoo/cfg/alls/hailo8/performance/efficientnet_m.alls @@ -1,3 +1,2 @@ normalization1 = normalization([127, 127, 127], [128, 128, 128]) -allocator_param(automatic_ddr=False) -performance_param(fps=891) +performance_param(optimization_level=max) diff --git a/hailo_model_zoo/cfg/alls/hailo8/performance/mobilenet_v2_1.0.alls b/hailo_model_zoo/cfg/alls/hailo8/performance/mobilenet_v2_1.0.alls new file mode 100644 index 00000000..6585a1ef --- /dev/null +++ b/hailo_model_zoo/cfg/alls/hailo8/performance/mobilenet_v2_1.0.alls @@ -0,0 +1,4 @@ +normalization1 = normalization([127.5, 127.5, 127.5], [127.5, 127.5, 127.5]) +post_quantization_optimization(finetune, policy=disabled) 
+post_quantization_optimization(bias_correction, policy=enabled) +performance_param(optimization_level=max) diff --git a/hailo_model_zoo/cfg/alls/hailo8/performance/resnet_v1_50.alls b/hailo_model_zoo/cfg/alls/hailo8/performance/resnet_v1_50.alls index eb25210a..e974b679 100644 --- a/hailo_model_zoo/cfg/alls/hailo8/performance/resnet_v1_50.alls +++ b/hailo_model_zoo/cfg/alls/hailo8/performance/resnet_v1_50.alls @@ -1,5 +1,13 @@ normalization1 = normalization([123.68, 116.78, 103.94], [1.0, 1.0, 1.0]) quantization_param({conv*}, max_elementwise_feed_repeat=1) post_quantization_optimization(finetune, policy=disabled) -performance_param(fps=1331) +post_quantization_optimization(bias_correction, policy=enabled) +performance_param(optimization_level=max) +model_optimization_config(calibration, batch_size=2, calibset_size=64) +resources_param(max_utilization=0.9) + + + + + diff --git a/hailo_model_zoo/cfg/alls/hailo8/performance/ssd_mobilenet_v1.alls b/hailo_model_zoo/cfg/alls/hailo8/performance/ssd_mobilenet_v1.alls index b15ca4f3..d02deb1f 100644 --- a/hailo_model_zoo/cfg/alls/hailo8/performance/ssd_mobilenet_v1.alls +++ b/hailo_model_zoo/cfg/alls/hailo8/performance/ssd_mobilenet_v1.alls @@ -1,5 +1,5 @@ normalization1 = normalization([127.5, 127.5, 127.5], [127.5, 127.5, 127.5]) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/ssd/ssd_mobilenet_v1/pretrained/2023-07-18/mobilenet_ssd_nms_postprocess_config.json", ssd, engine=nn_core) +nms_postprocess("../../../postprocess_config/mobilenet_ssd_nms_postprocess_config.json", ssd, engine=nn_core) quantization_param({conv*}, bias_mode=single_scale_decomposition) post_quantization_optimization(finetune, policy=enabled, dataset_size=4000, epochs=8, learning_rate=0.0001, @@ -10,4 +10,4 @@ post_quantization_optimization(finetune, policy=enabled, dataset_size=4000, epoc nms1_d0, nms1_d0_output, nms1_d1, nms1_d1_output = defuse(nms1, 2, defuse_type=NMS) performance_param(compiler_optimization_level=max) 
-allocator_param(enable_lanes=False) +performance_param(optimization_level=max) diff --git a/hailo_model_zoo/cfg/alls/hailo8/performance/ssd_mobilenet_v1_visdrone.alls b/hailo_model_zoo/cfg/alls/hailo8/performance/ssd_mobilenet_v1_visdrone.alls index 9feee17e..cfa65d72 100644 --- a/hailo_model_zoo/cfg/alls/hailo8/performance/ssd_mobilenet_v1_visdrone.alls +++ b/hailo_model_zoo/cfg/alls/hailo8/performance/ssd_mobilenet_v1_visdrone.alls @@ -1,5 +1,5 @@ normalization1 = normalization([127.5, 127.5, 127.5], [127.5, 127.5, 127.5]) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-Visdrone/ssd/ssd_mobilenet_v1_visdrone/pretrained/2023-07-18/mobilenet_ssd_nms_visdrone_postprocess_config.json", ssd, engine=nn_core) +nms_postprocess("../../../postprocess_config/mobilenet_ssd_nms_visdrone_postprocess_config.json", ssd, engine=nn_core) model_optimization_config(calibration, batch_size=8, calibset_size=32) ## Allocator Script Version 1.0 quantization_param({conv*}, bias_mode=single_scale_decomposition) diff --git a/hailo_model_zoo/cfg/alls/hailo8/performance/stdc1.alls b/hailo_model_zoo/cfg/alls/hailo8/performance/stdc1.alls index 7eeccbda..89a0d963 100644 --- a/hailo_model_zoo/cfg/alls/hailo8/performance/stdc1.alls +++ b/hailo_model_zoo/cfg/alls/hailo8/performance/stdc1.alls @@ -5,4 +5,4 @@ post_quantization_optimization(finetune, policy=enabled, dataset_size=512, epoch resize7_d0, resize7_d1, resize7_d2, resize7_dc = defuse(resize7, 3) performance_param(compiler_optimization_level=1) -allocator_param(timeout=25m) +performance_param(optimization_level=max) diff --git a/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5m.alls b/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5m.alls index 41e98f4d..7018360c 100644 --- a/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5m.alls +++ b/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5m.alls @@ -1,6 +1,6 @@ normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0]) change_output_activation(sigmoid) 
-nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolov5m_spp/pretrained/2023-04-25/yolov5m_nms_config.json", yolov5, engine=cpu) +nms_postprocess("../../../postprocess_config/yolov5m_spp_nms_config.json", yolov5, engine=cpu) model_optimization_config(calibration, batch_size=4, calibset_size=64) quantization_param(conv45, precision_mode=a8_w4) quantization_param(conv46, precision_mode=a8_w4) diff --git a/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5m_vehicles.alls b/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5m_vehicles.alls index f3328dc5..0e2dac7c 100644 --- a/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5m_vehicles.alls +++ b/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5m_vehicles.alls @@ -12,7 +12,7 @@ quantization_param(conv91, precision_mode=a8_w4) pre_quantization_optimization(weights_clipping, layers=[conv75], mode=manual, clipping_values=[-32.52, 32.52]) pre_quantization_optimization(weights_clipping, layers=[conv85], mode=manual, clipping_values=[-26.61, 26.61]) post_quantization_optimization(finetune, policy=enabled, learning_rate=0.0001, epochs=4, dataset_size=4000, loss_factors=[1.0, 1.0, 1.0], loss_types=[l2rel, l2rel, l2rel], loss_layer_names=[conv94, conv85, conv75]) -nms_postprocess("$HMZ_DATA/models_files/HailoNets/LPR/vehicle_detector/yolov5m_vehicles/2023-04-25/yolov5m_vehicles_nms_config.json", yolov5, engine=cpu) +nms_postprocess("../../../postprocess_config/yolov5m_vehicles_nms_config.json", yolov5, engine=cpu) allocator_param(timeout=100000, cluster_timeout=100000, automatic_ddr=disabled) allocator_param(merge_min_layer_utilization=0.1) diff --git a/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5m_vehicles_nv12.alls b/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5m_vehicles_nv12.alls index 6a1b2f69..13511564 100644 --- a/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5m_vehicles_nv12.alls +++ b/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5m_vehicles_nv12.alls @@ -14,6 +14,6 @@ 
pre_quantization_optimization(weights_clipping, layers=[conv85], mode=manual, cl post_quantization_optimization(finetune, policy=enabled, learning_rate=0.0001, epochs=4, dataset_size=4000, loss_factors=[1.0, 1.0, 1.0], loss_types=[l2rel, l2rel, l2rel], loss_layer_names=[conv94, conv85, conv75]) yuv_to_rgb1 = input_conversion(yuv_to_rgb) reshape = input_conversion(input_layer1, nv12_to_hailo_yuv, emulator_support=True) -nms_postprocess("$HMZ_DATA/models_files/HailoNets/LPR/vehicle_detector/yolov5m_vehicles/2023-04-25/yolov5m_vehicles_nms_config.json", yolov5, engine=cpu) +nms_postprocess("../../../postprocess_config/yolov5m_vehicles_nms_config.json", yolov5, engine=cpu) compilation_param(yolov5m_vehicles_nv12/resize_input1, resize_bilinear_streaming=enabled) diff --git a/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5m_vehicles_yuy2.alls b/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5m_vehicles_yuy2.alls index 48ad90a1..9c39be43 100644 --- a/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5m_vehicles_yuy2.alls +++ b/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5m_vehicles_yuy2.alls @@ -15,7 +15,7 @@ post_quantization_optimization(finetune, policy=enabled, learning_rate=0.0001, e yuv_to_rgb1 = input_conversion(yuv_to_rgb) reshape_yuy2 = input_conversion(input_layer1, yuy2_to_hailo_yuv, emulator_support=True) -nms_postprocess("$HMZ_DATA/models_files/HailoNets/LPR/vehicle_detector/yolov5m_vehicles/2023-04-25/yolov5m_vehicles_nms_config.json", yolov5, engine=cpu) +nms_postprocess("../../../postprocess_config/yolov5m_vehicles_nms_config.json", yolov5, engine=cpu) allocator_param(timeout=100000, cluster_timeout=100000, automatic_ddr=disabled) allocator_param(merge_min_layer_utilization=0.1) diff --git a/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5m_wo_spp.alls b/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5m_wo_spp.alls index eb7cfc45..14ae861e 100644 --- a/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5m_wo_spp.alls +++ 
b/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5m_wo_spp.alls @@ -1,6 +1,6 @@ normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0]) change_output_activation(sigmoid) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolov5m/pretrained/2023-04-25/yolov5m_nms_config.json", yolov5, engine=cpu) +nms_postprocess("../../../postprocess_config/yolov5m_nms_config.json", yolov5, engine=cpu) model_optimization_config(calibration, batch_size=4, calibset_size=64) quantization_param(conv45, precision_mode=a8_w4) quantization_param(conv46, precision_mode=a8_w4) diff --git a/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5m_wo_spp_60p.alls b/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5m_wo_spp_60p.alls index 2ee74ec5..c67b9f15 100644 --- a/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5m_wo_spp_60p.alls +++ b/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5m_wo_spp_60p.alls @@ -1,6 +1,6 @@ normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0]) change_output_activation(sigmoid) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolov5m/pretrained/2023-04-25/yolov5m_nms_config.json", yolov5, engine=cpu) +nms_postprocess("../../../postprocess_config/yolov5m_nms_config.json", yolov5, engine=cpu) model_optimization_config(calibration, batch_size=4, calibset_size=64) post_quantization_optimization(bias_correction, policy=disabled) post_quantization_optimization(finetune, policy=enabled, meta_arch=yolo, dataset_size=4096, batch_size=4, epochs=5, learning_rate=0.0002, loss_layer_names=[conv92, conv93, conv82, conv84, conv72, conv74], loss_types=[l2rel, l2rel, l2rel, l2rel, l2rel, l2rel]) diff --git a/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5s.alls b/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5s.alls index f44c0b3f..4d3eea27 100644 --- a/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5s.alls +++ b/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5s.alls @@ -1,5 +1,5 @@ 
normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0]) change_output_activation(sigmoid) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolov5s_spp/pretrained/2023-04-25/yolov5s_nms_config.json", yolov5, engine=cpu) +nms_postprocess("../../../postprocess_config/yolov5s_nms_config.json", yolov5, engine=cpu) model_optimization_config(calibration, batch_size=4) performance_param(fps=375) diff --git a/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5s_personface.alls b/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5s_personface.alls index b99f3f62..262f8ef0 100644 --- a/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5s_personface.alls +++ b/hailo_model_zoo/cfg/alls/hailo8/performance/yolov5s_personface.alls @@ -1,6 +1,6 @@ normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0]) change_output_activation(sigmoid) -nms_postprocess("$HMZ_DATA/models_files/HailoNets/MCPReID/personface_detector/yolov5s_personface/2023-04-25/yolov5s_personface.json", yolov5, engine=cpu) +nms_postprocess("../../../postprocess_config/yolov5s_personface.json", yolov5, engine=cpu) # post_quantization_optimization commands post_quantization_optimization(finetune, policy=enabled, learning_rate=0.0001, epochs=4, dataset_size=4000, loss_factors=[1.0, 1.0, 1.0], loss_types=[l2rel, l2rel, l2rel], loss_layer_names=[conv70, conv63, conv55]) diff --git a/hailo_model_zoo/cfg/alls/hailo8/performance/yolov6n.alls b/hailo_model_zoo/cfg/alls/hailo8/performance/yolov6n.alls new file mode 100644 index 00000000..85835c0f --- /dev/null +++ b/hailo_model_zoo/cfg/alls/hailo8/performance/yolov6n.alls @@ -0,0 +1,4 @@ +nms_postprocess("../../../postprocess_config/nms_config_yolov6n.json", yolox, engine=cpu) +model_optimization_config(calibration, batch_size=2, calibset_size=64) +post_quantization_optimization(finetune, policy=enabled, loss_factors=[0.125, 2, 0.25, 0.125, 2, 0.25, 0.125, 2, 0.25, 1, 1, 1], dataset_size=4000, epochs=8, learning_rate=1e-5, 
loss_layer_names=[conv36, conv37, conv38, conv47, conv48, conv49, conv57, conv58, conv59, conv33, conv43, conv54], loss_types=[l2, l2, l2, l2, l2, l2, l2, l2, l2, l2rel, l2rel, l2rel]) +performance_param(optimization_level=max) diff --git a/hailo_model_zoo/cfg/alls/hailo8/performance/yolov8n.alls b/hailo_model_zoo/cfg/alls/hailo8/performance/yolov8n.alls index 66ba5779..43d79f40 100644 --- a/hailo_model_zoo/cfg/alls/hailo8/performance/yolov8n.alls +++ b/hailo_model_zoo/cfg/alls/hailo8/performance/yolov8n.alls @@ -2,4 +2,5 @@ normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0]) change_output_activation(conv42, sigmoid) change_output_activation(conv53, sigmoid) change_output_activation(conv63, sigmoid) -performance_param(optimization_level=max) \ No newline at end of file +performance_param(optimization_level=max) +nms_postprocess("../../../postprocess_config/yolov8n_nms_config.json", meta_arch=yolov8, engine=cpu) diff --git a/hailo_model_zoo/cfg/alls/hailo8/performance/yolov8s.alls b/hailo_model_zoo/cfg/alls/hailo8/performance/yolov8s.alls new file mode 100644 index 00000000..185a3060 --- /dev/null +++ b/hailo_model_zoo/cfg/alls/hailo8/performance/yolov8s.alls @@ -0,0 +1,6 @@ +normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0]) +change_output_activation(conv42, sigmoid) +change_output_activation(conv53, sigmoid) +change_output_activation(conv63, sigmoid) +nms_postprocess("../../../postprocess_config/yolov8s_nms_config.json", meta_arch=yolov8, engine=cpu) +performance_param(optimization_level=max) diff --git a/hailo_model_zoo/cfg/alls/hailo8/performance/yolox_s_leaky.alls b/hailo_model_zoo/cfg/alls/hailo8/performance/yolox_s_leaky.alls index 29ff712d..42b9158a 100644 --- a/hailo_model_zoo/cfg/alls/hailo8/performance/yolox_s_leaky.alls +++ b/hailo_model_zoo/cfg/alls/hailo8/performance/yolox_s_leaky.alls @@ -2,4 +2,4 @@ allocator_param(timeout=1800) input_conversion = input_conversion(bgr_to_rgb) 
post_quantization_optimization(finetune, policy=disabled) performance_param(fps=250) -nms_postprocess("$HMZ_DATA/models_files/ObjectDetection/Detection-COCO/yolo/yolox_s_leaky/pretrained/2023-05-31/nms_config_yolox_s_leaky.json", yolox, engine=cpu) +nms_postprocess("../../../postprocess_config/nms_config_yolox_s_leaky.json", yolox, engine=cpu) diff --git a/hailo_model_zoo/cfg/base/base.yaml b/hailo_model_zoo/cfg/base/base.yaml index 80893d24..d1b3b924 100644 --- a/hailo_model_zoo/cfg/base/base.yaml +++ b/hailo_model_zoo/cfg/base/base.yaml @@ -82,8 +82,6 @@ evaluation: meta_arch: null data_count_offset: 0 info: - client_or_rnd: r&d - standard_or_customized: standard task: N/A input_shape: N/A output_shape: N/A diff --git a/hailo_model_zoo/cfg/base/efficientdet_lite.yaml b/hailo_model_zoo/cfg/base/efficientdet_lite.yaml index c860e396..45ba79b4 100644 --- a/hailo_model_zoo/cfg/base/efficientdet_lite.yaml +++ b/hailo_model_zoo/cfg/base/efficientdet_lite.yaml @@ -20,6 +20,7 @@ postprocessing: score_threshold: 0.001 nms_iou_thresh: 0.5 meta_arch: efficientdet + hpp: false evaluation: labels_offset: 1 classes: 89 diff --git a/hailo_model_zoo/cfg/base/efficientnet.yaml b/hailo_model_zoo/cfg/base/efficientnet.yaml index 8bdfd6ae..58f002c4 100644 --- a/hailo_model_zoo/cfg/base/efficientnet.yaml +++ b/hailo_model_zoo/cfg/base/efficientnet.yaml @@ -7,7 +7,7 @@ preprocessing: info: source: https://github.com/tensorflow/tpu/tree/master/models/official/efficientnet license_url: https://www.apache.org/licenses/LICENSE-2.0 - license_name": Apache-2.0 + license_name: Apache-2.0 parser: normalization_params: normalize_in_net: true diff --git a/hailo_model_zoo/cfg/base/mobilenet.yaml b/hailo_model_zoo/cfg/base/mobilenet.yaml index 7a8bcb68..22137c6d 100644 --- a/hailo_model_zoo/cfg/base/mobilenet.yaml +++ b/hailo_model_zoo/cfg/base/mobilenet.yaml @@ -4,6 +4,8 @@ preprocessing: meta_arch: mobilenet info: source: https://github.com/tensorflow/models/tree/master/research/slim + 
license_url: https://github.com/tensorflow/models/blob/master/LICENSE + license_name: Apache-2.0 parser: normalization_params: normalize_in_net: true diff --git a/hailo_model_zoo/cfg/base/nanodet.yaml b/hailo_model_zoo/cfg/base/nanodet.yaml index 7c6de4f9..1d43db6f 100644 --- a/hailo_model_zoo/cfg/base/nanodet.yaml +++ b/hailo_model_zoo/cfg/base/nanodet.yaml @@ -20,6 +20,7 @@ postprocessing: - 8 - 16 - 32 + hpp: false info: source: https://github.com/RangiLyu/nanodet parser: diff --git a/hailo_model_zoo/cfg/base/resnet.yaml b/hailo_model_zoo/cfg/base/resnet.yaml index dbb354d0..2df9271c 100644 --- a/hailo_model_zoo/cfg/base/resnet.yaml +++ b/hailo_model_zoo/cfg/base/resnet.yaml @@ -5,7 +5,7 @@ preprocessing: info: source: https://github.com/tensorflow/models/tree/master/research/slim license_url: https://www.apache.org/licenses/LICENSE-2.0 - license_name": Apache-2.0 + license_name: Apache-2.0 parser: normalization_params: normalize_in_net: true diff --git a/hailo_model_zoo/cfg/base/ssd.yaml b/hailo_model_zoo/cfg/base/ssd.yaml index 637fd72d..62c3c00f 100644 --- a/hailo_model_zoo/cfg/base/ssd.yaml +++ b/hailo_model_zoo/cfg/base/ssd.yaml @@ -17,6 +17,7 @@ postprocessing: min_scale: 0.2 scales: [] num_layers: 6 + hpp: false info: source: https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/detection_model_zoo.md parser: diff --git a/hailo_model_zoo/cfg/base/vit.yaml b/hailo_model_zoo/cfg/base/vit.yaml index dc8ae787..9f4226c2 100644 --- a/hailo_model_zoo/cfg/base/vit.yaml +++ b/hailo_model_zoo/cfg/base/vit.yaml @@ -13,3 +13,6 @@ parser: - 127.5 - 127.5 - 127.5 +info: + license_url: https://github.com/huggingface/pytorch-image-models/blob/main/LICENSE + license_name: Apache-2.0 diff --git a/hailo_model_zoo/cfg/base/yolo.yaml b/hailo_model_zoo/cfg/base/yolo.yaml index e65fdd22..f7d458f1 100644 --- a/hailo_model_zoo/cfg/base/yolo.yaml +++ b/hailo_model_zoo/cfg/base/yolo.yaml @@ -36,6 +36,7 @@ postprocessing: - 198 - 373 - 326 + hpp: false 
info: source: https://github.com/ultralytics/yolov5/releases/tag/v2.0 parser: diff --git a/hailo_model_zoo/cfg/base/yolox.yaml b/hailo_model_zoo/cfg/base/yolox.yaml index dd384469..2ed021f6 100644 --- a/hailo_model_zoo/cfg/base/yolox.yaml +++ b/hailo_model_zoo/cfg/base/yolox.yaml @@ -11,6 +11,7 @@ postprocessing: nms_iou_thresh: 0.65 score_threshold: 0.01 meta_arch: yolox + hpp: false anchors: strides: - 32 diff --git a/hailo_model_zoo/cfg/networks/centernet_resnet_v1_18_postprocess.yaml b/hailo_model_zoo/cfg/networks/centernet_resnet_v1_18_postprocess.yaml index f7bfe442..34d233f4 100644 --- a/hailo_model_zoo/cfg/networks/centernet_resnet_v1_18_postprocess.yaml +++ b/hailo_model_zoo/cfg/networks/centernet_resnet_v1_18_postprocess.yaml @@ -22,7 +22,7 @@ parser: info: task: object detection input_shape: 512x512x3 - output_shape: 128x128x80, 128x128x2, 128x128x2 + output_shape: 128x128x2, 128x128x2, 128x128x80 operations: 31.21G parameters: 14.22M framework: gluoncv diff --git a/hailo_model_zoo/cfg/networks/centernet_resnet_v1_50_postprocess.yaml b/hailo_model_zoo/cfg/networks/centernet_resnet_v1_50_postprocess.yaml index 58d1cc85..634d7eeb 100644 --- a/hailo_model_zoo/cfg/networks/centernet_resnet_v1_50_postprocess.yaml +++ b/hailo_model_zoo/cfg/networks/centernet_resnet_v1_50_postprocess.yaml @@ -25,7 +25,7 @@ parser: info: task: object detection input_shape: 512x512x3 - output_shape: 128x128x80, 128x128x2, 128x128x2 + output_shape: 128x128x2, 128x128x2, 128x128x80 operations: 56.92G parameters: 30.07M framework: gluoncv diff --git a/hailo_model_zoo/cfg/networks/detr_resnet_v1_18_bn.yaml b/hailo_model_zoo/cfg/networks/detr_resnet_v1_18_bn.yaml index 36752576..1d319c14 100644 --- a/hailo_model_zoo/cfg/networks/detr_resnet_v1_18_bn.yaml +++ b/hailo_model_zoo/cfg/networks/detr_resnet_v1_18_bn.yaml @@ -33,8 +33,8 @@ parser: info: task: object detection input_shape: 800x800x3 - output_shape: 100x4, 100x92 - operations: 58.97G + output_shape: 1x100x92, 1x100x4 + 
operations: 61.87G parameters: 32.42M framework: pytorch training_data: coco train2017 diff --git a/hailo_model_zoo/cfg/networks/dncnn3.yaml b/hailo_model_zoo/cfg/networks/dncnn3.yaml index 9b2ba06b..40ee0af2 100644 --- a/hailo_model_zoo/cfg/networks/dncnn3.yaml +++ b/hailo_model_zoo/cfg/networks/dncnn3.yaml @@ -32,4 +32,5 @@ info: full_precision_result: 31.46 validation_data: BSD68 source: https://github.com/cszn/KAIR - license: https://github.com/cszn/KAIR/blob/master/LICENSE + license_url: https://github.com/cszn/KAIR/blob/master/LICENSE + license_name: MIT diff --git a/hailo_model_zoo/cfg/networks/dncnn_color_blind.yaml b/hailo_model_zoo/cfg/networks/dncnn_color_blind.yaml index 0b8672d0..45bf133a 100644 --- a/hailo_model_zoo/cfg/networks/dncnn_color_blind.yaml +++ b/hailo_model_zoo/cfg/networks/dncnn_color_blind.yaml @@ -36,4 +36,5 @@ info: full_precision_result: 33.87 validation_data: CBSD68 source: https://github.com/cszn/KAIR - license: https://github.com/cszn/KAIR/blob/master/LICENSE + license_url: https://github.com/cszn/KAIR/blob/master/LICENSE + license_name: MIT diff --git a/hailo_model_zoo/cfg/networks/efficientdet_lite0.yaml b/hailo_model_zoo/cfg/networks/efficientdet_lite0.yaml index bb6a6db9..0724a94c 100644 --- a/hailo_model_zoo/cfg/networks/efficientdet_lite0.yaml +++ b/hailo_model_zoo/cfg/networks/efficientdet_lite0.yaml @@ -29,13 +29,11 @@ postprocessing: device_pre_post_layers: nms: true sigmoid: true - postprocess_config_file: models_files/ObjectDetection/Detection-COCO/efficientdet/efficientdet_lite0/pretrained/2023-04-25/efficientdet_lite0_nms_config.json hpp: true info: task: object detection input_shape: 320x320x3 - output_shape: 40x40x36, 40x40x810, 20x20x36, 20x20x810, 10x10x36, 10x10x810, 5x5x36, - 5x5x810, 3x3x36, 3x3x810 + output_shape: 89x5x100 operations: 1.94G parameters: 3.56M framework: tensorflow diff --git a/hailo_model_zoo/cfg/networks/efficientdet_lite1.yaml b/hailo_model_zoo/cfg/networks/efficientdet_lite1.yaml index 
e6365aec..a7db9892 100644 --- a/hailo_model_zoo/cfg/networks/efficientdet_lite1.yaml +++ b/hailo_model_zoo/cfg/networks/efficientdet_lite1.yaml @@ -29,13 +29,11 @@ postprocessing: device_pre_post_layers: nms: true sigmoid: true - postprocess_config_file: models_files/ObjectDetection/Detection-COCO/efficientdet/efficientdet_lite1/pretrained/2023-04-25/efficientdet_lite1_nms_config.json hpp: true info: task: object detection input_shape: 384x384x3 - output_shape: 48x48x36, 48x48x810, 24x24x36, 24x24x810, 12x12x36, 12x12x810, 6x6x36, - 6x6x810, 3x3x36, 3x3x810 + output_shape: 89x5x100 operations: 4G parameters: 4.73M framework: tensorflow diff --git a/hailo_model_zoo/cfg/networks/efficientdet_lite2.yaml b/hailo_model_zoo/cfg/networks/efficientdet_lite2.yaml index 18cc8250..9fa38298 100644 --- a/hailo_model_zoo/cfg/networks/efficientdet_lite2.yaml +++ b/hailo_model_zoo/cfg/networks/efficientdet_lite2.yaml @@ -29,13 +29,11 @@ postprocessing: device_pre_post_layers: nms: true sigmoid: true - postprocess_config_file: models_files/ObjectDetection/Detection-COCO/efficientdet/efficientdet_lite2/pretrained/2023-04-25/efficientdet_lite2_nms_config.json hpp: true info: task: object detection input_shape: 448x448x3 - output_shape: 56x56x36, 56x56x810, 28x28x36, 28x28x810, 14x14x36, 14x14x810, 7x7x36, - 7x7x810, 4x4x36, 4x4x810 + output_shape: 89x5x100 operations: 6.84G parameters: 5.93M framework: tensorflow diff --git a/hailo_model_zoo/cfg/networks/efficientnet_l.yaml b/hailo_model_zoo/cfg/networks/efficientnet_l.yaml index a211f6e2..d623c43c 100644 --- a/hailo_model_zoo/cfg/networks/efficientnet_l.yaml +++ b/hailo_model_zoo/cfg/networks/efficientnet_l.yaml @@ -10,7 +10,7 @@ paths: info: task: classification input_shape: 300x300x3 - output_shape: 1x1x1001 + output_shape: '1001' operations: 19.4G parameters: 10.55M framework: tensorflow diff --git a/hailo_model_zoo/cfg/networks/face_attr_resnet_v1_18.yaml b/hailo_model_zoo/cfg/networks/face_attr_resnet_v1_18.yaml index 
5400e1c7..f46595f3 100644 --- a/hailo_model_zoo/cfg/networks/face_attr_resnet_v1_18.yaml +++ b/hailo_model_zoo/cfg/networks/face_attr_resnet_v1_18.yaml @@ -27,7 +27,7 @@ preprocessing: info: task: face attribute input_shape: 218x178x3 - output_shape: 1x80 + output_shape: '80' operations: 3G parameters: 11.74M framework: pytorch @@ -36,3 +36,5 @@ info: eval_metric: Mean Accuracy full_precision_result: 81.188 source: https://github.com/d-li14/face-attribute-prediction + license_url: https://github.com/d-li14/face-attribute-prediction/blob/master/LICENSE + license_name: MIT diff --git a/hailo_model_zoo/cfg/networks/face_attr_resnet_v1_18_nv12.yaml b/hailo_model_zoo/cfg/networks/face_attr_resnet_v1_18_nv12.yaml index cac70f2b..b80293c4 100644 --- a/hailo_model_zoo/cfg/networks/face_attr_resnet_v1_18_nv12.yaml +++ b/hailo_model_zoo/cfg/networks/face_attr_resnet_v1_18_nv12.yaml @@ -11,3 +11,4 @@ paths: info: task: pipeline input_shape: 109x178x3 + output_shape: '80' diff --git a/hailo_model_zoo/cfg/networks/face_attr_resnet_v1_18_rgbx.yaml b/hailo_model_zoo/cfg/networks/face_attr_resnet_v1_18_rgbx.yaml index d0b36c93..95a0b0c3 100644 --- a/hailo_model_zoo/cfg/networks/face_attr_resnet_v1_18_rgbx.yaml +++ b/hailo_model_zoo/cfg/networks/face_attr_resnet_v1_18_rgbx.yaml @@ -10,3 +10,4 @@ hn_editor: info: task: pipeline input_shape: 218x178x4 + output_shape: '80' diff --git a/hailo_model_zoo/cfg/networks/fast_depth.yaml b/hailo_model_zoo/cfg/networks/fast_depth.yaml index be5b6a82..4d279de4 100644 --- a/hailo_model_zoo/cfg/networks/fast_depth.yaml +++ b/hailo_model_zoo/cfg/networks/fast_depth.yaml @@ -30,7 +30,7 @@ parser: - 255 - 255 info: - task: Depth Estimation + task: depth estimation input_shape: 224x224x3 output_shape: 224x224x1 operations: 0.74G @@ -39,3 +39,5 @@ info: eval_metric: RMSE source: https://github.com/dwofk/fast-depth full_precision_result: 0.604 + license_url: https://github.com/dwofk/fast-depth/blob/master/LICENSE + license_name: MIT diff --git 
a/hailo_model_zoo/cfg/networks/fast_depth_nv12_fhd.yaml b/hailo_model_zoo/cfg/networks/fast_depth_nv12_fhd.yaml index 3eb78f9f..c0af14d0 100644 --- a/hailo_model_zoo/cfg/networks/fast_depth_nv12_fhd.yaml +++ b/hailo_model_zoo/cfg/networks/fast_depth_nv12_fhd.yaml @@ -47,3 +47,5 @@ info: eval_metric: RMSE source: https://github.com/dwofk/fast-depth full_precision_result: 0.61 + license_url: https://github.com/dwofk/fast-depth/blob/master/LICENSE + license_name: MIT diff --git a/hailo_model_zoo/cfg/networks/fast_sam_s.yaml b/hailo_model_zoo/cfg/networks/fast_sam_s.yaml new file mode 100644 index 00000000..b2a1c340 --- /dev/null +++ b/hailo_model_zoo/cfg/networks/fast_sam_s.yaml @@ -0,0 +1,44 @@ +base: +- base/yolov8_seg.yaml +postprocessing: + score_threshold: 0.25 +evaluation: + labels_offset: 0 + classes: 1 + labels_map: + - 0 +network: + network_name: fast_sam_s +paths: + alls_script: fast_sam_s.alls + network_path: + - models_files/SegmentAnything/coco/fast_sam/fast_sam_s/pretrained/2023-03-06/fast_sam_s.onnx + url: https://hailo-model-zoo.s3.eu-west-2.amazonaws.com/SegmentAnything/coco/fast_sam/fast_sam_s/pretrained/2023-03-06/fast_sam_s.zip +parser: + nodes: + - null + - - Conv_261 + - Conv_268 + - Conv_216 + - Conv_246 + - Conv_253 + - Conv_205 + - Conv_231 + - Conv_238 + - Conv_194 + - Mul_184 +info: + task: zero-shot instance segmentation + input_shape: 640x640x3 + output_shape: 20x20x64, 20x20x1, 20x20x32, 40x40x64, 40x40x1, 40x40x32, 80x80x64, + 80x80x1, 80x80x32, 160x160x32 + operations: 42.4G + parameters: 11.1M + framework: pytorch + training_data: SA-1B + validation_data: coco instances val2017 + eval_metric: AR1000 + full_precision_result: 40.1 + source: https://github.com/CASIA-IVA-Lab/FastSAM + license_url: https://github.com/CASIA-IVA-Lab/FastSAM/blob/main/LICENSE + license_name: Apache-2.0 diff --git a/hailo_model_zoo/cfg/networks/hand_landmark_lite.yaml b/hailo_model_zoo/cfg/networks/hand_landmark_lite.yaml index 7652f5fe..036e2e91 100644 --- 
a/hailo_model_zoo/cfg/networks/hand_landmark_lite.yaml +++ b/hailo_model_zoo/cfg/networks/hand_landmark_lite.yaml @@ -29,13 +29,12 @@ network: info: task: hand landmark detection input_shape: 224x224x3 - output_shape: 1x1, 1x1, 1x63, 1x63 + output_shape: 63, 63, 1, 1 operations: 0.3G parameters: 1.01M framework: tflite training_data: internal validation_data: internal - validation_data_set: N/A eval_metric: MNAE full_precision_result: 12.02 source: https://github.com/google/mediapipe diff --git a/hailo_model_zoo/cfg/networks/hardnet39ds.yaml b/hailo_model_zoo/cfg/networks/hardnet39ds.yaml index 3498c346..3e60a7d6 100644 --- a/hailo_model_zoo/cfg/networks/hardnet39ds.yaml +++ b/hailo_model_zoo/cfg/networks/hardnet39ds.yaml @@ -14,7 +14,7 @@ parser: info: task: classification input_shape: 224x224x3 - output_shape: 1x1x1000 + output_shape: '1000' operations: 0.86G parameters: 3.48M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/hardnet68.yaml b/hailo_model_zoo/cfg/networks/hardnet68.yaml index 972de5b4..c64a8840 100644 --- a/hailo_model_zoo/cfg/networks/hardnet68.yaml +++ b/hailo_model_zoo/cfg/networks/hardnet68.yaml @@ -14,7 +14,7 @@ parser: info: task: classification input_shape: 224x224x3 - output_shape: 1x1x1000 + output_shape: '1000' operations: 8.5G parameters: 17.56M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/inception_v1.yaml b/hailo_model_zoo/cfg/networks/inception_v1.yaml index 56a0def5..ef19fda4 100644 --- a/hailo_model_zoo/cfg/networks/inception_v1.yaml +++ b/hailo_model_zoo/cfg/networks/inception_v1.yaml @@ -10,7 +10,7 @@ paths: info: task: classification input_shape: 224x224x3 - output_shape: 1x1x1001 + output_shape: '1001' operations: 3G parameters: 6.62M framework: tensorflow diff --git a/hailo_model_zoo/cfg/networks/lightface_slim_nv12_fhd.yaml b/hailo_model_zoo/cfg/networks/lightface_slim_nv12_fhd.yaml index f60a61ba..1b341131 100644 --- a/hailo_model_zoo/cfg/networks/lightface_slim_nv12_fhd.yaml +++ 
b/hailo_model_zoo/cfg/networks/lightface_slim_nv12_fhd.yaml @@ -20,4 +20,4 @@ preprocessing: info: task: pipeline_hailo15 input_shape: 540x1920x3 - full_percision_result: 39.043 + full_precision_result: 39.043 diff --git a/hailo_model_zoo/cfg/networks/mobilenet_v1.yaml b/hailo_model_zoo/cfg/networks/mobilenet_v1.yaml index 40830941..e79eee6d 100644 --- a/hailo_model_zoo/cfg/networks/mobilenet_v1.yaml +++ b/hailo_model_zoo/cfg/networks/mobilenet_v1.yaml @@ -14,7 +14,7 @@ parser: info: task: classification input_shape: 224x224x3 - output_shape: 1x1x1001 + output_shape: '1001' operations: 1.14G parameters: 4.22M framework: tensorflow @@ -23,5 +23,3 @@ info: eval_metric: Accuracy (top1) full_precision_result: 70.97 source: https://github.com/tensorflow/models/tree/v1.13.0/research/slim - license_url: https://github.com/tensorflow/models/blob/v1.13.0/LICENSE - license_name: Apache-2.0 diff --git a/hailo_model_zoo/cfg/networks/mobilenet_v2_1.0.yaml b/hailo_model_zoo/cfg/networks/mobilenet_v2_1.0.yaml index bcc61625..f88b863e 100644 --- a/hailo_model_zoo/cfg/networks/mobilenet_v2_1.0.yaml +++ b/hailo_model_zoo/cfg/networks/mobilenet_v2_1.0.yaml @@ -25,5 +25,3 @@ info: eval_metric: Accuracy (top1) full_precision_result: 71.78 source: https://github.com/tensorflow/models/tree/v1.13.0/research/slim - license_url: https://github.com/tensorflow/models/blob/v1.13.0/LICENSE - license_name: Apache-2.0 diff --git a/hailo_model_zoo/cfg/networks/mobilenet_v2_1.4.yaml b/hailo_model_zoo/cfg/networks/mobilenet_v2_1.4.yaml index 72aa3401..14367d76 100644 --- a/hailo_model_zoo/cfg/networks/mobilenet_v2_1.4.yaml +++ b/hailo_model_zoo/cfg/networks/mobilenet_v2_1.4.yaml @@ -14,7 +14,7 @@ parser: info: task: classification input_shape: 224x224x3 - output_shape: 1x1x1001 + output_shape: '1001' operations: 1.18G parameters: 6.09M framework: tensorflow @@ -23,5 +23,3 @@ info: eval_metric: Accuracy (top1) full_precision_result: 74.18 source: 
https://github.com/tensorflow/models/tree/v1.13.0/research/slim - license_url: https://github.com/tensorflow/models/blob/v1.13.0/LICENSE - license_name: Apache-2.0 diff --git a/hailo_model_zoo/cfg/networks/nanodet_repvgg.yaml b/hailo_model_zoo/cfg/networks/nanodet_repvgg.yaml index e059a8ba..b7210ce7 100644 --- a/hailo_model_zoo/cfg/networks/nanodet_repvgg.yaml +++ b/hailo_model_zoo/cfg/networks/nanodet_repvgg.yaml @@ -8,20 +8,25 @@ preprocessing: network: network_name: nanodet_repvgg paths: - url: https://hailo-model-zoo.s3.eu-west-2.amazonaws.com/ObjectDetection/Detection-COCO/nanodet/nanodet_repvgg/pretrained/2022-02-07/nanodet.zip + url: https://hailo-model-zoo.s3.eu-west-2.amazonaws.com/ObjectDetection/Detection-COCO/nanodet/nanodet_repvgg/pretrained/2024-11-01/nanodet.zip alls_script: nanodet_repvgg.alls network_path: - - models_files/ObjectDetection/Detection-COCO/nanodet/nanodet_repvgg/pretrained/2022-02-07/nanodet.onnx + - models_files/ObjectDetection/Detection-COCO/nanodet/nanodet_repvgg/pretrained/2024-11-01/nanodet.onnx parser: nodes: - null - - Conv_65 - Conv_81 - Conv_97 +postprocessing: + device_pre_post_layers: + nms: true + meta_arch: yolov8 + hpp: true info: task: object detection input_shape: 416x416x3 - output_shape: 52x52x124, 26x26x124, 13x13x124 + output_shape: 80x5x100 operations: 11.28G parameters: 6.74M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/nanodet_repvgg_a12.yaml b/hailo_model_zoo/cfg/networks/nanodet_repvgg_a12.yaml index ecba4a2e..c34585f3 100644 --- a/hailo_model_zoo/cfg/networks/nanodet_repvgg_a12.yaml +++ b/hailo_model_zoo/cfg/networks/nanodet_repvgg_a12.yaml @@ -4,9 +4,9 @@ network: network_name: nanodet_repvgg_a12 paths: network_path: - - models_files/ObjectDetection/Detection-COCO/nanodet/nanodet_repvgg_a12/pretrained/2023-05-31/nanodet_repvgg_a12_640x640.onnx + - models_files/ObjectDetection/Detection-COCO/nanodet/nanodet_repvgg_a12/pretrained/2024-01-31/nanodet_repvgg_a12_640x640.onnx alls_script: 
nanodet_repvgg_a12.alls - url: https://hailo-model-zoo.s3.eu-west-2.amazonaws.com/ObjectDetection/Detection-COCO/nanodet/nanodet_repvgg_a12/pretrained/2023-05-31/nanodet_repvgg_a12_640x640.zip + url: https://hailo-model-zoo.s3.eu-west-2.amazonaws.com/ObjectDetection/Detection-COCO/nanodet/nanodet_repvgg_a12/pretrained/2024-01-31/nanodet_repvgg_a12_640x640.zip parser: nodes: - null @@ -19,11 +19,15 @@ parser: - Conv_102 - Sigmoid_104 - Sigmoid_105 +postprocessing: + device_pre_post_layers: + nms: true + meta_arch: yolox + hpp: true info: task: object detection input_shape: 640x640x3 - output_shape: 80x80x4, 80x80x1, 80x80x80, 40x40x4, 40x40x1, 40x40x80, 20x20x4, 20x20x1, - 20x20x80 + output_shape: 80x5x100 operations: 28.23G parameters: 5.13M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/nanodet_repvgg_a1_640.yaml b/hailo_model_zoo/cfg/networks/nanodet_repvgg_a1_640.yaml index 7bf5c984..4ab9dacc 100644 --- a/hailo_model_zoo/cfg/networks/nanodet_repvgg_a1_640.yaml +++ b/hailo_model_zoo/cfg/networks/nanodet_repvgg_a1_640.yaml @@ -5,18 +5,23 @@ network: paths: alls_script: nanodet_repvgg_a1_640.alls network_path: - - models_files/ObjectDetection/Detection-COCO/nanodet/nanodet_repvgg_a1_640/pretrained/2022-07-19/nanodet_repvgg_a1_640.onnx - url: https://hailo-model-zoo.s3.eu-west-2.amazonaws.com/ObjectDetection/Detection-COCO/nanodet/nanodet_repvgg_a1_640/pretrained/2022-07-19/nanodet_repvgg_a1_640.zip + - models_files/ObjectDetection/Detection-COCO/nanodet/nanodet_repvgg_a1_640/pretrained/2024-01-25/nanodet_repvgg_a1_640.onnx + url: https://hailo-model-zoo.s3.eu-west-2.amazonaws.com/ObjectDetection/Detection-COCO/nanodet/nanodet_repvgg_a1_640/pretrained/2024-01-25/nanodet_repvgg_a1_640.zip parser: nodes: - null - - Conv_65 - Conv_81 - Conv_97 +postprocessing: + device_pre_post_layers: + nms: true + meta_arch: yolov8 + hpp: true info: task: object detection input_shape: 640x640x3 - output_shape: 80x80x124, 40x40x124, 20x20x124 + output_shape: 80x5x100 
operations: 42.8G parameters: 10.79M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/osnet_x1_0.yaml b/hailo_model_zoo/cfg/networks/osnet_x1_0.yaml index a515e93f..600969bd 100644 --- a/hailo_model_zoo/cfg/networks/osnet_x1_0.yaml +++ b/hailo_model_zoo/cfg/networks/osnet_x1_0.yaml @@ -26,7 +26,7 @@ evaluation: info: task: person re-id input_shape: 256x128x3 - output_shape: 1x512 + output_shape: '512' operations: 1.98G parameters: 2.19M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/person_attr_resnet_v1_18.yaml b/hailo_model_zoo/cfg/networks/person_attr_resnet_v1_18.yaml index 20248197..9f99e92f 100644 --- a/hailo_model_zoo/cfg/networks/person_attr_resnet_v1_18.yaml +++ b/hailo_model_zoo/cfg/networks/person_attr_resnet_v1_18.yaml @@ -33,7 +33,7 @@ postprocessing: info: task: person attribute input_shape: 224x224x3 - output_shape: 1x35 + output_shape: '35' operations: 3.64G parameters: 11.19M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/repvgg_a0_person_reid_512.yaml b/hailo_model_zoo/cfg/networks/repvgg_a0_person_reid_512.yaml index 24257140..993a1246 100644 --- a/hailo_model_zoo/cfg/networks/repvgg_a0_person_reid_512.yaml +++ b/hailo_model_zoo/cfg/networks/repvgg_a0_person_reid_512.yaml @@ -26,7 +26,7 @@ evaluation: info: task: person re-id input_shape: 256x128x3 - output_shape: 1x512 + output_shape: '512' operations: 1.78G parameters: 7.68M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/repvgg_a1.yaml b/hailo_model_zoo/cfg/networks/repvgg_a1.yaml index ee00b94f..eab3d714 100644 --- a/hailo_model_zoo/cfg/networks/repvgg_a1.yaml +++ b/hailo_model_zoo/cfg/networks/repvgg_a1.yaml @@ -14,7 +14,7 @@ parser: info: task: classification input_shape: 224x224x3 - output_shape: 1x1x1000 + output_shape: '1000' operations: 4.7G parameters: 12.79M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/repvgg_a2.yaml b/hailo_model_zoo/cfg/networks/repvgg_a2.yaml index 6951b063..a6e50209 100644 --- 
a/hailo_model_zoo/cfg/networks/repvgg_a2.yaml +++ b/hailo_model_zoo/cfg/networks/repvgg_a2.yaml @@ -14,7 +14,7 @@ parser: info: task: classification input_shape: 224x224x3 - output_shape: 1x1x1000 + output_shape: '1000' operations: 10.2G parameters: 25.5M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/resmlp12_relu.yaml b/hailo_model_zoo/cfg/networks/resmlp12_relu.yaml index a2c2f36f..9fa4bb5a 100644 --- a/hailo_model_zoo/cfg/networks/resmlp12_relu.yaml +++ b/hailo_model_zoo/cfg/networks/resmlp12_relu.yaml @@ -21,7 +21,7 @@ preprocessing: info: task: classification input_shape: 224x224x3 - output_shape: 1x1x1000 + output_shape: '1000' operations: 6.04G parameters: 15.77M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/resnet_v1_18.yaml b/hailo_model_zoo/cfg/networks/resnet_v1_18.yaml index 03985d59..f83d2761 100644 --- a/hailo_model_zoo/cfg/networks/resnet_v1_18.yaml +++ b/hailo_model_zoo/cfg/networks/resnet_v1_18.yaml @@ -20,7 +20,7 @@ postprocessing: info: task: classification input_shape: 224x224x3 - output_shape: 1x1x1000 + output_shape: '1000' operations: 3.64G parameters: 11.68M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/resnet_v1_34.yaml b/hailo_model_zoo/cfg/networks/resnet_v1_34.yaml index 204dd20f..9e736bc1 100644 --- a/hailo_model_zoo/cfg/networks/resnet_v1_34.yaml +++ b/hailo_model_zoo/cfg/networks/resnet_v1_34.yaml @@ -16,7 +16,7 @@ parser: info: task: classification input_shape: 224x224x3 - output_shape: 1x1x1000 + output_shape: '1000' operations: 7.34G parameters: 21.79M framework: tensorflow diff --git a/hailo_model_zoo/cfg/networks/resnet_v1_50.yaml b/hailo_model_zoo/cfg/networks/resnet_v1_50.yaml index f4558659..43bec9f5 100644 --- a/hailo_model_zoo/cfg/networks/resnet_v1_50.yaml +++ b/hailo_model_zoo/cfg/networks/resnet_v1_50.yaml @@ -28,7 +28,7 @@ parser: info: task: classification input_shape: 224x224x3 - output_shape: 1x1x1000 + output_shape: '1000' operations: 6.98G parameters: 25.53M framework: 
tensorflow diff --git a/hailo_model_zoo/cfg/networks/resnext26_32x4d.yaml b/hailo_model_zoo/cfg/networks/resnext26_32x4d.yaml index 168da386..4e0fa4a4 100644 --- a/hailo_model_zoo/cfg/networks/resnext26_32x4d.yaml +++ b/hailo_model_zoo/cfg/networks/resnext26_32x4d.yaml @@ -14,7 +14,7 @@ parser: info: task: classification input_shape: 224x224x3 - output_shape: 1x1x1000 + output_shape: '1000' operations: 4.96G parameters: 15.37M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/resnext50_32x4d.yaml b/hailo_model_zoo/cfg/networks/resnext50_32x4d.yaml index 75c2c031..01f66f40 100644 --- a/hailo_model_zoo/cfg/networks/resnext50_32x4d.yaml +++ b/hailo_model_zoo/cfg/networks/resnext50_32x4d.yaml @@ -14,7 +14,7 @@ parser: info: task: classification input_shape: 224x224x3 - output_shape: 1x1x1000 + output_shape: '1000' operations: 8.48G parameters: 24.99M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/scdepthv3.yaml b/hailo_model_zoo/cfg/networks/scdepthv3.yaml index a3430162..c1e3452a 100644 --- a/hailo_model_zoo/cfg/networks/scdepthv3.yaml +++ b/hailo_model_zoo/cfg/networks/scdepthv3.yaml @@ -29,7 +29,7 @@ parser: - 57.375 - 57.375 info: - task: Depth Estimation + task: depth estimation input_shape: 256x320x3 output_shape: 256x320x1 operations: 10.7G @@ -38,3 +38,5 @@ info: eval_metric: RMSE source: https://github.com/JiawangBian/sc_depth_pl/ full_precision_result: 0.481 + license_url: https://github.com/JiawangBian/sc_depth_pl/blob/master/LICENSE + license_name: GPL-3.0 diff --git a/hailo_model_zoo/cfg/networks/scrfd_10g.yaml b/hailo_model_zoo/cfg/networks/scrfd_10g.yaml index eb88e1fb..eba0ee01 100644 --- a/hailo_model_zoo/cfg/networks/scrfd_10g.yaml +++ b/hailo_model_zoo/cfg/networks/scrfd_10g.yaml @@ -22,8 +22,8 @@ parser: info: task: face detection input_shape: 640x640x3 - output_shape: 80x80x2, 80x80x8, 80x80x10, 40x40x2, 40x40x8, 40x40x10, 20x20x2, 20x20x8, - 20x20x10 + output_shape: 80x80x8, 80x80x2, 80x80x20, 40x40x8, 40x40x2, 40x40x20, 
20x20x8, 20x20x2, + 20x20x20 operations: 26.74G parameters: 4.23M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/scrfd_10g_nv12_fhd.yaml b/hailo_model_zoo/cfg/networks/scrfd_10g_nv12_fhd.yaml index ffce8083..6e7c8b6f 100644 --- a/hailo_model_zoo/cfg/networks/scrfd_10g_nv12_fhd.yaml +++ b/hailo_model_zoo/cfg/networks/scrfd_10g_nv12_fhd.yaml @@ -20,8 +20,8 @@ preprocessing: info: task: pipeline_hailo15 input_shape: 540x1920x3 - output_shape: 80x80x2, 80x80x8, 80x80x10, 40x40x2, 40x40x8, 40x40x10, 20x20x2, 20x20x8, - 20x20x10 + output_shape: 80x80x8, 80x80x2, 80x80x20, 40x40x8, 40x40x2, 40x40x20, 20x20x8, 20x20x2, + 20x20x20 operations: 26.74G parameters: 4.23M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/scrfd_2.5g.yaml b/hailo_model_zoo/cfg/networks/scrfd_2.5g.yaml index 0be9de05..68f651cc 100644 --- a/hailo_model_zoo/cfg/networks/scrfd_2.5g.yaml +++ b/hailo_model_zoo/cfg/networks/scrfd_2.5g.yaml @@ -22,8 +22,8 @@ parser: info: task: face detection input_shape: 640x640x3 - output_shape: 80x80x2, 80x80x8, 80x80x10, 40x40x2, 40x40x8, 40x40x10, 20x20x2, 20x20x8, - 20x20x10 + output_shape: 80x80x8, 80x80x2, 80x80x20, 40x40x8, 40x40x2, 40x40x20, 20x20x8, 20x20x2, + 20x20x20 operations: 6.88G parameters: 0.82M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/scrfd_500m.yaml b/hailo_model_zoo/cfg/networks/scrfd_500m.yaml index 959bd691..14eea463 100644 --- a/hailo_model_zoo/cfg/networks/scrfd_500m.yaml +++ b/hailo_model_zoo/cfg/networks/scrfd_500m.yaml @@ -22,8 +22,8 @@ parser: info: task: face detection input_shape: 640x640x3 - output_shape: 80x80x2, 80x80x8, 80x80x10, 40x40x2, 40x40x8, 40x40x10, 20x20x2, 20x20x8, - 20x20x10 + output_shape: 80x80x8, 80x80x2, 80x80x20, 40x40x8, 40x40x2, 40x40x20, 20x20x8, 20x20x2, + 20x20x20 operations: 1.5G parameters: 0.63M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/segformer_b0_bn.yaml b/hailo_model_zoo/cfg/networks/segformer_b0_bn.yaml index 27e00fc8..7bd8576e 100644 --- 
a/hailo_model_zoo/cfg/networks/segformer_b0_bn.yaml +++ b/hailo_model_zoo/cfg/networks/segformer_b0_bn.yaml @@ -7,7 +7,7 @@ network: parser: nodes: - null - - '1553' + - null paths: alls_script: segformer_b0_bn.alls network_path: @@ -17,7 +17,7 @@ info: task: semantic segmentation input_shape: 512x1024x3 output_shape: 512x1024x1 - operations: 27.2G + operations: 35.76G parameters: 3.72M framework: pytorch training_data: cityscapes train diff --git a/hailo_model_zoo/cfg/networks/ssd_mobilenet_v1.yaml b/hailo_model_zoo/cfg/networks/ssd_mobilenet_v1.yaml index 79e5b20f..76eaac50 100644 --- a/hailo_model_zoo/cfg/networks/ssd_mobilenet_v1.yaml +++ b/hailo_model_zoo/cfg/networks/ssd_mobilenet_v1.yaml @@ -7,7 +7,6 @@ postprocessing: softmax: false bilinear: false nms: true - postprocess_config_file: models_files/ObjectDetection/Detection-COCO/ssd/ssd_mobilenet_v1/pretrained/2023-07-18/mobilenet_ssd_nms_postprocess_config.json meta_arch: ssd anchors: predefined: true @@ -36,7 +35,7 @@ parser: info: task: object detection input_shape: 300x300x3 - output_shape: 90x5x20 + output_shape: 90x8x1 operations: 2.5G parameters: 6.79M framework: tensorflow diff --git a/hailo_model_zoo/cfg/networks/ssd_mobilenet_v1_visdrone.yaml b/hailo_model_zoo/cfg/networks/ssd_mobilenet_v1_visdrone.yaml index 68243a2f..5c7f3995 100644 --- a/hailo_model_zoo/cfg/networks/ssd_mobilenet_v1_visdrone.yaml +++ b/hailo_model_zoo/cfg/networks/ssd_mobilenet_v1_visdrone.yaml @@ -7,7 +7,6 @@ postprocessing: softmax: false bilinear: false nms: true - postprocess_config_file: models_files/ObjectDetection/Detection-Visdrone/ssd/ssd_mobilenet_v1_visdrone/pretrained/2023-07-18/mobilenet_ssd_nms_visdrone_postprocess_config.json meta_arch: ssd anchors: predefined: true @@ -36,7 +35,7 @@ parser: info: task: object detection input_shape: 300x300x3 - output_shape: 2244x8x1 + output_shape: 11x8x1 operations: 2.3G parameters: 5.64M framework: tensorflow diff --git a/hailo_model_zoo/cfg/networks/ssd_mobilenet_v2.yaml 
b/hailo_model_zoo/cfg/networks/ssd_mobilenet_v2.yaml index 92fdaf26..9e2161fc 100644 --- a/hailo_model_zoo/cfg/networks/ssd_mobilenet_v2.yaml +++ b/hailo_model_zoo/cfg/networks/ssd_mobilenet_v2.yaml @@ -7,7 +7,6 @@ postprocessing: softmax: false bilinear: false nms: true - postprocess_config_file: models_files/ObjectDetection/Detection-COCO/ssd/ssd_mobilenet_v2/pretrained/2023-03-16/mobilenet_v2_ssd_nms_postprocess_config.json meta_arch: ssd anchors: predefined: true @@ -38,7 +37,7 @@ parser: info: task: object detection input_shape: 300x300x3 - output_shape: 2244x8x1 + output_shape: 90x8x1 operations: 1.52G parameters: 4.46M framework: tensorflow diff --git a/hailo_model_zoo/cfg/networks/stereonet.yaml b/hailo_model_zoo/cfg/networks/stereonet.yaml index f9db0068..522689e5 100644 --- a/hailo_model_zoo/cfg/networks/stereonet.yaml +++ b/hailo_model_zoo/cfg/networks/stereonet.yaml @@ -38,8 +38,8 @@ evaluation: dataset_name: kitti_stereo info: task: stereo depth estimation - input_shape: 368X1232X3, 368X1232X3 - output_shape: 368X1232X1 + input_shape: 368x1232x3, 368x1232x3 + output_shape: 368x1232x1 operations: 126.28G parameters: 5.91M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/tiny_yolov3.yaml b/hailo_model_zoo/cfg/networks/tiny_yolov3.yaml index d6ea4c73..1a6aa143 100644 --- a/hailo_model_zoo/cfg/networks/tiny_yolov3.yaml +++ b/hailo_model_zoo/cfg/networks/tiny_yolov3.yaml @@ -39,7 +39,7 @@ parser: info: task: object detection input_shape: 416x416x3 - output_shape: 13x13x6, 13x13x6, 13x13x3, 13x13x240, 26x26x6, 26x26x6, 26x26x3, 26x26x240 + output_shape: 13x13x255, 26x26x255 operations: 5.58G parameters: 8.85M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/vit_base_bn.yaml b/hailo_model_zoo/cfg/networks/vit_base_bn.yaml index 07b46f63..f3eae766 100644 --- a/hailo_model_zoo/cfg/networks/vit_base_bn.yaml +++ b/hailo_model_zoo/cfg/networks/vit_base_bn.yaml @@ -15,7 +15,7 @@ info: task: classification input_shape: 224x224x3 output_shape: 
'1000' - operations: 34.25G + operations: 35.188G parameters: 86.5M framework: pytorch training_data: imagenet train diff --git a/hailo_model_zoo/cfg/networks/vit_pose_small.yaml b/hailo_model_zoo/cfg/networks/vit_pose_small.yaml index e6fb5c38..5eafc386 100644 --- a/hailo_model_zoo/cfg/networks/vit_pose_small.yaml +++ b/hailo_model_zoo/cfg/networks/vit_pose_small.yaml @@ -37,7 +37,7 @@ info: parameters: 24.29M framework: pytorch eval_metric: AP - full_precision_result: 73.8 + full_precision_result: 74.16 source: https://github.com/ViTAE-Transformer/ViTPose license_url: https://github.com/ViTAE-Transformer/ViTPose/blob/main/LICENSE license_name: Apache-2.0 diff --git a/hailo_model_zoo/cfg/networks/yolov5m.yaml b/hailo_model_zoo/cfg/networks/yolov5m.yaml index 477e074d..be3c7b56 100644 --- a/hailo_model_zoo/cfg/networks/yolov5m.yaml +++ b/hailo_model_zoo/cfg/networks/yolov5m.yaml @@ -3,7 +3,6 @@ base: postprocessing: device_pre_post_layers: nms: true - postprocess_config_file: models_files/ObjectDetection/Detection-COCO/yolo/yolov5m_spp/pretrained/2023-04-25/yolov5m_nms_config.json meta_arch: yolo hpp: true quantization: @@ -25,7 +24,7 @@ parser: info: task: object detection input_shape: 640x640x3 - output_shape: 20x20x255, 40x40x255, 80x80x255 + output_shape: 80x5x80 operations: 52.17G parameters: 21.78M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/yolov5m6_6.1.yaml b/hailo_model_zoo/cfg/networks/yolov5m6_6.1.yaml index 9c6f0d68..8b5a147d 100644 --- a/hailo_model_zoo/cfg/networks/yolov5m6_6.1.yaml +++ b/hailo_model_zoo/cfg/networks/yolov5m6_6.1.yaml @@ -12,7 +12,6 @@ preprocessing: postprocessing: device_pre_post_layers: nms: true - postprocess_config_file: models_files/ObjectDetection/Detection-COCO/yolo/yolov5m6_6.1/pretrained/2023-04-25/yolov5m6_nms_config.json meta_arch: yolo hpp: true paths: @@ -30,7 +29,7 @@ parser: info: task: object detection input_shape: 1280x1280x3 - output_shape: 20x20x255, 40x40x255, 80x80x255, 160x160x255 + 
output_shape: 80x5x80 operations: 200.04G parameters: 35.70M framework: pytorch @@ -38,4 +37,6 @@ info: validation_data: coco val2017 eval_metric: mAP source: https://github.com/ultralytics/yolov5/releases/tag/v6.1 + license_url: https://github.com/ultralytics/yolov5/blob/v6.1/LICENSE + license_name: GPL-3.0 full_precision_result: 50.67 diff --git a/hailo_model_zoo/cfg/networks/yolov5m_6.1.yaml b/hailo_model_zoo/cfg/networks/yolov5m_6.1.yaml index 56b9aba4..746ca118 100644 --- a/hailo_model_zoo/cfg/networks/yolov5m_6.1.yaml +++ b/hailo_model_zoo/cfg/networks/yolov5m_6.1.yaml @@ -16,13 +16,12 @@ parser: postprocessing: device_pre_post_layers: nms: true - postprocess_config_file: models_files/ObjectDetection/Detection-COCO/yolo/yolov5m_6.1/pretrained/2023-04-25/yolov5m_6.1_nms_config.json meta_arch: yolo hpp: true info: task: object detection input_shape: 640x640x3 - output_shape: 20x20x255, 40x40x255, 80x80x255, + output_shape: 80x5x80 operations: 48.96G parameters: 21.17M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/yolov5m_vehicles.yaml b/hailo_model_zoo/cfg/networks/yolov5m_vehicles.yaml index c65e5aa7..85594f10 100644 --- a/hailo_model_zoo/cfg/networks/yolov5m_vehicles.yaml +++ b/hailo_model_zoo/cfg/networks/yolov5m_vehicles.yaml @@ -30,13 +30,12 @@ evaluation: postprocessing: device_pre_post_layers: nms: true - postprocess_config_file: models_files/HailoNets/LPR/vehicle_detector/yolov5m_vehicles/2023-04-25/yolov5m_vehicles_nms_config.json meta_arch: yolo hpp: true info: task: pipeline input_shape: 1080x1920x3 - output_shape: 20x20x18, 40x40x18, 80x80x18 + output_shape: 1x5x80 operations: 51.19G parameters: 21.47M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/yolov5m_vehicles_nv12.yaml b/hailo_model_zoo/cfg/networks/yolov5m_vehicles_nv12.yaml index 4e5c1e21..d25c31b3 100644 --- a/hailo_model_zoo/cfg/networks/yolov5m_vehicles_nv12.yaml +++ b/hailo_model_zoo/cfg/networks/yolov5m_vehicles_nv12.yaml @@ -15,7 +15,6 @@ network: 
postprocessing: device_pre_post_layers: nms: true - postprocess_config_file: models_files/HailoNets/LPR/vehicle_detector/yolov5m_vehicles/2023-04-25/yolov5m_vehicles_nms_config.json meta_arch: yolo hpp: true paths: @@ -38,7 +37,7 @@ evaluation: info: task: pipeline input_shape: 540x1920x3 - output_shape: 20x20x18, 40x40x18, 80x80x18 + output_shape: 1x5x80 operations: 51.19G parameters: 21.47M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/yolov5m_vehicles_yuy2.yaml b/hailo_model_zoo/cfg/networks/yolov5m_vehicles_yuy2.yaml index ccd74378..a5aca285 100644 --- a/hailo_model_zoo/cfg/networks/yolov5m_vehicles_yuy2.yaml +++ b/hailo_model_zoo/cfg/networks/yolov5m_vehicles_yuy2.yaml @@ -15,7 +15,6 @@ network: postprocessing: device_pre_post_layers: nms: true - postprocess_config_file: models_files/HailoNets/LPR/vehicle_detector/yolov5m_vehicles/2023-04-25/yolov5m_vehicles_nms_config.json meta_arch: yolo hpp: true paths: @@ -38,7 +37,7 @@ evaluation: info: task: pipeline input_shape: 1080x1920x2 - output_shape: 20x20x18, 40x40x18, 80x80x18 + output_shape: 1x5x80 operations: 51.19G parameters: 21.47M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/yolov5m_wo_spp.yaml b/hailo_model_zoo/cfg/networks/yolov5m_wo_spp.yaml index 4ed0f6c8..5b750d93 100644 --- a/hailo_model_zoo/cfg/networks/yolov5m_wo_spp.yaml +++ b/hailo_model_zoo/cfg/networks/yolov5m_wo_spp.yaml @@ -19,12 +19,11 @@ parser: postprocessing: device_pre_post_layers: nms: true - postprocess_config_file: models_files/ObjectDetection/Detection-COCO/yolo/yolov5m/pretrained/2023-04-25/yolov5m_nms_config.json meta_arch: yolo hpp: true info: input_shape: 640x640x3 - output_shape: 20x20x255, 40x40x255, 80x80x255 + output_shape: 80x5x80 operations: 52.88G parameters: 22.67M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/yolov5m_wo_spp_60p.yaml b/hailo_model_zoo/cfg/networks/yolov5m_wo_spp_60p.yaml index 74ce646d..b24f1fa7 100644 --- a/hailo_model_zoo/cfg/networks/yolov5m_wo_spp_60p.yaml 
+++ b/hailo_model_zoo/cfg/networks/yolov5m_wo_spp_60p.yaml @@ -16,13 +16,13 @@ parser: postprocessing: device_pre_post_layers: nms: true - postprocess_config_file: models_files/ObjectDetection/Detection-COCO/yolo/yolov5m/pretrained/2023-04-25/yolov5m_nms_config.json meta_arch: yolo hpp: true + score_threshold: 0.001 info: task: object detection input_shape: 640x640x3 - output_shape: 20x20x255, 40x40x255, 80x80x255 + output_shape: 80x5x80 operations: 52.88G parameters: 22.67M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/yolov5m_wo_spp_yuy2.yaml b/hailo_model_zoo/cfg/networks/yolov5m_wo_spp_yuy2.yaml index 8805b95c..533db267 100644 --- a/hailo_model_zoo/cfg/networks/yolov5m_wo_spp_yuy2.yaml +++ b/hailo_model_zoo/cfg/networks/yolov5m_wo_spp_yuy2.yaml @@ -15,6 +15,6 @@ paths: info: task: pipeline input_shape: 720x1280x2 - output_shape: 20x20x255, 40x40x255, 80x80x255 + output_shape: 80x5x80 operations: 52.89G parameters: 22.67M diff --git a/hailo_model_zoo/cfg/networks/yolov5s.yaml b/hailo_model_zoo/cfg/networks/yolov5s.yaml index 9b0c41c0..0f5032f9 100644 --- a/hailo_model_zoo/cfg/networks/yolov5s.yaml +++ b/hailo_model_zoo/cfg/networks/yolov5s.yaml @@ -16,13 +16,12 @@ parser: postprocessing: device_pre_post_layers: nms: true - postprocess_config_file: models_files/ObjectDetection/Detection-COCO/yolo/yolov5s_spp/pretrained/2023-04-25/yolov5s_nms_config.json meta_arch: yolo hpp: true info: task: object detection input_shape: 640x640x3 - output_shape: 80x80x255, 40x40x255, 20x20x255 + output_shape: 80x5x80 operations: 17.44G parameters: 7.46M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/yolov5s_c3tr.yaml b/hailo_model_zoo/cfg/networks/yolov5s_c3tr.yaml index 057af713..c0fd834e 100644 --- a/hailo_model_zoo/cfg/networks/yolov5s_c3tr.yaml +++ b/hailo_model_zoo/cfg/networks/yolov5s_c3tr.yaml @@ -19,13 +19,12 @@ parser: postprocessing: device_pre_post_layers: nms: true - postprocess_config_file: 
models_files/ObjectDetection/Detection-COCO/yolo/yolov5s_c3tr/pretrained/2023-04-25/yolov5s_c3tr_nms_config.json meta_arch: yolo hpp: true info: task: object detection input_shape: 640x640x3 - output_shape: 20x20x255, 40x40x255, 80x80x255 + output_shape: 80x5x80 operations: 17.02G parameters: 10.29M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/yolov5s_personface.yaml b/hailo_model_zoo/cfg/networks/yolov5s_personface.yaml index 6b386cdf..efc97ab5 100644 --- a/hailo_model_zoo/cfg/networks/yolov5s_personface.yaml +++ b/hailo_model_zoo/cfg/networks/yolov5s_personface.yaml @@ -18,7 +18,6 @@ preprocessing: postprocessing: device_pre_post_layers: nms: true - postprocess_config_file: models_files/HailoNets/MCPReID/personface_detector/yolov5s_personface/2023-04-25/yolov5s_personface.json meta_arch: yolo hpp: true quantization: @@ -32,7 +31,7 @@ evaluation: info: task: pipeline input_shape: 640x640x3 - output_shape: 80x80x21, 40x40x21, 20x20x21 + output_shape: 2x5x80 operations: 16.71G parameters: 7.25M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/yolov5s_personface_nv12_fhd.yaml b/hailo_model_zoo/cfg/networks/yolov5s_personface_nv12_fhd.yaml index 9c12bfe4..7fa4ae05 100644 --- a/hailo_model_zoo/cfg/networks/yolov5s_personface_nv12_fhd.yaml +++ b/hailo_model_zoo/cfg/networks/yolov5s_personface_nv12_fhd.yaml @@ -34,7 +34,7 @@ hn_editor: info: task: pipeline_hailo15 input_shape: 540x1920x3 - output_shape: 80x80x21, 40x40x21, 20x20x21 + output_shape: 20x20x21, 40x40x21, 80x80x21 operations: 16.76G parameters: 7.25M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/yolov5xs_wo_spp.yaml b/hailo_model_zoo/cfg/networks/yolov5xs_wo_spp.yaml index 14caf139..4274cdb9 100644 --- a/hailo_model_zoo/cfg/networks/yolov5xs_wo_spp.yaml +++ b/hailo_model_zoo/cfg/networks/yolov5xs_wo_spp.yaml @@ -21,13 +21,12 @@ parser: postprocessing: device_pre_post_layers: nms: true - postprocess_config_file: 
models_files/ObjectDetection/Detection-COCO/yolo/yolov5xs/pretrained/2023-04-25/yolov5xs_nms_config.json meta_arch: yolo hpp: true info: task: object detection input_shape: 512x512x3 - output_shape: 16x16x255, 32x32x255, 64x64x255 + output_shape: 80x5x80 operations: 11.36G parameters: 7.85M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/yolov5xs_wo_spp_nms_core.yaml b/hailo_model_zoo/cfg/networks/yolov5xs_wo_spp_nms_core.yaml index 485dce42..2085466e 100644 --- a/hailo_model_zoo/cfg/networks/yolov5xs_wo_spp_nms_core.yaml +++ b/hailo_model_zoo/cfg/networks/yolov5xs_wo_spp_nms_core.yaml @@ -21,12 +21,11 @@ parser: postprocessing: device_pre_post_layers: nms: true - postprocess_config_file: models_files/ObjectDetection/Detection-COCO/yolo/yolov5xs/pretrained/2022-05-10/yolov5xs_wo_spp_nms_config.json meta_arch: yolo info: task: object detection input_shape: 512x512x3 - output_shape: 16x16x255, 32x32x255, 64x64x255 + output_shape: 80x5x80 operations: 11.36G parameters: 7.85M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/yolov6n.yaml b/hailo_model_zoo/cfg/networks/yolov6n.yaml index 48a4a8f3..74308744 100644 --- a/hailo_model_zoo/cfg/networks/yolov6n.yaml +++ b/hailo_model_zoo/cfg/networks/yolov6n.yaml @@ -30,6 +30,8 @@ parser: - 255.0 - 255.0 fold_normalization: true +postprocessing: + hpp: true info: task: object detection input_shape: 640x640x3 diff --git a/hailo_model_zoo/cfg/networks/yolov7.yaml b/hailo_model_zoo/cfg/networks/yolov7.yaml index 038fb218..6e1133df 100644 --- a/hailo_model_zoo/cfg/networks/yolov7.yaml +++ b/hailo_model_zoo/cfg/networks/yolov7.yaml @@ -5,7 +5,6 @@ inference: postprocessing: device_pre_post_layers: nms: true - postprocess_config_file: models_files/ObjectDetection/Detection-COCO/yolo/yolov7/pretrained/2023-04-25/yolov7_nms_config.json meta_arch: yolo hpp: true network: @@ -24,7 +23,7 @@ parser: info: task: object detection input_shape: 640x640x3 - output_shape: 80x80x255, 40x40x255, 20x20x255 + output_shape: 
80x5x80 operations: 104.51G parameters: 36.91M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/yolov7_tiny.yaml b/hailo_model_zoo/cfg/networks/yolov7_tiny.yaml index 4b72ffff..f6fa5960 100644 --- a/hailo_model_zoo/cfg/networks/yolov7_tiny.yaml +++ b/hailo_model_zoo/cfg/networks/yolov7_tiny.yaml @@ -5,7 +5,6 @@ inference: postprocessing: device_pre_post_layers: nms: true - postprocess_config_file: models_files/ObjectDetection/Detection-COCO/yolo/yolov7_tiny/pretrained/2023-04-25/yolov7_tiny_nms_config.json meta_arch: yolo hpp: true network: @@ -24,7 +23,7 @@ parser: info: task: object detection input_shape: 640x640x3 - output_shape: 80x80x255, 40x40x255, 20x20x255 + output_shape: 80x5x80 operations: 13.74G parameters: 6.22M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/yolov7e6.yaml b/hailo_model_zoo/cfg/networks/yolov7e6.yaml index 1b1ab562..d022876f 100644 --- a/hailo_model_zoo/cfg/networks/yolov7e6.yaml +++ b/hailo_model_zoo/cfg/networks/yolov7e6.yaml @@ -12,7 +12,6 @@ preprocessing: postprocessing: device_pre_post_layers: nms: true - postprocess_config_file: models_files/ObjectDetection/Detection-COCO/yolo/yolov7e6/pretrained/2023-04-25/yolov7e6_nms_config.json meta_arch: yolo hpp: true paths: @@ -30,7 +29,7 @@ parser: info: task: object detection input_shape: 1280x1280x3 - output_shape: 20x20x255, 40x40x255, 80x80x255, 160x160x255 + output_shape: 80x5x80 operations: 515.12G parameters: 97.20M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/yolov8l.yaml b/hailo_model_zoo/cfg/networks/yolov8l.yaml index 52c59093..5ad1db4a 100644 --- a/hailo_model_zoo/cfg/networks/yolov8l.yaml +++ b/hailo_model_zoo/cfg/networks/yolov8l.yaml @@ -4,7 +4,6 @@ postprocessing: device_pre_post_layers: nms: true hpp: true - postprocess_config_file: models_files/ObjectDetection/Detection-COCO/yolo/yolov8l/2023-02-02/yolov8l_nms_config.json network: network_name: yolov8l paths: @@ -24,7 +23,7 @@ parser: info: task: object detection input_shape: 
640x640x3 - output_shape: 80x80x80, 80X80X64, 40X40X80, 40X40X64, 20X20X80, 20X20X64 + output_shape: 80x5x100 operations: 165.3G parameters: 43.7M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/yolov8m.yaml b/hailo_model_zoo/cfg/networks/yolov8m.yaml index 3c9a6a2c..ebfac6ef 100644 --- a/hailo_model_zoo/cfg/networks/yolov8m.yaml +++ b/hailo_model_zoo/cfg/networks/yolov8m.yaml @@ -4,7 +4,6 @@ postprocessing: device_pre_post_layers: nms: true hpp: true - postprocess_config_file: models_files/ObjectDetection/Detection-COCO/yolo/yolov8m/2023-02-02/yolov8m_nms_config.json network: network_name: yolov8m paths: @@ -24,7 +23,7 @@ parser: info: task: object detection input_shape: 640x640x3 - output_shape: 80x80x80, 80X80X64, 40X40X80, 40X40X64, 20X20X80, 20X20X64 + output_shape: 80x5x100 operations: 78.93G parameters: 25.9M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/yolov8m_pose.yaml b/hailo_model_zoo/cfg/networks/yolov8m_pose.yaml index 08ed141b..6fbc6aa9 100644 --- a/hailo_model_zoo/cfg/networks/yolov8m_pose.yaml +++ b/hailo_model_zoo/cfg/networks/yolov8m_pose.yaml @@ -20,7 +20,7 @@ parser: - /model.22/cv3.0/cv3.0.2/Conv - /model.22/cv4.0/cv4.0.2/Conv info: - task: pose_estimation + task: pose estimation input_shape: 640x640x3 output_shape: 20x20x64, 20x20x1, 20x20x51, 40x40x64, 40x40x1, 40x40x51, 80x80x64, 80x80x1, 80x80x51 diff --git a/hailo_model_zoo/cfg/networks/yolov8n.yaml b/hailo_model_zoo/cfg/networks/yolov8n.yaml index b595d4e9..309a005e 100644 --- a/hailo_model_zoo/cfg/networks/yolov8n.yaml +++ b/hailo_model_zoo/cfg/networks/yolov8n.yaml @@ -4,7 +4,6 @@ postprocessing: device_pre_post_layers: nms: true hpp: true - postprocess_config_file: models_files/ObjectDetection/Detection-COCO/yolo/yolov8n/2023-01-30/yolov8n_nms_config.json network: network_name: yolov8n paths: @@ -24,7 +23,7 @@ parser: info: task: object detection input_shape: 640x640x3 - output_shape: 80x80x80, 80X80X64, 40X40X80, 40X40X64, 20X20X80, 20X20X64 + 
output_shape: 80x5x100 operations: 8.74G parameters: 3.2M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/yolov8s.yaml b/hailo_model_zoo/cfg/networks/yolov8s.yaml index fecc536c..e0380209 100644 --- a/hailo_model_zoo/cfg/networks/yolov8s.yaml +++ b/hailo_model_zoo/cfg/networks/yolov8s.yaml @@ -4,7 +4,6 @@ postprocessing: device_pre_post_layers: nms: true hpp: true - postprocess_config_file: models_files/ObjectDetection/Detection-COCO/yolo/yolov8s/2023-02-02/yolov8s_nms_config.json network: network_name: yolov8s paths: @@ -24,7 +23,7 @@ parser: info: task: object detection input_shape: 640x640x3 - output_shape: 80x80x80, 80X80X64, 40X40X80, 40X40X64, 20X20X80, 20X20X64 + output_shape: 80x5x100 operations: 28.6G parameters: 11.2M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/yolov8s_pose.yaml b/hailo_model_zoo/cfg/networks/yolov8s_pose.yaml index 5d87f75a..ea867363 100644 --- a/hailo_model_zoo/cfg/networks/yolov8s_pose.yaml +++ b/hailo_model_zoo/cfg/networks/yolov8s_pose.yaml @@ -20,7 +20,7 @@ parser: - /model.22/cv3.0/cv3.0.2/Conv - /model.22/cv4.0/cv4.0.2/Conv info: - task: pose_estimation + task: pose estimation input_shape: 640x640x3 output_shape: 20x20x64, 20x20x1, 20x20x51, 40x40x64, 40x40x1, 40x40x51, 80x80x64, 80x80x1, 80x80x51 diff --git a/hailo_model_zoo/cfg/networks/yolov8x.yaml b/hailo_model_zoo/cfg/networks/yolov8x.yaml index f3daa6ae..e3e39ed0 100644 --- a/hailo_model_zoo/cfg/networks/yolov8x.yaml +++ b/hailo_model_zoo/cfg/networks/yolov8x.yaml @@ -4,7 +4,6 @@ postprocessing: device_pre_post_layers: nms: true hpp: true - postprocess_config_file: models_files/ObjectDetection/Detection-COCO/yolo/yolov8x/2023-02-02/yolov8x_nms_config.json network: network_name: yolov8x paths: @@ -24,7 +23,7 @@ parser: info: task: object detection input_shape: 640x640x3 - output_shape: 80x80x80, 80X80X64, 40X40X80, 40X40X64, 20X20X80, 20X20X64 + output_shape: 80x5x100 operations: 258G parameters: 68.2M framework: pytorch diff --git 
a/hailo_model_zoo/cfg/networks/yolov9c.yaml b/hailo_model_zoo/cfg/networks/yolov9c.yaml new file mode 100644 index 00000000..6b929a90 --- /dev/null +++ b/hailo_model_zoo/cfg/networks/yolov9c.yaml @@ -0,0 +1,32 @@ +base: +- base/yolov8.yaml +network: + network_name: yolov9c +paths: + network_path: + - models_files/ObjectDetection/Detection-COCO/yolo/yolov9c/pretrained/2024-02-24/yolov9c.onnx + url: https://hailo-model-zoo.s3.eu-west-2.amazonaws.com/ObjectDetection/Detection-COCO/yolo/yolov9c/pretrained/2024-02-24/yolov9c.zip + alls_script: yolov9c.alls +parser: + nodes: + - null + - - Conv_1058 + - Conv_1065 + - Conv_1088 + - Conv_1095 + - Conv_1118 + - Conv_1125 +info: + task: object detection + input_shape: 640x640x3 + output_shape: 80x80x64, 80x80x80, 40x40x64, 40x40x80, 20x20x64, 20x20x80 + operations: 102.1G + parameters: 25.3M + framework: pytorch + training_data: coco train2017 + validation_data: coco val2017 + eval_metric: mAP + full_precision_result: 52.8 + source: https://github.com/WongKinYiu/yolov9 + license_url: https://github.com/WongKinYiu/yolov7/blob/main/LICENSE.md + license_name: GPL-3.0 diff --git a/hailo_model_zoo/cfg/networks/yolox_l_leaky.yaml b/hailo_model_zoo/cfg/networks/yolox_l_leaky.yaml index 6c3618cf..5ee36c13 100644 --- a/hailo_model_zoo/cfg/networks/yolox_l_leaky.yaml +++ b/hailo_model_zoo/cfg/networks/yolox_l_leaky.yaml @@ -22,14 +22,12 @@ parser: postprocessing: device_pre_post_layers: nms: true - postprocess_config_file: models_files/ObjectDetection/Detection-COCO/yolo/yolox_l_leaky/pretrained/2023-05-31/nms_config_yolox_l_leaky.json meta_arch: yolox hpp: true info: task: object detection input_shape: 640x640x3 - output_shape: 80x80x4, 80x80x1, 80x80x80, 40x40x4, 40x40x1, 40x40x80, 20x20x4, 20x20x1, - 20x20x80 + output_shape: 80x5x100 operations: 155.3G parameters: 54.17M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/yolox_s_leaky.yaml b/hailo_model_zoo/cfg/networks/yolox_s_leaky.yaml index 547ee49a..7ac08068 100644 
--- a/hailo_model_zoo/cfg/networks/yolox_s_leaky.yaml +++ b/hailo_model_zoo/cfg/networks/yolox_s_leaky.yaml @@ -22,14 +22,12 @@ parser: postprocessing: device_pre_post_layers: nms: true - postprocess_config_file: models_files/ObjectDetection/Detection-COCO/yolo/yolox_s_leaky/pretrained/2023-05-31/nms_config_yolox_s_leaky.json meta_arch: yolox hpp: true info: task: object detection input_shape: 640x640x3 - output_shape: 80x80x4, 80x80x1, 80x80x80, 40x40x4, 40x40x1, 40x40x80, 20x20x4, 20x20x1, - 20x20x80 + output_shape: 80x5x100 operations: 26.74G parameters: 8.96M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/yolox_s_wide_leaky.yaml b/hailo_model_zoo/cfg/networks/yolox_s_wide_leaky.yaml index 201c0447..91b20efb 100644 --- a/hailo_model_zoo/cfg/networks/yolox_s_wide_leaky.yaml +++ b/hailo_model_zoo/cfg/networks/yolox_s_wide_leaky.yaml @@ -24,14 +24,12 @@ parser: postprocessing: device_pre_post_layers: nms: true - postprocess_config_file: models_files/ObjectDetection/Detection-COCO/yolo/yolox_s_wide_leaky/pretrained/2023-05-31/nms_config_yolox_s_wide_leaky.json meta_arch: yolox hpp: true info: task: object detection input_shape: 640x640x3 - output_shape: 80x80x4, 80x80x1, 80x80x80, 40x40x4, 40x40x1, 40x40x80, 20x20x4, 20x20x1, - 20x20x80 + output_shape: 80x5x100 operations: 59.46G parameters: 20.12M framework: pytorch diff --git a/hailo_model_zoo/cfg/networks/yolox_tiny.yaml b/hailo_model_zoo/cfg/networks/yolox_tiny.yaml index cf1c11a5..036ba21b 100644 --- a/hailo_model_zoo/cfg/networks/yolox_tiny.yaml +++ b/hailo_model_zoo/cfg/networks/yolox_tiny.yaml @@ -37,14 +37,12 @@ parser: postprocessing: device_pre_post_layers: nms: true - postprocess_config_file: models_files/ObjectDetection/Detection-COCO/yolo/yolox/yolox_tiny/pretrained/2023-05-31/nms_config_yolox_tiny.json meta_arch: yolox hpp: true info: task: object detection input_shape: 416x416x3 - output_shape: 52x52x4, 52x52x1, 52x52x80, 26x26x4, 26x26x1, 26x26x80, 13x13x4, 13x13x1, - 13x13x80 + 
output_shape: 80x5x100 operations: 6.44G parameters: 5.05M framework: pytorch diff --git a/hailo_model_zoo/cfg/postprocess_config/centernet_res18_with_division_nms_for_AP_test_threshold_0.json b/hailo_model_zoo/cfg/postprocess_config/centernet_res18_with_division_nms_for_AP_test_threshold_0.json new file mode 100644 index 00000000..5843dd85 --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/centernet_res18_with_division_nms_for_AP_test_threshold_0.json @@ -0,0 +1,18 @@ +{ + "nms_scores_th": 0.0, + "max_proposals_per_class": 100, + "centers_scale_factor": 10, + "bbox_dimensions_scale_factor": 5, + "classes": 80, + "background_removal": false, + "background_removal_index": 0, + "input_division_factor": 4, + "bbox_decoders": [ + { + "name": "bbox_decoder_conv29conv27", + "reg_layer_h": "conv29", + "reg_layer_w": "conv27", + "cls_layer": "ew_add1" + } + ] +} diff --git a/hailo_model_zoo/cfg/postprocess_config/efficientdet_lite0_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/efficientdet_lite0_nms_config.json new file mode 100644 index 00000000..474f9cbe --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/efficientdet_lite0_nms_config.json @@ -0,0 +1,146 @@ +{ + "nms_scores_th": 0.5, + "nms_iou_th": 0.5, + "image_dims": [ + 320, + 320 + ], + "max_proposals_per_class": 100, + "centers_scale_factor": 1, + "bbox_dimensions_scale_factor": 1, + "classes": 90, + "background_removal": true, + "background_removal_index": 89, + "bbox_decoders": [ + { + "w": [ + 0.075, + 0.10606602, + 0.05303301, + 0.09449408, + 0.13363481, + 0.06681741, + 0.11905508, + 0.1683693, + 0.08418465 + ], + "h": [ + 0.075, + 0.05303301, + 0.10606602, + 0.09449408, + 0.06681741, + 0.13363481, + 0.11905508, + 0.08418465, + 0.1683693 + ], + "reg_layer": "efficientdet_lite0/conv65", + "cls_layer": "efficientdet_lite0/conv66" + }, + { + "w": [ + 0.15, + 0.21213203, + 0.10606602, + 0.18898816, + 0.26726963, + 0.13363481, + 0.23811015, + 0.3367386, + 0.1683693 + ], + "h": [ + 0.15, + 
0.10606602, + 0.21213203, + 0.18898816, + 0.13363481, + 0.26726963, + 0.23811015, + 0.1683693, + 0.3367386 + ], + "reg_layer": "efficientdet_lite0/conv74", + "cls_layer": "efficientdet_lite0/conv75" + }, + { + "w": [ + 0.3, + 0.42426407, + 0.21213203, + 0.37797633, + 0.53453925, + 0.26726963, + 0.4762203, + 0.67347721, + 0.3367386 + ], + "h": [ + 0.3, + 0.21213203, + 0.42426407, + 0.37797633, + 0.26726963, + 0.53453925, + 0.4762203, + 0.3367386, + 0.67347721 + ], + "reg_layer": "efficientdet_lite0/conv83", + "cls_layer": "efficientdet_lite0/conv84" + }, + { + "w": [ + 0.6, + 0.84852814, + 0.42426407, + 0.75595266, + 1.0690785, + 0.53453925, + 0.9524406, + 1.34695441, + 0.67347721 + ], + "h": [ + 0.6, + 0.42426407, + 0.84852814, + 0.75595266, + 0.53453925, + 1.0690785, + 0.9524406, + 0.67347721, + 1.34695441 + ], + "reg_layer": "efficientdet_lite0/conv92", + "cls_layer": "efficientdet_lite0/conv93" + }, + { + "w": [ + 1.0, + 1.41421356, + 0.70710678, + 1.2599211, + 1.78179751, + 0.89089875, + 1.587401, + 2.24492402, + 1.12246201 + ], + "h": [ + 1.0, + 0.70710678, + 1.41421356, + 1.2599211, + 0.89089875, + 1.78179751, + 1.587401, + 1.12246201, + 2.24492402 + ], + "reg_layer": "efficientdet_lite0/conv101", + "cls_layer": "efficientdet_lite0/conv102" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/efficientdet_lite1_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/efficientdet_lite1_nms_config.json new file mode 100644 index 00000000..25914e22 --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/efficientdet_lite1_nms_config.json @@ -0,0 +1,146 @@ +{ + "nms_scores_th": 0.5, + "nms_iou_th": 0.5, + "image_dims": [ + 384, + 384 + ], + "max_proposals_per_class": 100, + "centers_scale_factor": 1, + "bbox_dimensions_scale_factor": 1, + "classes": 90, + "background_removal": true, + "background_removal_index": 89, + "bbox_decoders": [ + { + "w": [ + 0.0625, + 0.08838835, + 0.04419417, + 0.07874507, + 0.11136234, + 
0.05568117, + 0.09921256, + 0.14030775, + 0.07015388 + ], + "h": [ + 0.0625, + 0.04419417, + 0.08838835, + 0.07874507, + 0.05568117, + 0.11136234, + 0.09921256, + 0.07015388, + 0.14030775 + ], + "reg_layer": "efficientdet_lite1/conv83", + "cls_layer": "efficientdet_lite1/conv84" + }, + { + "w": [ + 0.125, + 0.1767767, + 0.08838835, + 0.15749014, + 0.22272469, + 0.11136234, + 0.19842513, + 0.2806155, + 0.14030775 + ], + "h": [ + 0.125, + 0.08838835, + 0.1767767, + 0.15749014, + 0.11136234, + 0.22272469, + 0.19842513, + 0.14030775, + 0.2806155 + ], + "reg_layer": "efficientdet_lite1/conv92", + "cls_layer": "efficientdet_lite1/conv93" + }, + { + "w": [ + 0.25, + 0.35355339, + 0.1767767, + 0.31498027, + 0.44544938, + 0.22272469, + 0.39685025, + 0.56123101, + 0.2806155 + ], + "h": [ + 0.25, + 0.1767767, + 0.35355339, + 0.31498027, + 0.22272469, + 0.44544938, + 0.39685025, + 0.2806155, + 0.56123101 + ], + "reg_layer": "efficientdet_lite1/conv101", + "cls_layer": "efficientdet_lite1/conv102" + }, + { + "w": [ + 0.5, + 0.70710678, + 0.35355339, + 0.62996055, + 0.89089875, + 0.44544938, + 0.7937005, + 1.12246201, + 0.56123101 + ], + "h": [ + 0.5, + 0.35355339, + 0.70710678, + 0.62996055, + 0.44544938, + 0.89089875, + 0.7937005, + 0.56123101, + 1.12246201 + ], + "reg_layer": "efficientdet_lite1/conv110", + "cls_layer": "efficientdet_lite1/conv111" + }, + { + "w": [ + 1.0, + 1.41421356, + 0.70710678, + 1.2599211, + 1.78179751, + 0.89089875, + 1.587401, + 2.24492402, + 1.12246201 + ], + "h": [ + 1.0, + 0.70710678, + 1.41421356, + 1.2599211, + 0.89089875, + 1.78179751, + 1.587401, + 1.12246201, + 2.24492402 + ], + "reg_layer": "efficientdet_lite1/conv119", + "cls_layer": "efficientdet_lite1/conv120" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/efficientdet_lite2_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/efficientdet_lite2_nms_config.json new file mode 100644 index 00000000..68a72c67 --- /dev/null +++ 
b/hailo_model_zoo/cfg/postprocess_config/efficientdet_lite2_nms_config.json @@ -0,0 +1,146 @@ +{ + "nms_scores_th": 0.5, + "nms_iou_th": 0.5, + "image_dims": [ + 448, + 448 + ], + "max_proposals_per_class": 100, + "centers_scale_factor": 1, + "bbox_dimensions_scale_factor": 1, + "classes": 90, + "background_removal": true, + "background_removal_index": 89, + "bbox_decoders": [ + { + "w": [ + 0.05357143, + 0.07576144, + 0.03788072, + 0.06749577, + 0.09545344, + 0.04772672, + 0.08503934, + 0.12026379, + 0.06013189 + ], + "h": [ + 0.05357143, + 0.03788072, + 0.07576144, + 0.06749577, + 0.04772672, + 0.09545344, + 0.08503934, + 0.06013189, + 0.12026379 + ], + "reg_layer": "efficientdet_lite2/conv91", + "cls_layer": "efficientdet_lite2/conv92" + }, + { + "w": [ + 0.10714286, + 0.15152288, + 0.07576144, + 0.13499155, + 0.19090688, + 0.09545344, + 0.17007868, + 0.24052757, + 0.12026379 + ], + "h": [ + 0.10714286, + 0.07576144, + 0.15152288, + 0.13499155, + 0.09545344, + 0.19090688, + 0.17007868, + 0.12026379, + 0.24052757 + ], + "reg_layer": "efficientdet_lite2/conv100", + "cls_layer": "efficientdet_lite2/conv101" + }, + { + "w": [ + 0.21428571, + 0.30304576, + 0.15152288, + 0.26998309, + 0.38181375, + 0.19090688, + 0.34015736, + 0.48105515, + 0.24052757 + ], + "h": [ + 0.21428571, + 0.15152288, + 0.30304576, + 0.26998309, + 0.19090688, + 0.38181375, + 0.34015736, + 0.24052757, + 0.48105515 + ], + "reg_layer": "efficientdet_lite2/conv109", + "cls_layer": "efficientdet_lite2/conv110" + }, + { + "w": [ + 0.42857143, + 0.60609153, + 0.30304576, + 0.53996619, + 0.7636275, + 0.38181375, + 0.68031471, + 0.9621103, + 0.48105515 + ], + "h": [ + 0.42857143, + 0.30304576, + 0.60609153, + 0.53996619, + 0.38181375, + 0.7636275, + 0.68031471, + 0.48105515, + 0.9621103 + ], + "reg_layer": "efficientdet_lite2/conv118", + "cls_layer": "efficientdet_lite2/conv119" + }, + { + "w": [ + 0.75, + 1.06066017, + 0.53033009, + 0.94494082, + 1.33634813, + 0.66817407, + 1.19055075, + 1.68369302, + 
0.84184651 + ], + "h": [ + 0.75, + 0.53033009, + 1.06066017, + 0.94494082, + 0.66817407, + 1.33634813, + 1.19055075, + 0.84184651, + 1.68369302 + ], + "reg_layer": "efficientdet_lite2/conv126", + "cls_layer": "efficientdet_lite2/conv128" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/mobilenet_ssd_hd_nms_postprocess_config.json b/hailo_model_zoo/cfg/postprocess_config/mobilenet_ssd_hd_nms_postprocess_config.json new file mode 100644 index 00000000..721e348e --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/mobilenet_ssd_hd_nms_postprocess_config.json @@ -0,0 +1,136 @@ +{ + "nms_scores_th": 0.3, + "nms_iou_th": 0.5, + "max_proposals_per_class": 20, + "centers_scale_factor": 10, + "bbox_dimensions_scale_factor": 5, + "image_dims": [ + 720, + 1280 + ], + "classes": 91, + "background_removal": true, + "background_removal_index": 0, + "bbox_decoders": [ + { + "name": "bbox_decoder13", + "h": [ + 0.1, + 0.1414213562373095, + 0.28284273 + ], + "w": [ + 0.05625, + 0.15909903, + 0.07954951 + ], + "reg_layer": "conv13", + "cls_layer": "conv14" + }, + { + "name": "bbox_decoder17", + "h": [ + 0.35, + 0.24748737, + 0.49497473, + 0.20207259, + 0.6062481, + 0.41833 + ], + "w": [ + 0.19687499, + 0.27842328, + 0.13921164, + 0.3409975, + 0.11366015, + 0.23531063 + ], + "reg_layer": "conv17", + "cls_layer": "conv18" + }, + { + "name": "bbox_decoder21", + "h": [ + 0.5, + 0.35355338, + 0.70710677, + 0.28867513, + 0.8660687, + 0.57008773 + ], + "w": [ + 0.28125, + 0.39774755, + 0.19887377, + 0.48713928, + 0.16237165, + 0.32067436 + ], + "reg_layer": "conv21", + "cls_layer": "conv22" + }, + { + "name": "bbox_decoder25", + "h": [ + 0.65, + 0.4596194, + 0.9192388, + 0.37527767, + 1.1258893, + 0.7211103 + ], + "w": [ + 0.365625, + 0.5170718, + 0.2585359, + 0.633281, + 0.21108313, + 0.40562454 + ], + "reg_layer": "conv25", + "cls_layer": "conv26" + }, + { + "name": "bbox_decoder29", + "h": [ + 0.8, + 0.56568545, + 1.1313709, + 0.46188024, 
+ 1.3857099, + 0.8717798 + ], + "w": [ + 0.45000002, + 0.6363961, + 0.31819806, + 0.7794229, + 0.25979465, + 0.49037614 + ], + "reg_layer": "conv29", + "cls_layer": "conv30" + }, + { + "name": "bbox_decoder33", + "h": [ + 0.95, + 0.67175144, + 1.3435029, + 0.5484828, + 1.6455305, + 0.9746794 + ], + "w": [ + 0.534375, + 0.7557204, + 0.3778602, + 0.92556465, + 0.30850613, + 0.5482572 + ], + "reg_layer": "conv33", + "cls_layer": "conv34" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/mobilenet_ssd_nms_postprocess_config.json b/hailo_model_zoo/cfg/postprocess_config/mobilenet_ssd_nms_postprocess_config.json new file mode 100644 index 00000000..8284da9c --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/mobilenet_ssd_nms_postprocess_config.json @@ -0,0 +1,133 @@ +{ + "nms_scores_th": 0.3, + "nms_iou_th": 0.6, + "max_proposals_per_class": 20, + "centers_scale_factor": 10, + "bbox_dimensions_scale_factor": 5, + "image_dims": [300, 300], + "classes": 91, + "background_removal": true, + "background_removal_index": 0, + "bbox_decoders": [ + { + "name": "bbox_decoder13", + "h": [ + 0.1, + 0.1414213562373095, + 0.282842712474619 + ], + "w": [ + 0.1, + 0.282842712474619, + 0.1414213562373095 + ], + "reg_layer": "conv13", + "cls_layer": "conv14" + }, + { + "name": "bbox_decoder17", + "h": [ + 0.35, + 0.2474873734152916, + 0.4949747468305832, + 0.20207259421636903, + 0.6062480958117455, + 0.4183300132670378 + ], + "w": [ + 0.35, + 0.4949747468305833, + 0.24748737341529164, + 0.606217782649107, + 0.20206249033405482, + 0.4183300132670378 + ], + "reg_layer": "conv17", + "cls_layer": "conv18" + }, + { + "name": "bbox_decoder21", + "h": [ + 0.5, + 0.35355339059327373, + 0.7071067811865475, + 0.2886751345948129, + 0.8660687083024937, + 0.570087712549569 + ], + "w": [ + 0.5, + 0.7071067811865476, + 0.3535533905932738, + 0.8660254037844386, + 0.2886607004772212, + 0.570087712549569 + ], + "reg_layer": "conv21", + "cls_layer": "conv22" 
+ }, + { + "name": "bbox_decoder25", + "h": [ + 0.65, + 0.4596194077712559, + 0.9192388155425117, + 0.37527767497325676, + 1.1258893207932419, + 0.7211102550927979 + ], + "w": [ + 0.65, + 0.9192388155425119, + 0.45961940777125593, + 1.12583302491977, + 0.3752589106203876, + 0.7211102550927979 + ], + "reg_layer": "conv25", + "cls_layer": "conv26" + }, + { + "name": "bbox_decoder29", + "h": [ + 0.8, + 0.565685424949238, + 1.131370849898476, + 0.46188021535170065, + 1.38570993328399, + 0.8717797887081347 + ], + "w": [ + 0.8, + 1.1313708498984762, + 0.5656854249492381, + 1.3856406460551018, + 0.46185712076355395, + 0.8717797887081347 + ], + "reg_layer": "conv29", + "cls_layer": "conv30" + }, + { + "name": "bbox_decoder33", + "h": [ + 0.95, + 0.67175144212722, + 1.34350288425444, + 0.5484827557301445, + 1.645530545774738, + 0.9746794344808963 + ], + "w": [ + 0.95, + 1.3435028842544403, + 0.6717514421272202, + 1.6454482671904334, + 0.5484553309067203, + 0.9746794344808963 + ], + "reg_layer": "conv33", + "cls_layer": "conv34" + } + ] +} diff --git a/hailo_model_zoo/cfg/postprocess_config/mobilenet_ssd_nms_visdrone_postprocess_config.json b/hailo_model_zoo/cfg/postprocess_config/mobilenet_ssd_nms_visdrone_postprocess_config.json new file mode 100644 index 00000000..a2105c16 --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/mobilenet_ssd_nms_visdrone_postprocess_config.json @@ -0,0 +1,132 @@ +{ + "nms_scores_th": 0.3, + "nms_iou_th": 0.6, + "max_proposals_per_class": 40, + "centers_scale_factor": 10, + "bbox_dimensions_scale_factor": 5, + "classes": 12, + "background_removal": true, + "background_removal_index": 0, + "bbox_decoders": [ + { + "name": "bbox_decoder13", + "h": [ + 0.1, + 0.1414213562373095, + 0.282842712474619 + ], + "w": [ + 0.1, + 0.282842712474619, + 0.1414213562373095 + ], + "reg_layer": "conv13", + "cls_layer": "conv14" + }, + { + "name": "bbox_decoder17", + "h": [ + 0.35, + 0.2474873734152916, + 0.4949747468305832, + 0.20207259421636903, + 
0.6062480958117455, + 0.4183300132670378 + ], + "w": [ + 0.35, + 0.4949747468305833, + 0.24748737341529164, + 0.606217782649107, + 0.20206249033405482, + 0.4183300132670378 + ], + "reg_layer": "conv17", + "cls_layer": "conv18" + }, + { + "name": "bbox_decoder21", + "h": [ + 0.5, + 0.35355339059327373, + 0.7071067811865475, + 0.2886751345948129, + 0.8660687083024937, + 0.570087712549569 + ], + "w": [ + 0.5, + 0.7071067811865476, + 0.3535533905932738, + 0.8660254037844386, + 0.2886607004772212, + 0.570087712549569 + ], + "reg_layer": "conv21", + "cls_layer": "conv22" + }, + { + "name": "bbox_decoder25", + "h": [ + 0.65, + 0.4596194077712559, + 0.9192388155425117, + 0.37527767497325676, + 1.1258893207932419, + 0.7211102550927979 + ], + "w": [ + 0.65, + 0.9192388155425119, + 0.45961940777125593, + 1.12583302491977, + 0.3752589106203876, + 0.7211102550927979 + ], + "reg_layer": "conv25", + "cls_layer": "conv26" + }, + { + "name": "bbox_decoder29", + "h": [ + 0.8, + 0.565685424949238, + 1.131370849898476, + 0.46188021535170065, + 1.38570993328399, + 0.8717797887081347 + ], + "w": [ + 0.8, + 1.1313708498984762, + 0.5656854249492381, + 1.3856406460551018, + 0.46185712076355395, + 0.8717797887081347 + ], + "reg_layer": "conv29", + "cls_layer": "conv30" + }, + { + "name": "bbox_decoder33", + "h": [ + 0.95, + 0.67175144212722, + 1.34350288425444, + 0.5484827557301445, + 1.645530545774738, + 0.9746794344808963 + ], + "w": [ + 0.95, + 1.3435028842544403, + 0.6717514421272202, + 1.6454482671904334, + 0.5484553309067203, + 0.9746794344808963 + ], + "reg_layer": "conv33", + "cls_layer": "conv34" + } + ] +} diff --git a/hailo_model_zoo/cfg/postprocess_config/mobilenet_v2_ssd_nms_postprocess_config.json b/hailo_model_zoo/cfg/postprocess_config/mobilenet_v2_ssd_nms_postprocess_config.json new file mode 100644 index 00000000..9c4133dd --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/mobilenet_v2_ssd_nms_postprocess_config.json @@ -0,0 +1,133 @@ +{ + "nms_scores_th": 0.3, + 
"nms_iou_th": 0.6, + "max_proposals_per_class": 20, + "image_dims": [300, 300], + "centers_scale_factor": 10, + "bbox_dimensions_scale_factor": 5, + "classes": 91, + "background_removal": true, + "background_removal_index": 0, + "bbox_decoders": [ + { + "name": "bbox_decoder28", + "h": [ + 0.1, + 0.1414213562373095, + 0.282842712474619 + ], + "w": [ + 0.1, + 0.282842712474619, + 0.1414213562373095 + ], + "reg_layer": "ssd_mobilenet_v2/conv28", + "cls_layer": "ssd_mobilenet_v2/conv29" + }, + { + "name": "bbox_decoder39", + "h": [ + 0.35, + 0.2474873734152916, + 0.4949747468305832, + 0.20207259421636903, + 0.6062480958117455, + 0.4183300132670378 + ], + "w": [ + 0.35, + 0.4949747468305833, + 0.24748737341529164, + 0.606217782649107, + 0.20206249033405482, + 0.4183300132670378 + ], + "reg_layer": "ssd_mobilenet_v2/conv39", + "cls_layer": "ssd_mobilenet_v2/conv40" + }, + { + "name": "bbox_decoder43", + "h": [ + 0.5, + 0.35355339059327373, + 0.7071067811865475, + 0.2886751345948129, + 0.8660687083024937, + 0.570087712549569 + ], + "w": [ + 0.5, + 0.7071067811865476, + 0.3535533905932738, + 0.8660254037844386, + 0.2886607004772212, + 0.570087712549569 + ], + "reg_layer": "ssd_mobilenet_v2/conv43", + "cls_layer": "ssd_mobilenet_v2/conv44" + }, + { + "name": "bbox_decoder47", + "h": [ + 0.65, + 0.4596194077712559, + 0.9192388155425117, + 0.37527767497325676, + 1.1258893207932419, + 0.7211102550927979 + ], + "w": [ + 0.65, + 0.9192388155425119, + 0.45961940777125593, + 1.12583302491977, + 0.3752589106203876, + 0.7211102550927979 + ], + "reg_layer": "ssd_mobilenet_v2/conv47", + "cls_layer": "ssd_mobilenet_v2/conv48" + }, + { + "name": "bbox_decoder51", + "h": [ + 0.8, + 0.565685424949238, + 1.131370849898476, + 0.46188021535170065, + 1.38570993328399, + 0.8717797887081347 + ], + "w": [ + 0.8, + 1.1313708498984762, + 0.5656854249492381, + 1.3856406460551018, + 0.46185712076355395, + 0.8717797887081347 + ], + "reg_layer": "ssd_mobilenet_v2/conv51", + "cls_layer": 
"ssd_mobilenet_v2/conv52" + }, + { + "name": "bbox_decoder54", + "h": [ + 0.95, + 0.67175144212722, + 1.34350288425444, + 0.5484827557301445, + 1.645530545774738, + 0.9746794344808963 + ], + "w": [ + 0.95, + 1.3435028842544403, + 0.6717514421272202, + 1.6454482671904334, + 0.5484553309067203, + 0.9746794344808963 + ], + "reg_layer": "ssd_mobilenet_v2/conv54", + "cls_layer": "ssd_mobilenet_v2/conv55" + } + ] +} diff --git a/hailo_model_zoo/cfg/postprocess_config/nanodet_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/nanodet_nms_config.json new file mode 100644 index 00000000..0f582e72 --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/nanodet_nms_config.json @@ -0,0 +1,30 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.6, + "image_dims": [ + 416, + 416 + ], + "max_proposals_per_class": 100, + "classes": 80, + "regression_length": 11, + "background_removal": false, + "background_removal_index": 0, + "bbox_decoders": [ + { + "name": "bbox_decoder1", + "stride": 8, + "combined_layer": "" + }, + { + "name": "bbox_decoder2", + "stride": 16, + "combined_layer": "" + }, + { + "name": "bbox_decoder3", + "stride": 32, + "combined_layer": "" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/nanodet_repvgg_a1_640_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/nanodet_repvgg_a1_640_nms_config.json new file mode 100644 index 00000000..b3ec67a1 --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/nanodet_repvgg_a1_640_nms_config.json @@ -0,0 +1,30 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.6, + "image_dims": [ + 640, + 640 + ], + "max_proposals_per_class": 100, + "classes": 80, + "regression_length": 11, + "background_removal": false, + "background_removal_index": 0, + "bbox_decoders": [ + { + "name": "bbox_decoder1", + "stride": 8, + "combined_layer": "" + }, + { + "name": "bbox_decoder2", + "stride": 16, + "combined_layer": "" + }, + { + "name": "bbox_decoder3", + "stride": 32, + "combined_layer": 
"" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/nms_config_nanodet_repvgg_a12.json b/hailo_model_zoo/cfg/postprocess_config/nms_config_nanodet_repvgg_a12.json new file mode 100644 index 00000000..e1576549 --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/nms_config_nanodet_repvgg_a12.json @@ -0,0 +1,34 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.65, + "number_of_detection_heads": 3, + "image_dims": [ + 640, + 640 + ], + "max_proposals_per_class": 100, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder_8", + "stride": 8, + "reg_layer": "conv32", + "objectness_layer": "conv33", + "cls_layer": "conv31" + }, + { + "name": "bbox_decoder_16", + "stride": 16, + "reg_layer": "conv40", + "objectness_layer": "conv41", + "cls_layer": "conv39" + }, + { + "name": "bbox_decoder_32", + "stride": 32, + "reg_layer": "conv48", + "objectness_layer": "conv49", + "cls_layer": "conv47" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/nms_config_yolov6n.json b/hailo_model_zoo/cfg/postprocess_config/nms_config_yolov6n.json new file mode 100644 index 00000000..e019e72f --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/nms_config_yolov6n.json @@ -0,0 +1,34 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.65, + "number_of_detection_heads": 3, + "image_dims": [ + 640, + 640 + ], + "max_proposals_per_class": 100, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder_8", + "stride": 8, + "reg_layer": "conv37", + "objectness_layer": "conv38", + "cls_layer": "conv36" + }, + { + "name": "bbox_decoder_16", + "stride": 16, + "reg_layer": "conv48", + "objectness_layer": "conv49", + "cls_layer": "conv47" + }, + { + "name": "bbox_decoder_32", + "stride": 32, + "reg_layer": "conv58", + "objectness_layer": "conv59", + "cls_layer": "conv57" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/nms_config_yolox_l_leaky.json 
b/hailo_model_zoo/cfg/postprocess_config/nms_config_yolox_l_leaky.json new file mode 100644 index 00000000..9fb37e67 --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/nms_config_yolox_l_leaky.json @@ -0,0 +1,34 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.65, + "number_of_detection_heads": 3, + "image_dims": [ + 640, + 640 + ], + "max_proposals_per_class": 100, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder_8", + "stride": 8, + "reg_layer": "conv95", + "objectness_layer": "conv96", + "cls_layer": "conv94" + }, + { + "name": "bbox_decoder_16", + "stride": 16, + "reg_layer": "conv113", + "objectness_layer": "conv114", + "cls_layer": "conv112" + }, + { + "name": "bbox_decoder_32", + "stride": 32, + "reg_layer": "conv130", + "objectness_layer": "conv131", + "cls_layer": "conv129" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/nms_config_yolox_s_leaky.json b/hailo_model_zoo/cfg/postprocess_config/nms_config_yolox_s_leaky.json new file mode 100644 index 00000000..49b933a3 --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/nms_config_yolox_s_leaky.json @@ -0,0 +1,34 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.65, + "number_of_detection_heads": 3, + "image_dims": [ + 640, + 640 + ], + "max_proposals_per_class": 100, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder_8", + "stride": 8, + "reg_layer": "conv55", + "objectness_layer": "conv56", + "cls_layer": "conv54" + }, + { + "name": "bbox_decoder_16", + "stride": 16, + "reg_layer": "conv69", + "objectness_layer": "conv70", + "cls_layer": "conv68" + }, + { + "name": "bbox_decoder_32", + "stride": 32, + "reg_layer": "conv82", + "objectness_layer": "conv83", + "cls_layer": "conv81" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/nms_config_yolox_s_wide_leaky.json b/hailo_model_zoo/cfg/postprocess_config/nms_config_yolox_s_wide_leaky.json new file mode 100644 index 00000000..49b933a3 
--- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/nms_config_yolox_s_wide_leaky.json @@ -0,0 +1,34 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.65, + "number_of_detection_heads": 3, + "image_dims": [ + 640, + 640 + ], + "max_proposals_per_class": 100, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder_8", + "stride": 8, + "reg_layer": "conv55", + "objectness_layer": "conv56", + "cls_layer": "conv54" + }, + { + "name": "bbox_decoder_16", + "stride": 16, + "reg_layer": "conv69", + "objectness_layer": "conv70", + "cls_layer": "conv68" + }, + { + "name": "bbox_decoder_32", + "stride": 32, + "reg_layer": "conv82", + "objectness_layer": "conv83", + "cls_layer": "conv81" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/nms_config_yolox_tiny.json b/hailo_model_zoo/cfg/postprocess_config/nms_config_yolox_tiny.json new file mode 100644 index 00000000..98c9a11d --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/nms_config_yolox_tiny.json @@ -0,0 +1,34 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.45, + "number_of_detection_heads": 3, + "image_dims": [ + 416, + 416 + ], + "max_proposals_per_class": 100, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder_8", + "stride": 8, + "reg_layer": "conv55", + "objectness_layer": "conv56", + "cls_layer": "conv54" + }, + { + "name": "bbox_decoder_16", + "stride": 16, + "reg_layer": "conv69", + "objectness_layer": "conv70", + "cls_layer": "conv68" + }, + { + "name": "bbox_decoder_32", + "stride": 32, + "reg_layer": "conv82", + "objectness_layer": "conv83", + "cls_layer": "conv81" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/validation_only_yolov5s_vehicles_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/validation_only_yolov5s_vehicles_nms_config.json new file mode 100644 index 00000000..447f9b9c --- /dev/null +++ 
b/hailo_model_zoo/cfg/postprocess_config/validation_only_yolov5s_vehicles_nms_config.json @@ -0,0 +1,58 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.6, + "image_dims": [ + 640, + 640 + ], + "max_proposals_per_class": 80, + "background_removal": false, + "classes": 10, + "bbox_decoders": [ + { + "name": "bbox_decoder55", + "w": [ + 10, + 16, + 33 + ], + "h": [ + 13, + 30, + 23 + ], + "stride": 8, + "encoded_layer": "conv55" + }, + { + "name": "bbox_decoder63", + "w": [ + 30, + 62, + 59 + ], + "h": [ + 61, + 45, + 119 + ], + "stride": 16, + "encoded_layer": "conv63" + }, + { + "name": "bbox_decoder70", + "w": [ + 116, + 156, + 373 + ], + "h": [ + 90, + 198, + 326 + ], + "stride": 32, + "encoded_layer": "conv70" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov5l_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/yolov5l_nms_config.json new file mode 100644 index 00000000..f5f36613 --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov5l_nms_config.json @@ -0,0 +1,55 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.6, + "image_dims": [640, 640], + "max_proposals_per_class": 80, + "background_removal": false, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder95", + "w": [ + 10, + 16, + 33 + ], + "h": [ + 13, + 30, + 23 + ], + "stride": 8, + "encoded_layer": "conv95" + }, + { + "name": "bbox_decoder107", + "w": [ + 30, + 62, + 59 + ], + "h": [ + 61, + 45, + 119 + ], + "stride": 16, + "encoded_layer": "conv107" + }, + { + "name": "bbox_decoder118", + "w": [ + 116, + 156, + 373 + ], + "h": [ + 90, + 198, + 326 + ], + "stride": 32, + "encoded_layer": "conv118" + } + ] +} diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov5m6_leaky_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/yolov5m6_leaky_nms_config.json new file mode 100644 index 00000000..b7dc1c6c --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov5m6_leaky_nms_config.json @@ -0,0 +1,73 @@ +{ + "nms_scores_th": 
0.2, + "nms_iou_th": 0.6, + "image_dims": [ + 1280, + 1280 + ], + "max_proposals_per_class": 80, + "background_removal": false, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder81", + "w": [ + 19, + 44, + 38 + ], + "h": [ + 27, + 40, + 94 + ], + "stride": 8, + "encoded_layer": "conv81" + }, + { + "name": "bbox_decoder90", + "w": [ + 96, + 86, + 180 + ], + "h": [ + 68, + 152, + 137 + ], + "stride": 16, + "encoded_layer": "conv90" + }, + { + "name": "bbox_decoder99", + "w": [ + 140, + 303, + 238 + ], + "h": [ + 301, + 264, + 542 + ], + "stride": 32, + "encoded_layer": "conv99" + }, + { + "name": "bbox_decoder107", + "w": [ + 436, + 739, + 925 + ], + "h": [ + 615, + 380, + 792 + ], + "stride": 64, + "encoded_layer": "conv107" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov5m6_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/yolov5m6_nms_config.json new file mode 100644 index 00000000..b7dc1c6c --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov5m6_nms_config.json @@ -0,0 +1,73 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.6, + "image_dims": [ + 1280, + 1280 + ], + "max_proposals_per_class": 80, + "background_removal": false, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder81", + "w": [ + 19, + 44, + 38 + ], + "h": [ + 27, + 40, + 94 + ], + "stride": 8, + "encoded_layer": "conv81" + }, + { + "name": "bbox_decoder90", + "w": [ + 96, + 86, + 180 + ], + "h": [ + 68, + 152, + 137 + ], + "stride": 16, + "encoded_layer": "conv90" + }, + { + "name": "bbox_decoder99", + "w": [ + 140, + 303, + 238 + ], + "h": [ + 301, + 264, + 542 + ], + "stride": 32, + "encoded_layer": "conv99" + }, + { + "name": "bbox_decoder107", + "w": [ + 436, + 739, + 925 + ], + "h": [ + 615, + 380, + 792 + ], + "stride": 64, + "encoded_layer": "conv107" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov5m_6.1_nms_config.json 
b/hailo_model_zoo/cfg/postprocess_config/yolov5m_6.1_nms_config.json new file mode 100644 index 00000000..ea30938d --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov5m_6.1_nms_config.json @@ -0,0 +1,58 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.6, + "image_dims": [ + 640, + 640 + ], + "max_proposals_per_class": 80, + "background_removal": false, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder65", + "w": [ + 10, + 16, + 33 + ], + "h": [ + 13, + 30, + 23 + ], + "stride": 8, + "encoded_layer": "conv65" + }, + { + "name": "bbox_decoder74", + "w": [ + 30, + 62, + 59 + ], + "h": [ + 61, + 45, + 119 + ], + "stride": 16, + "encoded_layer": "conv74" + }, + { + "name": "bbox_decoder82", + "w": [ + 116, + 156, + 373 + ], + "h": [ + 90, + 198, + 326 + ], + "stride": 32, + "encoded_layer": "conv82" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov5m_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/yolov5m_nms_config.json new file mode 100644 index 00000000..1ca7ad89 --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov5m_nms_config.json @@ -0,0 +1,55 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.6, + "image_dims": [640, 640], + "max_proposals_per_class": 80, + "background_removal": false, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder74", + "w": [ + 10, + 16, + 33 + ], + "h": [ + 13, + 30, + 23 + ], + "stride": 8, + "encoded_layer": "conv74" + }, + { + "name": "bbox_decoder84", + "w": [ + 30, + 62, + 59 + ], + "h": [ + 61, + 45, + 119 + ], + "stride": 16, + "encoded_layer": "conv84" + }, + { + "name": "bbox_decoder93", + "w": [ + 116, + 156, + 373 + ], + "h": [ + 90, + 198, + 326 + ], + "stride": 32, + "encoded_layer": "conv93" + } + ] +} diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov5m_spp_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/yolov5m_spp_nms_config.json new file mode 100644 index 00000000..215eaf98 --- /dev/null +++ 
b/hailo_model_zoo/cfg/postprocess_config/yolov5m_spp_nms_config.json @@ -0,0 +1,58 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.6, + "image_dims": [ + 640, + 640 + ], + "max_proposals_per_class": 80, + "background_removal": false, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder75", + "w": [ + 10, + 16, + 33 + ], + "h": [ + 13, + 30, + 23 + ], + "stride": 8, + "encoded_layer": "conv75" + }, + { + "name": "bbox_decoder85", + "w": [ + 30, + 62, + 59 + ], + "h": [ + 61, + 45, + 119 + ], + "stride": 16, + "encoded_layer": "conv85" + }, + { + "name": "bbox_decoder94", + "w": [ + 116, + 156, + 373 + ], + "h": [ + 90, + 198, + 326 + ], + "stride": 32, + "encoded_layer": "conv94" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov5m_vehicles_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/yolov5m_vehicles_nms_config.json new file mode 100644 index 00000000..135e032e --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov5m_vehicles_nms_config.json @@ -0,0 +1,55 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.6, + "image_dims": [640, 640], + "max_proposals_per_class": 80, + "background_removal": false, + "classes": 1, + "bbox_decoders": [ + { + "name": "bbox_decoder75", + "w": [ + 10, + 16, + 33 + ], + "h": [ + 13, + 30, + 23 + ], + "stride": 8, + "encoded_layer": "conv75" + }, + { + "name": "bbox_decoder85", + "w": [ + 30, + 62, + 59 + ], + "h": [ + 61, + 45, + 119 + ], + "stride": 16, + "encoded_layer": "conv85" + }, + { + "name": "bbox_decoder94", + "w": [ + 116, + 156, + 373 + ], + "h": [ + 90, + 198, + 326 + ], + "stride": 32, + "encoded_layer": "conv94" + } + ] +} diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov5n6_6.1_leaky_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/yolov5n6_6.1_leaky_nms_config.json new file mode 100644 index 00000000..82266dde --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov5n6_6.1_leaky_nms_config.json @@ -0,0 +1,73 @@ +{ + 
"nms_scores_th": 0.2, + "nms_iou_th": 0.6, + "image_dims": [ + 1280, + 1280 + ], + "max_proposals_per_class": 80, + "background_removal": false, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder59", + "w": [ + 19, + 44, + 38 + ], + "h": [ + 27, + 40, + 94 + ], + "stride": 8, + "encoded_layer": "conv59" + }, + { + "name": "bbox_decoder66", + "w": [ + 96, + 86, + 180 + ], + "h": [ + 68, + 152, + 137 + ], + "stride": 16, + "encoded_layer": "conv66" + }, + { + "name": "bbox_decoder73", + "w": [ + 140, + 303, + 238 + ], + "h": [ + 301, + 264, + 542 + ], + "stride": 32, + "encoded_layer": "conv73" + }, + { + "name": "bbox_decoder79", + "w": [ + 436, + 739, + 925 + ], + "h": [ + 615, + 380, + 792 + ], + "stride": 64, + "encoded_layer": "conv79" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov5n6_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/yolov5n6_nms_config.json new file mode 100644 index 00000000..82266dde --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov5n6_nms_config.json @@ -0,0 +1,73 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.6, + "image_dims": [ + 1280, + 1280 + ], + "max_proposals_per_class": 80, + "background_removal": false, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder59", + "w": [ + 19, + 44, + 38 + ], + "h": [ + 27, + 40, + 94 + ], + "stride": 8, + "encoded_layer": "conv59" + }, + { + "name": "bbox_decoder66", + "w": [ + 96, + 86, + 180 + ], + "h": [ + 68, + 152, + 137 + ], + "stride": 16, + "encoded_layer": "conv66" + }, + { + "name": "bbox_decoder73", + "w": [ + 140, + 303, + 238 + ], + "h": [ + 301, + 264, + 542 + ], + "stride": 32, + "encoded_layer": "conv73" + }, + { + "name": "bbox_decoder79", + "w": [ + 436, + 739, + 925 + ], + "h": [ + 615, + 380, + 792 + ], + "stride": 64, + "encoded_layer": "conv79" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov5n_6.1_nms_config.json 
b/hailo_model_zoo/cfg/postprocess_config/yolov5n_6.1_nms_config.json new file mode 100644 index 00000000..1fef13cc --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov5n_6.1_nms_config.json @@ -0,0 +1,58 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.6, + "image_dims": [ + 640, + 640 + ], + "max_proposals_per_class": 80, + "background_removal": false, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder47", + "w": [ + 10, + 16, + 33 + ], + "h": [ + 13, + 30, + 23 + ], + "stride": 8, + "encoded_layer": "conv47" + }, + { + "name": "bbox_decoder54", + "w": [ + 30, + 62, + 59 + ], + "h": [ + 61, + 45, + 119 + ], + "stride": 16, + "encoded_layer": "conv54" + }, + { + "name": "bbox_decoder60", + "w": [ + 116, + 156, + 373 + ], + "h": [ + 90, + 198, + 326 + ], + "stride": 32, + "encoded_layer": "conv60" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov5n_seg_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/yolov5n_seg_nms_config.json new file mode 100644 index 00000000..252b54a3 --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov5n_seg_nms_config.json @@ -0,0 +1,66 @@ +{ + "nms_scores_th": 0.6, + "nms_iou_th": 0.6, + "image_dims": [ + 640, + 640 + ], + "max_proposals_per_class": 100, + "classes": 80, + "background_removal": false, + "background_removal_index": 0, + "max_total_proposals": 100, + "bbox_decoders": [ + { + "name": "bbox_decoder_48", + "w": [ + 10, + 16, + 33 + ], + "h": [ + 13, + 30, + 23 + ], + "stride": 8, + "encoded_layer": "conv48" + }, + { + "name": "bbox_decoder_55", + "w": [ + 30, + 62, + 59 + ], + "h": [ + 61, + 45, + 119 + ], + "stride": 16, + "encoded_layer": "conv55" + }, + { + "name": "bbox_decoder_61", + "w": [ + 116, + 156, + 373 + ], + "h": [ + 90, + 198, + 326 + ], + "stride": 32, + "encoded_layer": "conv61" + } + ], + "proto": [ + { + "number": 32, + "stride": 4 + } + ] +} \ No newline at end of file diff --git 
a/hailo_model_zoo/cfg/postprocess_config/yolov5s6_leaky_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/yolov5s6_leaky_nms_config.json new file mode 100644 index 00000000..82266dde --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov5s6_leaky_nms_config.json @@ -0,0 +1,73 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.6, + "image_dims": [ + 1280, + 1280 + ], + "max_proposals_per_class": 80, + "background_removal": false, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder59", + "w": [ + 19, + 44, + 38 + ], + "h": [ + 27, + 40, + 94 + ], + "stride": 8, + "encoded_layer": "conv59" + }, + { + "name": "bbox_decoder66", + "w": [ + 96, + 86, + 180 + ], + "h": [ + 68, + 152, + 137 + ], + "stride": 16, + "encoded_layer": "conv66" + }, + { + "name": "bbox_decoder73", + "w": [ + 140, + 303, + 238 + ], + "h": [ + 301, + 264, + 542 + ], + "stride": 32, + "encoded_layer": "conv73" + }, + { + "name": "bbox_decoder79", + "w": [ + 436, + 739, + 925 + ], + "h": [ + 615, + 380, + 792 + ], + "stride": 64, + "encoded_layer": "conv79" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov5s6_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/yolov5s6_nms_config.json new file mode 100644 index 00000000..82266dde --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov5s6_nms_config.json @@ -0,0 +1,73 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.6, + "image_dims": [ + 1280, + 1280 + ], + "max_proposals_per_class": 80, + "background_removal": false, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder59", + "w": [ + 19, + 44, + 38 + ], + "h": [ + 27, + 40, + 94 + ], + "stride": 8, + "encoded_layer": "conv59" + }, + { + "name": "bbox_decoder66", + "w": [ + 96, + 86, + 180 + ], + "h": [ + 68, + 152, + 137 + ], + "stride": 16, + "encoded_layer": "conv66" + }, + { + "name": "bbox_decoder73", + "w": [ + 140, + 303, + 238 + ], + "h": [ + 301, + 264, + 542 + ], + "stride": 32, + 
"encoded_layer": "conv73" + }, + { + "name": "bbox_decoder79", + "w": [ + 436, + 739, + 925 + ], + "h": [ + 615, + 380, + 792 + ], + "stride": 64, + "encoded_layer": "conv79" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov5s_6.0_leaky_focus_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/yolov5s_6.0_leaky_focus_nms_config.json new file mode 100644 index 00000000..1fef13cc --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov5s_6.0_leaky_focus_nms_config.json @@ -0,0 +1,58 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.6, + "image_dims": [ + 640, + 640 + ], + "max_proposals_per_class": 80, + "background_removal": false, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder47", + "w": [ + 10, + 16, + 33 + ], + "h": [ + 13, + 30, + 23 + ], + "stride": 8, + "encoded_layer": "conv47" + }, + { + "name": "bbox_decoder54", + "w": [ + 30, + 62, + 59 + ], + "h": [ + 61, + 45, + 119 + ], + "stride": 16, + "encoded_layer": "conv54" + }, + { + "name": "bbox_decoder60", + "w": [ + 116, + 156, + 373 + ], + "h": [ + 90, + 198, + 326 + ], + "stride": 32, + "encoded_layer": "conv60" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov5s_6.1_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/yolov5s_6.1_nms_config.json new file mode 100644 index 00000000..1fef13cc --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov5s_6.1_nms_config.json @@ -0,0 +1,58 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.6, + "image_dims": [ + 640, + 640 + ], + "max_proposals_per_class": 80, + "background_removal": false, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder47", + "w": [ + 10, + 16, + 33 + ], + "h": [ + 13, + 30, + 23 + ], + "stride": 8, + "encoded_layer": "conv47" + }, + { + "name": "bbox_decoder54", + "w": [ + 30, + 62, + 59 + ], + "h": [ + 61, + 45, + 119 + ], + "stride": 16, + "encoded_layer": "conv54" + }, + { + "name": 
"bbox_decoder60", + "w": [ + 116, + 156, + 373 + ], + "h": [ + 90, + 198, + 326 + ], + "stride": 32, + "encoded_layer": "conv60" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov5s_c3tr_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/yolov5s_c3tr_nms_config.json new file mode 100644 index 00000000..3c70cea5 --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov5s_c3tr_nms_config.json @@ -0,0 +1,58 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.6, + "image_dims": [ + 640, + 640 + ], + "max_proposals_per_class": 80, + "background_removal": false, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder57", + "w": [ + 10, + 16, + 33 + ], + "h": [ + 13, + 30, + 23 + ], + "stride": 8, + "encoded_layer": "conv57" + }, + { + "name": "bbox_decoder64", + "w": [ + 30, + 62, + 59 + ], + "h": [ + 61, + 45, + 119 + ], + "stride": 16, + "encoded_layer": "conv64" + }, + { + "name": "bbox_decoder70", + "w": [ + 116, + 156, + 373 + ], + "h": [ + 90, + 198, + 326 + ], + "stride": 32, + "encoded_layer": "conv70" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov5s_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/yolov5s_nms_config.json new file mode 100644 index 00000000..05b78c74 --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov5s_nms_config.json @@ -0,0 +1,58 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.6, + "image_dims": [ + 640, + 640 + ], + "max_proposals_per_class": 80, + "background_removal": false, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder55", + "w": [ + 10, + 16, + 33 + ], + "h": [ + 13, + 30, + 23 + ], + "stride": 8, + "encoded_layer": "conv55" + }, + { + "name": "bbox_decoder63", + "w": [ + 30, + 62, + 59 + ], + "h": [ + 61, + 45, + 119 + ], + "stride": 16, + "encoded_layer": "conv63" + }, + { + "name": "bbox_decoder70", + "w": [ + 116, + 156, + 373 + ], + "h": [ + 90, + 198, + 326 + ], + "stride": 32, + 
"encoded_layer": "conv70" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov5s_personface.json b/hailo_model_zoo/cfg/postprocess_config/yolov5s_personface.json new file mode 100644 index 00000000..aefba630 --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov5s_personface.json @@ -0,0 +1,58 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.6, + "image_dims": [ + 640, + 640 + ], + "max_proposals_per_class": 80, + "background_removal": false, + "classes": 2, + "bbox_decoders": [ + { + "name": "bbox_decoder55", + "w": [ + 10, + 16, + 33 + ], + "h": [ + 13, + 30, + 23 + ], + "stride": 8, + "encoded_layer": "conv55" + }, + { + "name": "bbox_decoder63", + "w": [ + 30, + 62, + 59 + ], + "h": [ + 61, + 45, + 119 + ], + "stride": 16, + "encoded_layer": "conv63" + }, + { + "name": "bbox_decoder70", + "w": [ + 116, + 156, + 373 + ], + "h": [ + 90, + 198, + 326 + ], + "stride": 32, + "encoded_layer": "conv70" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov5s_vehicles_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/yolov5s_vehicles_nms_config.json new file mode 100644 index 00000000..35d81f07 --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov5s_vehicles_nms_config.json @@ -0,0 +1,58 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.6, + "image_dims": [ + 640, + 640 + ], + "max_proposals_per_class": 80, + "background_removal": false, + "classes": 1, + "bbox_decoders": [ + { + "name": "bbox_decoder55", + "w": [ + 10, + 16, + 33 + ], + "h": [ + 13, + 30, + 23 + ], + "stride": 8, + "encoded_layer": "conv55" + }, + { + "name": "bbox_decoder63", + "w": [ + 30, + 62, + 59 + ], + "h": [ + 61, + 45, + 119 + ], + "stride": 16, + "encoded_layer": "conv63" + }, + { + "name": "bbox_decoder70", + "w": [ + 116, + 156, + 373 + ], + "h": [ + 90, + 198, + 326 + ], + "stride": 32, + "encoded_layer": "conv70" + } + ] +} \ No newline at end of file diff --git 
a/hailo_model_zoo/cfg/postprocess_config/yolov5x_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/yolov5x_nms_config.json new file mode 100644 index 00000000..3f2e9103 --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov5x_nms_config.json @@ -0,0 +1,58 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.6, + "image_dims": [ + 640, + 640 + ], + "max_proposals_per_class": 80, + "background_removal": false, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder115", + "w": [ + 10, + 16, + 33 + ], + "h": [ + 13, + 30, + 23 + ], + "stride": 8, + "encoded_layer": "conv115" + }, + { + "name": "bbox_decoder129", + "w": [ + 30, + 62, + 59 + ], + "h": [ + 61, + 45, + 119 + ], + "stride": 16, + "encoded_layer": "conv129" + }, + { + "name": "bbox_decoder142", + "w": [ + 116, + 156, + 373 + ], + "h": [ + 90, + 198, + 326 + ], + "stride": 32, + "encoded_layer": "conv142" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov5xs_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/yolov5xs_nms_config.json new file mode 100644 index 00000000..992af69e --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov5xs_nms_config.json @@ -0,0 +1,58 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.6, + "image_dims": [ + 512, + 512 + ], + "max_proposals_per_class": 80, + "background_removal": false, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder53", + "w": [ + 10, + 16, + 33 + ], + "h": [ + 13, + 30, + 23 + ], + "stride": 8, + "encoded_layer": "conv53" + }, + { + "name": "bbox_decoder61", + "w": [ + 30, + 62, + 59 + ], + "h": [ + 61, + 45, + 119 + ], + "stride": 16, + "encoded_layer": "conv61" + }, + { + "name": "bbox_decoder69", + "w": [ + 116, + 156, + 373 + ], + "h": [ + 90, + 198, + 326 + ], + "stride": 32, + "encoded_layer": "conv69" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov5xs_wo_spp_nms_config.json 
b/hailo_model_zoo/cfg/postprocess_config/yolov5xs_wo_spp_nms_config.json new file mode 100644 index 00000000..df64fe0d --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov5xs_wo_spp_nms_config.json @@ -0,0 +1,56 @@ +{ + "nms_scores_th": 0.01, + "nms_iou_th": 1.0, + "image_dims": [512, 512], + "max_proposals_per_class": 80, + "background_removal": false, + "input_division_factor": 8, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder53", + "w": [ + 10, + 16, + 33 + ], + "h": [ + 13, + 30, + 23 + ], + "stride": 8, + "encoded_layer": "conv53" + }, + { + "name": "bbox_decoder61", + "w": [ + 30, + 62, + 59 + ], + "h": [ + 61, + 45, + 119 + ], + "stride": 16, + "encoded_layer": "conv61" + }, + { + "name": "bbox_decoder69", + "w": [ + 116, + 156, + 373 + ], + "h": [ + 90, + 198, + 326 + ], + "stride": 32, + "encoded_layer": "conv69" + } + ] +} diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov5xs_wo_spp_nms_config_10class.json b/hailo_model_zoo/cfg/postprocess_config/yolov5xs_wo_spp_nms_config_10class.json new file mode 100644 index 00000000..f3267fd8 --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov5xs_wo_spp_nms_config_10class.json @@ -0,0 +1,56 @@ +{ + "nms_scores_th": 0.01, + "nms_iou_th": 1.0, + "image_dims": [512, 512], + "max_proposals_per_class": 80, + "background_removal": false, + "input_division_factor": 8, + "classes": 10, + "bbox_decoders": [ + { + "name": "bbox_decoder53", + "w": [ + 10, + 16, + 33 + ], + "h": [ + 13, + 30, + 23 + ], + "stride": 8, + "encoded_layer": "conv53" + }, + { + "name": "bbox_decoder61", + "w": [ + 30, + 62, + 59 + ], + "h": [ + 61, + 45, + 119 + ], + "stride": 16, + "encoded_layer": "conv61" + }, + { + "name": "bbox_decoder69", + "w": [ + 116, + 156, + 373 + ], + "h": [ + 90, + 198, + 326 + ], + "stride": 32, + "encoded_layer": "conv69" + } + ] +} diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov5xs_wo_spp_nms_config_1class.json 
b/hailo_model_zoo/cfg/postprocess_config/yolov5xs_wo_spp_nms_config_1class.json new file mode 100644 index 00000000..fdef9025 --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov5xs_wo_spp_nms_config_1class.json @@ -0,0 +1,56 @@ +{ + "nms_scores_th": 0.01, + "nms_iou_th": 1.0, + "image_dims": [512, 512], + "max_proposals_per_class": 80, + "background_removal": false, + "input_division_factor": 8, + "classes": 1, + "bbox_decoders": [ + { + "name": "bbox_decoder53", + "w": [ + 10, + 16, + 33 + ], + "h": [ + 13, + 30, + 23 + ], + "stride": 8, + "encoded_layer": "conv53" + }, + { + "name": "bbox_decoder61", + "w": [ + 30, + 62, + 59 + ], + "h": [ + 61, + 45, + 119 + ], + "stride": 16, + "encoded_layer": "conv61" + }, + { + "name": "bbox_decoder69", + "w": [ + 116, + 156, + 373 + ], + "h": [ + 90, + 198, + 326 + ], + "stride": 32, + "encoded_layer": "conv69" + } + ] +} diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov5xs_wo_spp_nms_config_20class.json b/hailo_model_zoo/cfg/postprocess_config/yolov5xs_wo_spp_nms_config_20class.json new file mode 100644 index 00000000..7332104f --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov5xs_wo_spp_nms_config_20class.json @@ -0,0 +1,56 @@ +{ + "nms_scores_th": 0.01, + "nms_iou_th": 1.0, + "image_dims": [512, 512], + "max_proposals_per_class": 80, + "background_removal": false, + "input_division_factor": 8, + "classes": 20, + "bbox_decoders": [ + { + "name": "bbox_decoder53", + "w": [ + 10, + 16, + 33 + ], + "h": [ + 13, + 30, + 23 + ], + "stride": 8, + "encoded_layer": "conv53" + }, + { + "name": "bbox_decoder61", + "w": [ + 30, + 62, + 59 + ], + "h": [ + 61, + 45, + 119 + ], + "stride": 16, + "encoded_layer": "conv61" + }, + { + "name": "bbox_decoder69", + "w": [ + 116, + 156, + 373 + ], + "h": [ + 90, + 198, + 326 + ], + "stride": 32, + "encoded_layer": "conv69" + } + ] +} diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov5xs_wo_spp_nms_config_5class.json 
b/hailo_model_zoo/cfg/postprocess_config/yolov5xs_wo_spp_nms_config_5class.json new file mode 100644 index 00000000..acce5c26 --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov5xs_wo_spp_nms_config_5class.json @@ -0,0 +1,56 @@ +{ + "nms_scores_th": 0.01, + "nms_iou_th": 1.0, + "image_dims": [512, 512], + "max_proposals_per_class": 80, + "background_removal": false, + "input_division_factor": 8, + "classes": 5, + "bbox_decoders": [ + { + "name": "bbox_decoder53", + "w": [ + 10, + 16, + 33 + ], + "h": [ + 13, + 30, + 23 + ], + "stride": 8, + "encoded_layer": "conv53" + }, + { + "name": "bbox_decoder61", + "w": [ + 30, + 62, + 59 + ], + "h": [ + 61, + 45, + 119 + ], + "stride": 16, + "encoded_layer": "conv61" + }, + { + "name": "bbox_decoder69", + "w": [ + 116, + 156, + 373 + ], + "h": [ + 90, + 198, + 326 + ], + "stride": 32, + "encoded_layer": "conv69" + } + ] +} diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov7_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/yolov7_nms_config.json new file mode 100644 index 00000000..69ab64e3 --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov7_nms_config.json @@ -0,0 +1,58 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.6, + "image_dims": [ + 640, + 640 + ], + "max_proposals_per_class": 80, + "background_removal": false, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder70", + "w": [ + 12, + 19, + 40 + ], + "h": [ + 16, + 36, + 28 + ], + "stride": 8, + "encoded_layer": "conv70" + }, + { + "name": "bbox_decoder82", + "w": [ + 36, + 76, + 72 + ], + "h": [ + 75, + 55, + 146 + ], + "stride": 16, + "encoded_layer": "conv82" + }, + { + "name": "bbox_decoder92", + "w": [ + 142, + 192, + 459 + ], + "h": [ + 110, + 243, + 401 + ], + "stride": 32, + "encoded_layer": "conv92" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov7_tiny_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/yolov7_tiny_nms_config.json new file mode 100644 
index 00000000..0ed573cc --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov7_tiny_nms_config.json @@ -0,0 +1,58 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.6, + "image_dims": [ + 640, + 640 + ], + "max_proposals_per_class": 80, + "background_removal": false, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder43", + "w": [ + 12, + 19, + 40 + ], + "h": [ + 16, + 36, + 28 + ], + "stride": 8, + "encoded_layer": "conv43" + }, + { + "name": "bbox_decoder51", + "w": [ + 36, + 76, + 72 + ], + "h": [ + 75, + 55, + 146 + ], + "stride": 16, + "encoded_layer": "conv51" + }, + { + "name": "bbox_decoder58", + "w": [ + 142, + 192, + 459 + ], + "h": [ + 110, + 243, + 401 + ], + "stride": 32, + "encoded_layer": "conv58" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov7e6_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/yolov7e6_nms_config.json new file mode 100644 index 00000000..6fb0590b --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov7e6_nms_config.json @@ -0,0 +1,73 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.6, + "image_dims": [ + 1280, + 1280 + ], + "max_proposals_per_class": 80, + "background_removal": false, + "classes": 80, + "bbox_decoders": [ + { + "name": "bbox_decoder105", + "w": [ + 19, + 44, + 38 + ], + "h": [ + 27, + 40, + 94 + ], + "stride": 8, + "encoded_layer": "conv105" + }, + { + "name": "bbox_decoder119", + "w": [ + 96, + 86, + 180 + ], + "h": [ + 68, + 152, + 137 + ], + "stride": 16, + "encoded_layer": "conv119" + }, + { + "name": "bbox_decoder133", + "w": [ + 140, + 303, + 238 + ], + "h": [ + 301, + 264, + 542 + ], + "stride": 32, + "encoded_layer": "conv133" + }, + { + "name": "bbox_decoder145", + "w": [ + 436, + 739, + 925 + ], + "h": [ + 615, + 380, + 792 + ], + "stride": 64, + "encoded_layer": "conv145" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov8l_nms_config.json 
b/hailo_model_zoo/cfg/postprocess_config/yolov8l_nms_config.json new file mode 100644 index 00000000..24542a3a --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov8l_nms_config.json @@ -0,0 +1,33 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.7, + "image_dims": [ + 640, + 640 + ], + "max_proposals_per_class": 100, + "classes": 80, + "regression_length": 16, + "background_removal": false, + "background_removal_index": 0, + "bbox_decoders": [ + { + "name": "yolov8l/bbox_decoder73", + "stride": 8, + "reg_layer": "yolov8l/conv73", + "cls_layer": "yolov8l/conv74" + }, + { + "name": "yolov8l/bbox_decoder88", + "stride": 16, + "reg_layer": "yolov8l/conv88", + "cls_layer": "yolov8l/conv89" + }, + { + "name": "yolov8l/bbox_decoder100", + "stride": 32, + "reg_layer": "yolov8l/conv100", + "cls_layer": "yolov8l/conv103" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov8m_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/yolov8m_nms_config.json new file mode 100644 index 00000000..639a4fbb --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov8m_nms_config.json @@ -0,0 +1,33 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.7, + "image_dims": [ + 640, + 640 + ], + "max_proposals_per_class": 100, + "classes": 80, + "regression_length": 16, + "background_removal": false, + "background_removal_index": 0, + "bbox_decoders": [ + { + "name": "yolov8m/bbox_decoder57", + "stride": 8, + "reg_layer": "yolov8m/conv57", + "cls_layer": "yolov8m/conv58" + }, + { + "name": "yolov8m/bbox_decoder70", + "stride": 16, + "reg_layer": "yolov8m/conv70", + "cls_layer": "yolov8m/conv71" + }, + { + "name": "yolov8m/bbox_decoder82", + "stride": 32, + "reg_layer": "yolov8m/conv82", + "cls_layer": "yolov8m/conv83" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov8n_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/yolov8n_nms_config.json new file mode 100644 index 
00000000..4a9794e5 --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov8n_nms_config.json @@ -0,0 +1,32 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.7, + "image_dims": [ + 640, + 640 + ], + "max_proposals_per_class": 100, + "classes": 80, + "regression_length": 16, + "background_removal": false, + "bbox_decoders": [ + { + "name": "yolov8n/bbox_decoder41", + "stride": 8, + "reg_layer": "yolov8n/conv41", + "cls_layer": "yolov8n/conv42" + }, + { + "name": "yolov8n/bbox_decoder52", + "stride": 16, + "reg_layer": "yolov8n/conv52", + "cls_layer": "yolov8n/conv53" + }, + { + "name": "yolov8n/bbox_decoder62", + "stride": 32, + "reg_layer": "yolov8n/conv62", + "cls_layer": "yolov8n/conv63" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov8s_bbox_decoding_only_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/yolov8s_bbox_decoding_only_nms_config.json new file mode 100644 index 00000000..b802e63e --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov8s_bbox_decoding_only_nms_config.json @@ -0,0 +1,33 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.7, + "image_dims": [ + 640, + 640 + ], + "max_proposals_per_class": 100, + "classes": 80, + "regression_length": 16, + "background_removal": false, + "background_removal_index": 0, + "bbox_decoders": [ + { + "name": "yolov8s/bbox_decoder41", + "stride": 8, + "reg_layer": "", + "cls_layer": "" + }, + { + "name": "yolov8s/bbox_decoder52", + "stride": 16, + "reg_layer": "", + "cls_layer": "" + }, + { + "name": "yolov8s/bbox_decoder62", + "stride": 32, + "reg_layer": "", + "cls_layer": "" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov8s_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/yolov8s_nms_config.json new file mode 100644 index 00000000..7b1dfc6c --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov8s_nms_config.json @@ -0,0 +1,33 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.7, + 
"image_dims": [ + 640, + 640 + ], + "max_proposals_per_class": 100, + "classes": 80, + "regression_length": 16, + "background_removal": false, + "background_removal_index": 0, + "bbox_decoders": [ + { + "name": "yolov8s/bbox_decoder41", + "stride": 8, + "reg_layer": "yolov8s/conv41", + "cls_layer": "yolov8s/conv42" + }, + { + "name": "yolov8s/bbox_decoder52", + "stride": 16, + "reg_layer": "yolov8s/conv52", + "cls_layer": "yolov8s/conv53" + }, + { + "name": "yolov8s/bbox_decoder62", + "stride": 32, + "reg_layer": "yolov8s/conv62", + "cls_layer": "yolov8s/conv63" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/yolov8x_nms_config.json b/hailo_model_zoo/cfg/postprocess_config/yolov8x_nms_config.json new file mode 100644 index 00000000..3235a431 --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolov8x_nms_config.json @@ -0,0 +1,33 @@ +{ + "nms_scores_th": 0.2, + "nms_iou_th": 0.7, + "image_dims": [ + 640, + 640 + ], + "max_proposals_per_class": 100, + "classes": 80, + "regression_length": 16, + "background_removal": false, + "background_removal_index": 0, + "bbox_decoders": [ + { + "name": "yolov8x/bbox_decoder73", + "stride": 8, + "reg_layer": "yolov8x/conv73", + "cls_layer": "yolov8x/conv74" + }, + { + "name": "yolov8x/bbox_decoder88", + "stride": 16, + "reg_layer": "yolov8x/conv88", + "cls_layer": "yolov8x/conv89" + }, + { + "name": "yolov8x/bbox_decoder100", + "stride": 32, + "reg_layer": "yolov8x/conv100", + "cls_layer": "yolov8x/conv103" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/cfg/postprocess_config/yolox_hailo_nms.json b/hailo_model_zoo/cfg/postprocess_config/yolox_hailo_nms.json new file mode 100644 index 00000000..e27b4ed2 --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolox_hailo_nms.json @@ -0,0 +1,30 @@ +{ + "nms_scores_th": 0.03, + "nms_iou_th": 0.65, + "image_dims": [ + 736, + 960 + ], + "max_proposals_per_class": 100, + "classes": 6, + "bbox_decoders": [ + { + 
"name": "fused_bbox_decoder_8", + "stride": 8, + "reg_layer":"", + "cls_layer":"" + }, + { + "name": "fused_bbox_decoder_16", + "stride": 16, + "reg_layer":"", + "cls_layer":"" + }, + { + "name": "fused_bbox_decoder_32", + "stride": 32, + "reg_layer":"", + "cls_layer":"" + } + ] +} diff --git a/hailo_model_zoo/cfg/postprocess_config/yolox_hailo_pas_nms_4cls_pas20k.json b/hailo_model_zoo/cfg/postprocess_config/yolox_hailo_pas_nms_4cls_pas20k.json new file mode 100644 index 00000000..73ef30fb --- /dev/null +++ b/hailo_model_zoo/cfg/postprocess_config/yolox_hailo_pas_nms_4cls_pas20k.json @@ -0,0 +1,30 @@ +{ + "nms_scores_th": 0.03, + "nms_iou_th": 0.65, + "image_dims": [ + 736, + 960 + ], + "max_proposals_per_class": 67, + "classes": 4, + "bbox_decoders": [ + { + "name": "fused_bbox_decoder_8", + "stride": 8, + "reg_layer": "conv36", + "cls_layer": "conv35" + }, + { + "name": "fused_bbox_decoder_16", + "stride": 16, + "reg_layer": "conv46", + "cls_layer": "conv45" + }, + { + "name": "fused_bbox_decoder_32", + "stride": 32, + "reg_layer": "conv54", + "cls_layer": "conv53" + } + ] +} \ No newline at end of file diff --git a/hailo_model_zoo/core/datasets/dataset_factory.py b/hailo_model_zoo/core/datasets/dataset_factory.py index 8346152d..b37c43ee 100644 --- a/hailo_model_zoo/core/datasets/dataset_factory.py +++ b/hailo_model_zoo/core/datasets/dataset_factory.py @@ -1,9 +1,14 @@ -from hailo_model_zoo.core.datasets import (parse_imagenet, parse_coco, parse_facenet, parse_afw, parse_kitti_depth, - parse_widerface, parse_utkfaces, parse_mot, parse_tusimple, parse_landmarks, - parse_div2k, parse_pascal, parse_kitti_3d, parse_aflw2k3d, - parse_aflw2k3d_tddfa, parse_nyu_depth_v2, parse_300w_lp_tddfa, - parse_lp_ocr, parse_market, parse_peta, parse_bsd100, parse_cifar, parse_lol, - parse_kitti_stereo, parse_bsd68, parse_gustavosta_prompts) +"""Contains a factory for network infer.""" +import importlib + +import hailo_model_zoo.core.datasets +from hailo_model_zoo.core.factory 
import DATASET_FACTORY +from hailo_model_zoo.utils.plugin_utils import iter_namespace + +discovered_plugins = { + name: importlib.import_module(name) + for _, name, _ in iter_namespace(hailo_model_zoo.core.datasets) +} def get_dataset_parse_func(ds_name): @@ -12,50 +17,4 @@ def get_dataset_parse_func(ds_name): image: image tensor image_info: dictionary that contains other information of the image (e.g., the label) """ - return { - 'hand_detection': parse_landmarks.parse_hand_record, - 'hand_landmark': parse_landmarks.parse_hand_record, - 'imagenet': parse_imagenet.parse_record, - 'coco_segmentation': parse_coco.parse_segmentation_record, - 'cityscapes': parse_coco.parse_segmentation_record, - 'oxford_pet': parse_coco.parse_segmentation_record, - 'facenet': parse_facenet.parse_facenet_record, - 'face_landmarks': parse_landmarks.parse_record, - 'kitti_depth': parse_kitti_depth.parse_record, - 'kitti_3d': parse_kitti_3d.parse_record, - 'kitti_stereo': parse_kitti_stereo.parse_record, - 'coco_detection': parse_coco.parse_detection_record, - 'open_images': parse_coco.parse_detection_record, - 'visdrone_detection': parse_coco.parse_detection_record, - 'd2s_detection': parse_coco.parse_detection_record, - 'd2s_fruits_detection': parse_coco.parse_detection_record, - 'coco_2017_detection': parse_coco.parse_detection_record, - 'cocopose': parse_coco.parse_pose_estimation_record, - 'cocopose_single_person': parse_coco.parse_single_person_pose_estimation_record, - 'coco_cityscapes_combined': parse_coco.parse_combined_pas_record, - 'afw': parse_afw.parse_record, - 'widerface': parse_widerface.parse_detection_record, - 'utkfaces': parse_utkfaces.parse_age_gender_record, - 'mot16': parse_mot.parse_mot_record, - 'tusimple': parse_tusimple.parse, - 'div2k': parse_div2k.parse_record, - 'lol': parse_lol.parse_record, - 'pascal': parse_pascal.parse_record, - 'aflw2k3d': parse_aflw2k3d.parse_record, - 'aflw2k3d_tddfa': parse_aflw2k3d_tddfa.parse_record, - 'nyu_depth_v2': 
parse_nyu_depth_v2.parse_record, - 'vehicle_detection': parse_coco.parse_detection_record, - '300w-lp_tddfa': parse_300w_lp_tddfa.parse_record, - 'license_plates': parse_coco.parse_detection_record, - 'lp_ocr': parse_lp_ocr.parse_lp_ocr_record, - 'market1501': parse_market.parse_market_record, - 'personface_detection': parse_coco.parse_detection_record, - 'peta': parse_peta.parse_classification_record, - 'celeba': parse_peta.parse_classification_record, - 'cifar100': parse_cifar.parse_cifar100_record, - 'bsd100': parse_bsd100.parse_record, - 'bsd68': parse_bsd68.parse_record, - 'cbsd68': parse_bsd68.parse_record, - 'gustavosta_prompts_vae': parse_gustavosta_prompts.vae, - 'gustavosta_prompts_unet': parse_gustavosta_prompts.unet, - }[ds_name] + return DATASET_FACTORY.get(ds_name) diff --git a/hailo_model_zoo/core/datasets/parse_300w_lp_tddfa.py b/hailo_model_zoo/core/datasets/parse_300w_lp_tddfa.py index 730eec93..9f70a57d 100644 --- a/hailo_model_zoo/core/datasets/parse_300w_lp_tddfa.py +++ b/hailo_model_zoo/core/datasets/parse_300w_lp_tddfa.py @@ -1,6 +1,9 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import DATASET_FACTORY + +@DATASET_FACTORY.register(name="300w-lp_tddfa") def parse_record(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ diff --git a/hailo_model_zoo/core/datasets/parse_aflw2k3d.py b/hailo_model_zoo/core/datasets/parse_aflw2k3d.py index 5c9c919e..48f905ab 100644 --- a/hailo_model_zoo/core/datasets/parse_aflw2k3d.py +++ b/hailo_model_zoo/core/datasets/parse_aflw2k3d.py @@ -1,6 +1,9 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import DATASET_FACTORY + +@DATASET_FACTORY.register(name="aflw2k3d") def parse_record(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ diff --git a/hailo_model_zoo/core/datasets/parse_aflw2k3d_tddfa.py b/hailo_model_zoo/core/datasets/parse_aflw2k3d_tddfa.py index 
bd192606..e2b4f1d0 100644 --- a/hailo_model_zoo/core/datasets/parse_aflw2k3d_tddfa.py +++ b/hailo_model_zoo/core/datasets/parse_aflw2k3d_tddfa.py @@ -1,6 +1,9 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import DATASET_FACTORY + +@DATASET_FACTORY.register(name="aflw2k3d_tddfa") def parse_record(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ diff --git a/hailo_model_zoo/core/datasets/parse_afw.py b/hailo_model_zoo/core/datasets/parse_afw.py index c6db33df..eac01618 100644 --- a/hailo_model_zoo/core/datasets/parse_afw.py +++ b/hailo_model_zoo/core/datasets/parse_afw.py @@ -1,6 +1,9 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import DATASET_FACTORY + +@DATASET_FACTORY.register(name="afw") def parse_record(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ diff --git a/hailo_model_zoo/core/datasets/parse_bsd100.py b/hailo_model_zoo/core/datasets/parse_bsd100.py index ece286ba..cd6158c3 100644 --- a/hailo_model_zoo/core/datasets/parse_bsd100.py +++ b/hailo_model_zoo/core/datasets/parse_bsd100.py @@ -1,6 +1,9 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import DATASET_FACTORY + +@DATASET_FACTORY.register(name="bsd100") def parse_record(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ diff --git a/hailo_model_zoo/core/datasets/parse_bsd68.py b/hailo_model_zoo/core/datasets/parse_bsd68.py index 365f89ef..d280290b 100644 --- a/hailo_model_zoo/core/datasets/parse_bsd68.py +++ b/hailo_model_zoo/core/datasets/parse_bsd68.py @@ -1,6 +1,10 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import DATASET_FACTORY + +@DATASET_FACTORY.register(name="bsd68") +@DATASET_FACTORY.register(name="cbsd68") def parse_record(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ diff --git 
a/hailo_model_zoo/core/datasets/parse_cifar.py b/hailo_model_zoo/core/datasets/parse_cifar.py index 2002e1af..c30bee7c 100644 --- a/hailo_model_zoo/core/datasets/parse_cifar.py +++ b/hailo_model_zoo/core/datasets/parse_cifar.py @@ -1,6 +1,9 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import DATASET_FACTORY + +@DATASET_FACTORY.register(name="cifar100") def parse_cifar100_record(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ diff --git a/hailo_model_zoo/core/datasets/parse_coco.py b/hailo_model_zoo/core/datasets/parse_coco.py index 769b83a5..6b781a31 100644 --- a/hailo_model_zoo/core/datasets/parse_coco.py +++ b/hailo_model_zoo/core/datasets/parse_coco.py @@ -1,6 +1,9 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import DATASET_FACTORY + +@DATASET_FACTORY.register(name="cocopose_single_person") def parse_single_person_pose_estimation_record(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ @@ -38,6 +41,7 @@ def parse_single_person_pose_estimation_record(serialized_example): return [image, image_info] +@DATASET_FACTORY.register(name="cocopose") def parse_pose_estimation_record(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ @@ -62,6 +66,9 @@ def parse_pose_estimation_record(serialized_example): return [image, image_info] +@DATASET_FACTORY.register(name="coco_segmentation") +@DATASET_FACTORY.register(name="cityscapes") +@DATASET_FACTORY.register(name="oxford_pet") def parse_segmentation_record(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ @@ -93,6 +100,15 @@ def parse_segmentation_record(serialized_example): return [image, image_info] +@DATASET_FACTORY.register(name="coco_detection") +@DATASET_FACTORY.register(name="open_images") +@DATASET_FACTORY.register(name="visdrone_detection") 
+@DATASET_FACTORY.register(name="d2s_detection") +@DATASET_FACTORY.register(name="d2s_fruits_detection") +@DATASET_FACTORY.register(name="coco_2017_detection") +@DATASET_FACTORY.register(name="vehicle_detection") +@DATASET_FACTORY.register(name="license_plates") +@DATASET_FACTORY.register(name="personface_detection") def parse_detection_record(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ @@ -136,28 +152,3 @@ def parse_detection_record(serialized_example): image_info['category_id'] = tf.sparse.to_dense(features['category_id'], default_value=0) return [image, image_info] - - -def parse_combined_pas_record(serialized_example): - """Parse serialized example of TfRecord and extract dictionary of all the information - """ - features = tf.io.parse_single_example( - serialized_example, - features={ - 'height': tf.io.FixedLenFeature([], tf.int64), - 'width': tf.io.FixedLenFeature([], tf.int64), - 'image_name': tf.io.FixedLenFeature([], tf.string), - 'image_jpeg': tf.io.FixedLenFeature([], tf.string), - }) - height = tf.cast(features['height'], tf.int32) - width = tf.cast(features['width'], tf.int32) - image_name = tf.cast(features['image_name'], tf.string) - image = tf.image.decode_jpeg(features['image_jpeg'], channels=3) - image_shape = tf.stack([height, width, 3]) - image = tf.cast(tf.reshape(image, image_shape), tf.uint8) - - image_info = {'image_name': image_name} - image_info['height'] = height - image_info['width'] = width - - return [image, image_info] diff --git a/hailo_model_zoo/core/datasets/parse_div2k.py b/hailo_model_zoo/core/datasets/parse_div2k.py index 7fcee7e6..45f1fea0 100644 --- a/hailo_model_zoo/core/datasets/parse_div2k.py +++ b/hailo_model_zoo/core/datasets/parse_div2k.py @@ -1,6 +1,9 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import DATASET_FACTORY + +@DATASET_FACTORY.register(name="div2k") def parse_record(serialized_example): """Parse serialized example of TfRecord 
and extract dictionary of all the information """ diff --git a/hailo_model_zoo/core/datasets/parse_facenet.py b/hailo_model_zoo/core/datasets/parse_facenet.py index 6b6b3752..5af3ad7a 100644 --- a/hailo_model_zoo/core/datasets/parse_facenet.py +++ b/hailo_model_zoo/core/datasets/parse_facenet.py @@ -1,6 +1,9 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import DATASET_FACTORY + +@DATASET_FACTORY.register(name="facenet") def parse_facenet_record(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ diff --git a/hailo_model_zoo/core/datasets/parse_gustavosta_prompts.py b/hailo_model_zoo/core/datasets/parse_gustavosta_prompts.py index 6c983f43..beb9b35a 100644 --- a/hailo_model_zoo/core/datasets/parse_gustavosta_prompts.py +++ b/hailo_model_zoo/core/datasets/parse_gustavosta_prompts.py @@ -1,6 +1,9 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import DATASET_FACTORY + +@DATASET_FACTORY.register(name="gustavosta_prompts_vae") def vae(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ @@ -26,6 +29,7 @@ def vae(serialized_example): return [vae_input, image_info] +@DATASET_FACTORY.register(name="gustavosta_prompts_unet") def unet(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ diff --git a/hailo_model_zoo/core/datasets/parse_imagenet.py b/hailo_model_zoo/core/datasets/parse_imagenet.py index 6ae15b77..a684f800 100644 --- a/hailo_model_zoo/core/datasets/parse_imagenet.py +++ b/hailo_model_zoo/core/datasets/parse_imagenet.py @@ -1,6 +1,9 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import DATASET_FACTORY + +@DATASET_FACTORY.register(name="imagenet") def parse_record(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ diff --git a/hailo_model_zoo/core/datasets/parse_kitti_3d.py 
b/hailo_model_zoo/core/datasets/parse_kitti_3d.py index 1dac751a..189fd74a 100644 --- a/hailo_model_zoo/core/datasets/parse_kitti_3d.py +++ b/hailo_model_zoo/core/datasets/parse_kitti_3d.py @@ -1,10 +1,12 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import DATASET_FACTORY DEPTH_WIDTH = 1242 DEPTH_HEIGHT = 375 +@DATASET_FACTORY.register(name="kitti_3d") def parse_record(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ diff --git a/hailo_model_zoo/core/datasets/parse_kitti_depth.py b/hailo_model_zoo/core/datasets/parse_kitti_depth.py index 93601275..1eece5b0 100644 --- a/hailo_model_zoo/core/datasets/parse_kitti_depth.py +++ b/hailo_model_zoo/core/datasets/parse_kitti_depth.py @@ -1,10 +1,12 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import DATASET_FACTORY DEPTH_WIDTH = 1242 DEPTH_HEIGHT = 375 +@DATASET_FACTORY.register(name="kitti_depth") def parse_record(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ diff --git a/hailo_model_zoo/core/datasets/parse_kitti_stereo.py b/hailo_model_zoo/core/datasets/parse_kitti_stereo.py index 39bcf379..91908165 100644 --- a/hailo_model_zoo/core/datasets/parse_kitti_stereo.py +++ b/hailo_model_zoo/core/datasets/parse_kitti_stereo.py @@ -1,6 +1,9 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import DATASET_FACTORY + +@DATASET_FACTORY.register(name="kitti_stereo") def parse_record(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ diff --git a/hailo_model_zoo/core/datasets/parse_landmarks.py b/hailo_model_zoo/core/datasets/parse_landmarks.py index 6b1c3b93..ae2111cb 100644 --- a/hailo_model_zoo/core/datasets/parse_landmarks.py +++ b/hailo_model_zoo/core/datasets/parse_landmarks.py @@ -1,6 +1,9 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import DATASET_FACTORY + 
+@DATASET_FACTORY.register(name="face_landmarks") def parse_record(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ @@ -27,6 +30,8 @@ def parse_record(serialized_example): return [image, image_info] +@DATASET_FACTORY.register(name="hand_detection") +@DATASET_FACTORY.register(name="hand_landmark") def parse_hand_record(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ diff --git a/hailo_model_zoo/core/datasets/parse_lol.py b/hailo_model_zoo/core/datasets/parse_lol.py index 06719035..461d0d3c 100644 --- a/hailo_model_zoo/core/datasets/parse_lol.py +++ b/hailo_model_zoo/core/datasets/parse_lol.py @@ -1,6 +1,9 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import DATASET_FACTORY + +@DATASET_FACTORY.register(name="lol") def parse_record(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ diff --git a/hailo_model_zoo/core/datasets/parse_lp_ocr.py b/hailo_model_zoo/core/datasets/parse_lp_ocr.py index 811b60b8..e7e724a9 100644 --- a/hailo_model_zoo/core/datasets/parse_lp_ocr.py +++ b/hailo_model_zoo/core/datasets/parse_lp_ocr.py @@ -1,6 +1,9 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import DATASET_FACTORY + +@DATASET_FACTORY.register(name="lp_ocr") def parse_lp_ocr_record(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ diff --git a/hailo_model_zoo/core/datasets/parse_market.py b/hailo_model_zoo/core/datasets/parse_market.py index e759b65c..c5d62bd7 100644 --- a/hailo_model_zoo/core/datasets/parse_market.py +++ b/hailo_model_zoo/core/datasets/parse_market.py @@ -1,6 +1,9 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import DATASET_FACTORY + +@DATASET_FACTORY.register(name="market1501") def parse_market_record(serialized_example): """Parse serialized example of TfRecord and extract 
dictionary of all the information """ diff --git a/hailo_model_zoo/core/datasets/parse_mot.py b/hailo_model_zoo/core/datasets/parse_mot.py index a758accc..b9b383b3 100644 --- a/hailo_model_zoo/core/datasets/parse_mot.py +++ b/hailo_model_zoo/core/datasets/parse_mot.py @@ -1,6 +1,9 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import DATASET_FACTORY + +@DATASET_FACTORY.register(name="mot16") def parse_mot_record(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ diff --git a/hailo_model_zoo/core/datasets/parse_nyu_depth_v2.py b/hailo_model_zoo/core/datasets/parse_nyu_depth_v2.py index d2c5e708..00fd8782 100644 --- a/hailo_model_zoo/core/datasets/parse_nyu_depth_v2.py +++ b/hailo_model_zoo/core/datasets/parse_nyu_depth_v2.py @@ -1,10 +1,12 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import DATASET_FACTORY DEPTH_WIDTH = 640 DEPTH_HEIGHT = 480 +@DATASET_FACTORY.register(name="nyu_depth_v2") def parse_record(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ diff --git a/hailo_model_zoo/core/datasets/parse_pascal.py b/hailo_model_zoo/core/datasets/parse_pascal.py index ea738d68..0abbd9be 100644 --- a/hailo_model_zoo/core/datasets/parse_pascal.py +++ b/hailo_model_zoo/core/datasets/parse_pascal.py @@ -1,6 +1,9 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import DATASET_FACTORY + +@DATASET_FACTORY.register(name="pascal") def parse_record(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ diff --git a/hailo_model_zoo/core/datasets/parse_peta.py b/hailo_model_zoo/core/datasets/parse_peta.py index 26e26b8b..8db436dd 100644 --- a/hailo_model_zoo/core/datasets/parse_peta.py +++ b/hailo_model_zoo/core/datasets/parse_peta.py @@ -1,6 +1,10 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import DATASET_FACTORY + 
+@DATASET_FACTORY.register(name="peta") +@DATASET_FACTORY.register(name="celeba") def parse_classification_record(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ diff --git a/hailo_model_zoo/core/datasets/parse_tusimple.py b/hailo_model_zoo/core/datasets/parse_tusimple.py index ab9b7b1c..b36ec3d1 100644 --- a/hailo_model_zoo/core/datasets/parse_tusimple.py +++ b/hailo_model_zoo/core/datasets/parse_tusimple.py @@ -1,6 +1,9 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import DATASET_FACTORY + +@DATASET_FACTORY.register(name="tusimple") def parse(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ diff --git a/hailo_model_zoo/core/datasets/parse_utkfaces.py b/hailo_model_zoo/core/datasets/parse_utkfaces.py index 5c5302a2..6281a4ef 100644 --- a/hailo_model_zoo/core/datasets/parse_utkfaces.py +++ b/hailo_model_zoo/core/datasets/parse_utkfaces.py @@ -1,6 +1,9 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import DATASET_FACTORY + +@DATASET_FACTORY.register(name="utkfaces") def parse_age_gender_record(serialized_example): """ Parse serialized example of TfRecord and extract dictionary of all the information diff --git a/hailo_model_zoo/core/datasets/parse_widerface.py b/hailo_model_zoo/core/datasets/parse_widerface.py index 719fb31a..6d1acf06 100644 --- a/hailo_model_zoo/core/datasets/parse_widerface.py +++ b/hailo_model_zoo/core/datasets/parse_widerface.py @@ -1,6 +1,9 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import DATASET_FACTORY + +@DATASET_FACTORY.register(name="widerface") def parse_detection_record(serialized_example): """Parse serialized example of TfRecord and extract dictionary of all the information """ diff --git a/hailo_model_zoo/core/eval/age_gender_evaluation.py b/hailo_model_zoo/core/eval/age_gender_evaluation.py index 0795dd41..fa383783 100644 --- 
a/hailo_model_zoo/core/eval/age_gender_evaluation.py +++ b/hailo_model_zoo/core/eval/age_gender_evaluation.py @@ -1,6 +1,7 @@ from collections import OrderedDict from hailo_model_zoo.core.eval.eval_base_class import Eval +from hailo_model_zoo.core.factory import EVAL_FACTORY ACCEPTED_AGE_DELTA = 5 ADIENCE_AGE_LIST = [3.0, 7.0, 13.5, 22.5, 35.0, 45.5, 56.5] @@ -13,6 +14,7 @@ def _get_age_range(age): return len(ADIENCE_AGE_LIST) +@EVAL_FACTORY.register(name="age_gender") class AgeGenderEval(Eval): """ Age/Gender estimation evaluation metrics class. diff --git a/hailo_model_zoo/core/eval/classification_evaluation.py b/hailo_model_zoo/core/eval/classification_evaluation.py index c4d8f3e7..5e06ee7e 100644 --- a/hailo_model_zoo/core/eval/classification_evaluation.py +++ b/hailo_model_zoo/core/eval/classification_evaluation.py @@ -1,9 +1,13 @@ -import numpy as np from collections import OrderedDict +import numpy as np + from hailo_model_zoo.core.eval.eval_base_class import Eval +from hailo_model_zoo.core.factory import EVAL_FACTORY +@EVAL_FACTORY.register(name="zero_shot_classification") +@EVAL_FACTORY.register(name="classification") class ClassificationEval(Eval): def __init__(self, **kwargs): self._metric_names = ['top1', 'top5'] diff --git a/hailo_model_zoo/core/eval/depth_estimation_evaluation.py b/hailo_model_zoo/core/eval/depth_estimation_evaluation.py index d487a84b..d2a2a920 100644 --- a/hailo_model_zoo/core/eval/depth_estimation_evaluation.py +++ b/hailo_model_zoo/core/eval/depth_estimation_evaluation.py @@ -1,7 +1,10 @@ from collections import OrderedDict + +import cv2 import numpy as np + from hailo_model_zoo.core.eval.eval_base_class import Eval -import cv2 +from hailo_model_zoo.core.factory import EVAL_FACTORY DATASETS_INFO = { 'nyu_depth_v2': { @@ -18,6 +21,7 @@ } +@EVAL_FACTORY.register(name="depth_estimation") class DepthEstimationEval(Eval): """ DepthEstimationEval is a class to evaluate depth estimation models. 
diff --git a/hailo_model_zoo/core/eval/detection_3d_evaluation.py b/hailo_model_zoo/core/eval/detection_3d_evaluation.py index 5806141a..704e4e93 100644 --- a/hailo_model_zoo/core/eval/detection_3d_evaluation.py +++ b/hailo_model_zoo/core/eval/detection_3d_evaluation.py @@ -2,8 +2,10 @@ from hailo_model_zoo.core.eval.eval_base_class import Eval from hailo_model_zoo.core.eval.kitti_eval import kitti_evaluation +from hailo_model_zoo.core.factory import EVAL_FACTORY +@EVAL_FACTORY.register(name="3d_detection") class Detection3DEval(Eval): def __init__(self, **kwargs): self._metric_names = ['car_bev_AP_e', 'car_bev_AP_m', 'car_bev_AP_h', diff --git a/hailo_model_zoo/core/eval/detection_evaluation.py b/hailo_model_zoo/core/eval/detection_evaluation.py index 108de641..8d07cbd9 100644 --- a/hailo_model_zoo/core/eval/detection_evaluation.py +++ b/hailo_model_zoo/core/eval/detection_evaluation.py @@ -1,12 +1,14 @@ -import numpy as np from collections import OrderedDict +import numpy as np from pycocotools.coco import COCO from pycocotools.cocoeval import COCOeval from hailo_model_zoo.core.eval.eval_base_class import Eval +from hailo_model_zoo.core.factory import EVAL_FACTORY +@EVAL_FACTORY.register(name="detection") class DetectionEval(Eval): """COCO evaluation metric class.""" diff --git a/hailo_model_zoo/core/eval/eval_factory.py b/hailo_model_zoo/core/eval/eval_factory.py index b6f3bac4..1bedcb19 100644 --- a/hailo_model_zoo/core/eval/eval_factory.py +++ b/hailo_model_zoo/core/eval/eval_factory.py @@ -1,31 +1,19 @@ """Contains a factory for network evaluation.""" -from hailo_model_zoo.core.eval.age_gender_evaluation import AgeGenderEval -from hailo_model_zoo.core.eval.classification_evaluation import ClassificationEval -from hailo_model_zoo.core.eval.face_detection_evaluation import FaceDetectionEval -from hailo_model_zoo.core.eval.segmentation_evaluation import SegmentationEval -from hailo_model_zoo.core.eval.detection_evaluation import DetectionEval -from 
hailo_model_zoo.core.eval.face_verification_evaluation import FaceVerificationEval -from hailo_model_zoo.core.eval.pose_estimation_evaluation import PoseEstimationEval -from hailo_model_zoo.core.eval.instance_segmentation_evaluation import InstanceSegmentationEval -from hailo_model_zoo.core.eval.super_resolution_evaluation import SuperResolutionEval -from hailo_model_zoo.core.eval.low_light_enhancement_evaluation import LowLightEnhancementEval -from hailo_model_zoo.core.eval.srgan_evaluation import SRGANEval -from hailo_model_zoo.core.eval.head_pose_estimation_evaluation import HeadPoseEstimationEval -from hailo_model_zoo.core.eval.multiple_object_tracking_evaluation import MultipleObjectTrackingEval -from hailo_model_zoo.core.eval.lane_detection_evaluation import LaneDetectionEval -from hailo_model_zoo.core.eval.face_landmark_evaluation import FaceLandmarkEval, FaceLandmark3DEval -from hailo_model_zoo.core.eval.detection_3d_evaluation import Detection3DEval -from hailo_model_zoo.core.eval.faster_rcnn_evaluation import FasterRCNNEval -from hailo_model_zoo.core.eval.ocr_evaluation import OCREval -from hailo_model_zoo.core.eval.person_reid_evaluation import PersonReidEval -from hailo_model_zoo.core.eval.person_attr_evaluation import PersonAttrEval -from hailo_model_zoo.core.eval.single_person_pose_estimation_evaluation import SinglePersonPoseEstimationEval -from hailo_model_zoo.core.eval.stereo_evaluation import StereoNetEval -from hailo_model_zoo.core.eval.image_denoising_evaluation import ImageDenoisingEval -from hailo_model_zoo.core.eval.depth_estimation_evaluation import DepthEstimationEval -from hailo_model_zoo.core.eval.image_generation_from_text_evaluation import ImagegenerationFromTextEval +import importlib +import hailo_model_zoo.core.eval +from hailo_model_zoo.core.factory import EVAL_FACTORY +from hailo_model_zoo.utils.plugin_utils import iter_namespace +discovered_plugins = { + name: importlib.import_module(name) + for _, name, _ + in 
iter_namespace(hailo_model_zoo.core.eval) +} + + +@EVAL_FACTORY.register(name="landmark_detection") +@EVAL_FACTORY.register(name="empty") class EmptyEval(): def __init__(self, **kwargs): pass @@ -44,41 +32,5 @@ def get_evaluation(name): Raises: ValueError: If task `name` is not recognized. """ - evaluation_map = { - 'classification': ClassificationEval, - 'zero_shot_classification': ClassificationEval, - 'segmentation': SegmentationEval, - 'detection': DetectionEval, - 'pose_estimation': PoseEstimationEval, - 'face_verification': FaceVerificationEval, - 'instance_segmentation': InstanceSegmentationEval, - 'srgan': SRGANEval, - 'landmark_detection': EmptyEval, - 'face_landmark_detection': FaceLandmarkEval, - 'face_landmark_detection_3d': FaceLandmark3DEval, - 'head_pose_estimation': HeadPoseEstimationEval, - 'face_detection': FaceDetectionEval, - 'age_gender': AgeGenderEval, - 'multiple_object_tracking': MultipleObjectTrackingEval, - 'lane_detection': LaneDetectionEval, - '3d_detection': Detection3DEval, - 'empty': EmptyEval, - 'faster_rcnn_stage2': FasterRCNNEval, - 'ocr': OCREval, - 'person_reid': PersonReidEval, - 'person_attr': PersonAttrEval, - 'face_attr': PersonAttrEval, - 'single_person_pose_estimation': SinglePersonPoseEstimationEval, - 'super_resolution': SuperResolutionEval, - 'low_light_enhancement': LowLightEnhancementEval, - 'stereonet': StereoNetEval, - 'image_denoising': ImageDenoisingEval, - 'depth_estimation': DepthEstimationEval, - 'stable_diffusion_v2_decoder': ImagegenerationFromTextEval, - 'stable_diffusion_v2_unet': ImagegenerationFromTextEval, - } - - if name not in evaluation_map: - raise ValueError('Task name [{}] was not recognized'.format(name)) - return evaluation_map[name] + return EVAL_FACTORY.get(name) diff --git a/hailo_model_zoo/core/eval/face_detection_evaluation.py b/hailo_model_zoo/core/eval/face_detection_evaluation.py index a5650950..eea2ed7a 100644 --- a/hailo_model_zoo/core/eval/face_detection_evaluation.py +++ 
b/hailo_model_zoo/core/eval/face_detection_evaluation.py @@ -1,14 +1,17 @@ from collections import OrderedDict import numpy as np + from hailo_model_zoo.core.eval.eval_base_class import Eval from hailo_model_zoo.core.eval.widerface_evaluation_external.evaluation import ( - image_eval, img_pr_info, dataset_pr_info, voc_ap) + dataset_pr_info, image_eval, img_pr_info, voc_ap) +from hailo_model_zoo.core.factory import EVAL_FACTORY THRESH_NUM = 1000 IOU_THRESH = 0.5 +@EVAL_FACTORY.register(name="face_detection") class FaceDetectionEval(Eval): """ Widerface evaluation metric class. diff --git a/hailo_model_zoo/core/eval/face_landmark_evaluation.py b/hailo_model_zoo/core/eval/face_landmark_evaluation.py index 1fa8dbb7..7768d073 100644 --- a/hailo_model_zoo/core/eval/face_landmark_evaluation.py +++ b/hailo_model_zoo/core/eval/face_landmark_evaluation.py @@ -1,9 +1,12 @@ -import numpy as np from collections import OrderedDict +import numpy as np + from hailo_model_zoo.core.eval.eval_base_class import Eval +from hailo_model_zoo.core.factory import EVAL_FACTORY +@EVAL_FACTORY.register(name="face_landmark_detection") class FaceLandmarkEval(Eval): def __init__(self, **kwargs): self._metric_names = ['mse'] @@ -30,6 +33,7 @@ def _get_accuracy(self): return OrderedDict([(self._metric_names[0], self._metrics_vals[0])]) +@EVAL_FACTORY.register(name="face_landmark_detection_3d") class FaceLandmark3DEval(Eval): def __init__(self, **kwargs): self._metric_names = ['nme[0,90]', 'nme[0,30]', 'nme[30,60]', 'nme[60,90]'] diff --git a/hailo_model_zoo/core/eval/face_verification_evaluation.py b/hailo_model_zoo/core/eval/face_verification_evaluation.py index 0a238981..152c65c5 100644 --- a/hailo_model_zoo/core/eval/face_verification_evaluation.py +++ b/hailo_model_zoo/core/eval/face_verification_evaluation.py @@ -3,9 +3,11 @@ from collections import OrderedDict import numpy as np -from hailo_model_zoo.core.eval.eval_base_class import Eval from sklearn.model_selection import KFold +from 
hailo_model_zoo.core.eval.eval_base_class import Eval +from hailo_model_zoo.core.factory import EVAL_FACTORY + def _accuracy(threshold, dist, actual_issame): predict_issame = np.less(dist, threshold) @@ -22,6 +24,7 @@ def _distance(embeddings1, embeddings2): return dist +@EVAL_FACTORY.register(name="face_verification") class FaceVerificationEval(Eval): def __init__(self, **kwargs): self._metric_names = ['acc'] diff --git a/hailo_model_zoo/core/eval/faster_rcnn_evaluation.py b/hailo_model_zoo/core/eval/faster_rcnn_evaluation.py index 774cacd7..4844998c 100644 --- a/hailo_model_zoo/core/eval/faster_rcnn_evaluation.py +++ b/hailo_model_zoo/core/eval/faster_rcnn_evaluation.py @@ -1,14 +1,16 @@ -import numpy as np from collections import OrderedDict +import numpy as np from pycocotools.coco import COCO from pycocotools.cocoeval import COCOeval -from hailo_model_zoo.core.eval.eval_base_class import Eval from hailo_model_zoo.core.datasets.datasets_info import get_dataset_info +from hailo_model_zoo.core.eval.eval_base_class import Eval from hailo_model_zoo.core.eval.faster_rcnn_proposals_nms import FasterRCNNProposalsNMS +from hailo_model_zoo.core.factory import EVAL_FACTORY +@EVAL_FACTORY.register(name="faster_rcnn_stage2") class FasterRCNNEval(Eval): """COCO evaluation metric class.""" diff --git a/hailo_model_zoo/core/eval/head_pose_estimation_evaluation.py b/hailo_model_zoo/core/eval/head_pose_estimation_evaluation.py index ec895e5e..13ecfae3 100644 --- a/hailo_model_zoo/core/eval/head_pose_estimation_evaluation.py +++ b/hailo_model_zoo/core/eval/head_pose_estimation_evaluation.py @@ -1,9 +1,12 @@ -import numpy as np from collections import OrderedDict +import numpy as np + from hailo_model_zoo.core.eval.eval_base_class import Eval +from hailo_model_zoo.core.factory import EVAL_FACTORY +@EVAL_FACTORY.register(name="head_pose_estimation") class HeadPoseEstimationEval(Eval): def __init__(self, **kwargs): self._metric_names = ['mae', 'mae_yaw', 'mae_pitch', 'mae_roll'] 
diff --git a/hailo_model_zoo/core/eval/image_denoising_evaluation.py b/hailo_model_zoo/core/eval/image_denoising_evaluation.py index b88e5789..f049a691 100644 --- a/hailo_model_zoo/core/eval/image_denoising_evaluation.py +++ b/hailo_model_zoo/core/eval/image_denoising_evaluation.py @@ -1,9 +1,12 @@ -import numpy as np import math +import numpy as np + from hailo_model_zoo.core.eval.super_resolution_evaluation import SuperResolutionEval +from hailo_model_zoo.core.factory import EVAL_FACTORY +@EVAL_FACTORY.register(name="image_denoising") class ImageDenoisingEval(SuperResolutionEval): def update_op(self, net_output, gt_labels): net_output = self._parse_net_output(net_output) diff --git a/hailo_model_zoo/core/eval/image_generation_from_text_evaluation.py b/hailo_model_zoo/core/eval/image_generation_from_text_evaluation.py index 4f803756..ddd481e4 100644 --- a/hailo_model_zoo/core/eval/image_generation_from_text_evaluation.py +++ b/hailo_model_zoo/core/eval/image_generation_from_text_evaluation.py @@ -1,11 +1,16 @@ from collections import OrderedDict -from hailo_model_zoo.core.eval.eval_base_class import Eval + import numpy as np import torch -from torchmetrics.multimodal.clip_score import CLIPScore from torchmetrics.image.fid import FrechetInceptionDistance +from torchmetrics.multimodal.clip_score import CLIPScore + +from hailo_model_zoo.core.eval.eval_base_class import Eval +from hailo_model_zoo.core.factory import EVAL_FACTORY +@EVAL_FACTORY.register(name="stable_diffusion_v2_decoder") +@EVAL_FACTORY.register(name="stable_diffusion_v2_unet") class ImagegenerationFromTextEval(Eval): def __init__(self, **kwargs): self._metric_names = ['snr_db', 'clip_score', 'FID_score'] diff --git a/hailo_model_zoo/core/eval/instance_segmentation_evaluation.py b/hailo_model_zoo/core/eval/instance_segmentation_evaluation.py index 2a9ffe7b..4f014ede 100644 --- a/hailo_model_zoo/core/eval/instance_segmentation_evaluation.py +++ 
b/hailo_model_zoo/core/eval/instance_segmentation_evaluation.py @@ -1,15 +1,15 @@ -import numpy as np from collections import OrderedDict + +import numpy as np +from pycocotools import mask as maskUtils from pycocotools.coco import COCO from pycocotools.cocoeval import COCOeval -from pycocotools import mask as maskUtils -from hailo_model_zoo.core.eval.eval_base_class import Eval -from hailo_model_zoo.core.eval.instance_segmentation_evaluation_utils import (YolactEval, - Yolov5SegEval, - SparseInstEval) from hailo_model_zoo.core.datasets.datasets_info import get_dataset_info - +from hailo_model_zoo.core.eval.eval_base_class import Eval +from hailo_model_zoo.core.eval.instance_segmentation_evaluation_utils import ( + SparseInstEval, YolactEval, Yolov5SegEval) +from hailo_model_zoo.core.factory import EVAL_FACTORY EVAL_CLASS_MAP = { 'yolact': YolactEval, @@ -19,6 +19,7 @@ } +@EVAL_FACTORY.register(name="instance_segmentation") class InstanceSegmentationEval(Eval): """COCO evaluation metric class.""" @@ -191,7 +192,7 @@ def _evaluate_mask(self): mask_dets = gt_annotations.loadRes(self._mask_data) seg_eval = COCOeval(gt_annotations, mask_dets, 'segm') if self._labels_map == [0]: - # zero-shot instnace segmentation (segment anything) works on AR1000 + # zero-shot instance segmentation (segment anything) works on AR1000 # COCO default is [1, 10, 100] seg_eval.params.maxDets = [1, 10, 1000] self._metric_names[8] = 'mask ARmax1000' diff --git a/hailo_model_zoo/core/eval/lane_detection_evaluation.py b/hailo_model_zoo/core/eval/lane_detection_evaluation.py index 711fa656..bfe6a95f 100644 --- a/hailo_model_zoo/core/eval/lane_detection_evaluation.py +++ b/hailo_model_zoo/core/eval/lane_detection_evaluation.py @@ -1,11 +1,15 @@ -import numpy as np -from collections import OrderedDict import os -from hailo_model_zoo.core.eval.eval_base_class import Eval -from sklearn.linear_model import LinearRegression +from collections import OrderedDict + +import numpy as np from 
scipy.interpolate import CubicSpline +from sklearn.linear_model import LinearRegression + +from hailo_model_zoo.core.eval.eval_base_class import Eval +from hailo_model_zoo.core.factory import EVAL_FACTORY +@EVAL_FACTORY.register(name="lane_detection") class LaneDetectionEval(Eval): """lane evaluation metric class.""" diff --git a/hailo_model_zoo/core/eval/low_light_enhancement_evaluation.py b/hailo_model_zoo/core/eval/low_light_enhancement_evaluation.py index 81b7c669..85bd54f7 100644 --- a/hailo_model_zoo/core/eval/low_light_enhancement_evaluation.py +++ b/hailo_model_zoo/core/eval/low_light_enhancement_evaluation.py @@ -1,10 +1,14 @@ -import numpy as np from collections import OrderedDict +import numpy as np + from hailo_model_zoo.core.eval.eval_base_class import Eval +from hailo_model_zoo.core.factory import EVAL_FACTORY + # Calculation is based on: https://torchmetrics.readthedocs.io/en/stable/image/peak_signal_noise_ratio.html +@EVAL_FACTORY.register(name="low_light_enhancement") class LowLightEnhancementEval(Eval): def __init__(self, **kwargs): self._metric_names = ['PSNR'] diff --git a/hailo_model_zoo/core/eval/multiple_object_tracking_evaluation.py b/hailo_model_zoo/core/eval/multiple_object_tracking_evaluation.py index 4a2cb2c5..820c6611 100644 --- a/hailo_model_zoo/core/eval/multiple_object_tracking_evaluation.py +++ b/hailo_model_zoo/core/eval/multiple_object_tracking_evaluation.py @@ -1,9 +1,11 @@ from collections import OrderedDict + import numpy as np from hailo_model_zoo.core.eval.eval_base_class import Eval from hailo_model_zoo.core.eval.tracking_evaluation_external.mot_evaluator import Evaluator from hailo_model_zoo.core.eval.tracking_evaluation_external.tracking_classes import JDETracker +from hailo_model_zoo.core.factory import EVAL_FACTORY MIN_BOX_AREA = 200 @@ -16,6 +18,7 @@ REID_DIMENSION = 128 +@EVAL_FACTORY.register(name="multiple_object_tracking") class MultipleObjectTrackingEval(Eval): def __init__(self, **kwargs): self._video_trackers = 
{} diff --git a/hailo_model_zoo/core/eval/ocr_evaluation.py b/hailo_model_zoo/core/eval/ocr_evaluation.py index d2c7a222..bf35ccba 100644 --- a/hailo_model_zoo/core/eval/ocr_evaluation.py +++ b/hailo_model_zoo/core/eval/ocr_evaluation.py @@ -1,9 +1,13 @@ -import numpy as np from collections import OrderedDict + +import numpy as np + from hailo_model_zoo.core.eval.eval_base_class import Eval +from hailo_model_zoo.core.factory import EVAL_FACTORY from hailo_model_zoo.core.postprocessing.ocr_postprocessing import CHARS, greedy_decoder +@EVAL_FACTORY.register(name="ocr") class OCREval(Eval): def __init__(self, **kwargs): self._metric_names = ['Accuracy'] diff --git a/hailo_model_zoo/core/eval/person_attr_evaluation.py b/hailo_model_zoo/core/eval/person_attr_evaluation.py index b9c28759..99dd0095 100644 --- a/hailo_model_zoo/core/eval/person_attr_evaluation.py +++ b/hailo_model_zoo/core/eval/person_attr_evaluation.py @@ -1,9 +1,13 @@ -import numpy as np from collections import OrderedDict +import numpy as np + from hailo_model_zoo.core.eval.eval_base_class import Eval +from hailo_model_zoo.core.factory import EVAL_FACTORY +@EVAL_FACTORY.register(name="person_attr") +@EVAL_FACTORY.register(name="face_attr") class PersonAttrEval(Eval): def __init__(self, **kwargs): self._metric_names = ['Accuracy', 'Top1'] diff --git a/hailo_model_zoo/core/eval/person_reid_evaluation.py b/hailo_model_zoo/core/eval/person_reid_evaluation.py index 2197a79f..a0652c04 100644 --- a/hailo_model_zoo/core/eval/person_reid_evaluation.py +++ b/hailo_model_zoo/core/eval/person_reid_evaluation.py @@ -1,9 +1,12 @@ -import numpy as np from collections import OrderedDict +import numpy as np + from hailo_model_zoo.core.eval.eval_base_class import Eval +from hailo_model_zoo.core.factory import EVAL_FACTORY +@EVAL_FACTORY.register(name="person_reid") class PersonReidEval(Eval): def __init__(self, **kwargs): self._metric_names = ['rank1', 'rank5', 'mAP'] diff --git 
a/hailo_model_zoo/core/eval/pose_estimation_evaluation.py b/hailo_model_zoo/core/eval/pose_estimation_evaluation.py index cb521502..c1192f9d 100644 --- a/hailo_model_zoo/core/eval/pose_estimation_evaluation.py +++ b/hailo_model_zoo/core/eval/pose_estimation_evaluation.py @@ -1,10 +1,12 @@ import os -import numpy as np from collections import OrderedDict + +import numpy as np from pycocotools.coco import COCO from pycocotools.cocoeval import COCOeval from hailo_model_zoo.core.eval.eval_base_class import Eval +from hailo_model_zoo.core.factory import EVAL_FACTORY GT_LABELS_FILE = 'person_keypoints_val2017.json' @@ -21,6 +23,7 @@ ] +@EVAL_FACTORY.register(name="pose_estimation") class PoseEstimationEval(Eval): def __init__(self, **kwargs): diff --git a/hailo_model_zoo/core/eval/segmentation_evaluation.py b/hailo_model_zoo/core/eval/segmentation_evaluation.py index 0c15d13e..6672d29b 100644 --- a/hailo_model_zoo/core/eval/segmentation_evaluation.py +++ b/hailo_model_zoo/core/eval/segmentation_evaluation.py @@ -1,7 +1,9 @@ -import numpy as np from collections import OrderedDict +import numpy as np + from hailo_model_zoo.core.eval.eval_base_class import Eval +from hailo_model_zoo.core.factory import EVAL_FACTORY def confusion_matrix(y_true, y_pred, N): @@ -13,6 +15,7 @@ def confusion_matrix(y_true, y_pred, N): return y[:N - 1, :N - 1] +@EVAL_FACTORY.register(name="segmentation") class SegmentationEval(Eval): def __init__(self, *, labels_map=None, **kwargs): self._labels_offset = kwargs['labels_offset'] diff --git a/hailo_model_zoo/core/eval/single_person_pose_estimation_evaluation.py b/hailo_model_zoo/core/eval/single_person_pose_estimation_evaluation.py index a78f8173..7e5d4ef3 100644 --- a/hailo_model_zoo/core/eval/single_person_pose_estimation_evaluation.py +++ b/hailo_model_zoo/core/eval/single_person_pose_estimation_evaluation.py @@ -1,10 +1,12 @@ import os -import numpy as np from collections import OrderedDict + +import numpy as np from pycocotools.coco import 
COCO from pycocotools.cocoeval import COCOeval from hailo_model_zoo.core.eval.eval_base_class import Eval +from hailo_model_zoo.core.factory import EVAL_FACTORY GT_LABELS_FILE = 'person_keypoints_val2017.json' @@ -21,6 +23,7 @@ ] +@EVAL_FACTORY.register(name="single_person_pose_estimation") class SinglePersonPoseEstimationEval(Eval): def __init__(self, **kwargs): diff --git a/hailo_model_zoo/core/eval/srgan_evaluation.py b/hailo_model_zoo/core/eval/srgan_evaluation.py index ea9eb693..a07dd2b9 100644 --- a/hailo_model_zoo/core/eval/srgan_evaluation.py +++ b/hailo_model_zoo/core/eval/srgan_evaluation.py @@ -1,10 +1,13 @@ -import numpy as np from collections import OrderedDict + import cv2 +import numpy as np from hailo_model_zoo.core.eval.eval_base_class import Eval +from hailo_model_zoo.core.factory import EVAL_FACTORY +@EVAL_FACTORY.register(name="srgan") class SRGANEval(Eval): def __init__(self, **kwargs): self._metric_names = ['psnr', 'ssim'] diff --git a/hailo_model_zoo/core/eval/stereo_evaluation.py b/hailo_model_zoo/core/eval/stereo_evaluation.py index f0333920..7fe7bcdf 100644 --- a/hailo_model_zoo/core/eval/stereo_evaluation.py +++ b/hailo_model_zoo/core/eval/stereo_evaluation.py @@ -1,8 +1,12 @@ from collections import OrderedDict + import numpy as np + from hailo_model_zoo.core.eval.eval_base_class import Eval +from hailo_model_zoo.core.factory import EVAL_FACTORY +@EVAL_FACTORY.register(name="stereonet") class StereoNetEval(Eval): def __init__(self, **kwargs): self.TotalEPE = 0 diff --git a/hailo_model_zoo/core/eval/super_resolution_evaluation.py b/hailo_model_zoo/core/eval/super_resolution_evaluation.py index 5e32380f..d75c6576 100644 --- a/hailo_model_zoo/core/eval/super_resolution_evaluation.py +++ b/hailo_model_zoo/core/eval/super_resolution_evaluation.py @@ -1,9 +1,12 @@ -import numpy as np from collections import OrderedDict +import numpy as np + from hailo_model_zoo.core.eval.eval_base_class import Eval +from hailo_model_zoo.core.factory import 
EVAL_FACTORY +@EVAL_FACTORY.register(name="super_resolution") class SuperResolutionEval(Eval): def __init__(self, **kwargs): self._metric_names = ['psnr'] diff --git a/hailo_model_zoo/core/factory/__init__.py b/hailo_model_zoo/core/factory/__init__.py new file mode 100644 index 00000000..d444a42a --- /dev/null +++ b/hailo_model_zoo/core/factory/__init__.py @@ -0,0 +1,8 @@ +from hailo_model_zoo.utils.factory_utils import Factory + +DATASET_FACTORY = Factory("dataset_factory") +INFER_FACTORY = Factory("infer_factory") +PREPROCESS_FACTORY = Factory("preprocess_factory") +POSTPROCESS_FACTORY = Factory("postprocess_factory") +VISUALIZATION_FACTORY = Factory("visualization_factory") +EVAL_FACTORY = Factory("eval_factory") diff --git a/hailo_model_zoo/core/infer/infer_factory.py b/hailo_model_zoo/core/infer/infer_factory.py index 8da86ea2..c3af803d 100644 --- a/hailo_model_zoo/core/infer/infer_factory.py +++ b/hailo_model_zoo/core/infer/infer_factory.py @@ -1,42 +1,16 @@ """Contains a factory for network infer.""" -from hailo_model_zoo.core.infer.tf_infer import tf_infer -from hailo_model_zoo.core.infer.model_infer import model_infer -from hailo_model_zoo.core.infer.model_infer_lite import model_infer_lite -try: - # THIS CODE IS EXPERIMENTAL AND IN USE ONLY FOR TAPPAS VALIDATION - from hailo_model_zoo.core.infer.so_infer import so_infer -except ModuleNotFoundError: - so_infer = None -from hailo_model_zoo.core.infer.tf_infer_second_stage import tf_infer_second_stage -from hailo_model_zoo.core.infer.runner_infer import runner_infer -from hailo_model_zoo.core.infer.sd2_unet_model_infer import unet_infer - -NAME_TO_INFER = { - 'tf_infer': tf_infer, - 'runner_infer': runner_infer, - 'np_infer': lambda *args, **kwargs: model_infer(*args, **kwargs, np_infer=True), - 'facenet_infer': model_infer, - 'model_infer': model_infer, - 'model_infer_lite': model_infer_lite, - 'np_infer_lite': lambda *args, **kwargs: model_infer_lite(*args, **kwargs, np_infer=True), - 'so_infer': so_infer, 
- 'tf_infer_second_stage': tf_infer_second_stage, - 'sd2_unet_infer': unet_infer, -} +import importlib +import hailo_model_zoo.core.infer +from hailo_model_zoo.utils.plugin_utils import iter_namespace +from hailo_model_zoo.core.factory import INFER_FACTORY def get_infer(infer_type): - """ Returns infer_fn(endnodes, **kwargs) - Args: - name: The name of the task. - Returns: - infer_fn: A function that postprocesses a batch. + return INFER_FACTORY.get(infer_type) - Raises: - ValueError: If infer `name` is not recognized. - """ - if infer_type not in NAME_TO_INFER: - raise ValueError('infer key [%s] was not recognized' % infer_type) - - return NAME_TO_INFER[infer_type] +discovered_plugins = { + name: importlib.import_module(name) + for _, name, _ + in iter_namespace(hailo_model_zoo.core.infer) +} diff --git a/hailo_model_zoo/core/infer/infer_utils.py b/hailo_model_zoo/core/infer/infer_utils.py index 9fd4dec7..b2edc9ae 100644 --- a/hailo_model_zoo/core/infer/infer_utils.py +++ b/hailo_model_zoo/core/infer/infer_utils.py @@ -1,4 +1,5 @@ import os +from collections.abc import Mapping import cv2 import numpy as np @@ -111,6 +112,7 @@ def __exit__(self, exc_type, exc_value, exc_traceback): self.video_writer.release() def write(self, image, image_name): + image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR) self.video_writer.write(image) @@ -119,27 +121,57 @@ def _make_writer(info_per_image, video_outpath): writer = ImageSaver() else: ref_image = info_per_image[0]['img_orig'] - width, height = ref_image.shape[2], ref_image.shape[1] + width, height = ref_image.shape[-2], ref_image.shape[-3] writer = VideoWriter(width, height, video_outpath) return writer -def visualize(logits_batch, info_per_image, visualize_callback, video_outpath): - with _make_writer(info_per_image, video_outpath) as writer: - logits_per_image = get_logits_per_image(logits_batch) - for image_index, (image_logits, image_info) in enumerate(zip(logits_per_image, info_per_image)): - original_image = 
image_info['img_orig'] +class WriterHook: + def __init__(self, visualize_callback, video_outpath) -> None: + self.video_outpath = video_outpath + self.visualize_callback = visualize_callback + + self.writer = None + self.image_index = 0 + + def __enter__(self): + if self.writer: + self.writer.__enter__() + + return self + + def __exit__(self, exc_type, exc_value, exc_traceback): + if self.writer: + self.writer.__exit__(exc_type, exc_value, exc_traceback) + + def visualize(self, image_logits, image_info): + logits_per_image = get_logits_per_image(image_logits) + batch_size = len(logits_per_image) + # image_info could either be per_image list or dictionary with info of entire batch + if isinstance(image_info, Mapping): + image_info = [{k: v[i] for k, v in image_info.items()} for i in range(batch_size)] + if not self.writer: + self.writer = _make_writer(image_info, self.video_outpath) + + for image_index_in_batch, (image_logits, image_info) in enumerate(zip(logits_per_image, image_info)): + image_index = image_index_in_batch + self.image_index + original_image = image_info["img_orig"] original_image = to_numpy(original_image) - image_name = image_info.get('image_name', f'image{image_index}') + image_name = image_info.get("image_name", f"image{image_index}") image_name = to_numpy(image_name, decode=True) # Decode image if needed if type(original_image) is bytes: - original_image = cv2.imdecode(np.fromstring(original_image, dtype=np.uint8), - cv2.IMREAD_UNCHANGED) + original_image = cv2.imdecode(np.fromstring(original_image, dtype=np.uint8), cv2.IMREAD_UNCHANGED) original_image = np.expand_dims(original_image, axis=0) - image = visualize_callback(image_logits, original_image, img_info=image_info, image_name=image_name) - writer.write(image, image_name) + image = self.visualize_callback(image_logits, original_image, img_info=image_info, image_name=image_name) + self.writer.write(image, image_name) + self.image_index += image_index_in_batch + + +def 
visualize(logits_batch, info_per_image, visualize_callback, video_outpath): + with WriterHook(visualize_callback, video_outpath) as writer: + writer.visualize(logits_batch, info_per_image) def aggregate(elements): diff --git a/hailo_model_zoo/core/infer/model_infer.py b/hailo_model_zoo/core/infer/model_infer.py index a7d2972e..b3019b97 100644 --- a/hailo_model_zoo/core/infer/model_infer.py +++ b/hailo_model_zoo/core/infer/model_infer.py @@ -2,8 +2,10 @@ from tqdm import tqdm from hailo_model_zoo.core.infer.infer_utils import log_accuracy, write_results, aggregate, visualize, to_numpy +from hailo_model_zoo.core.factory import INFER_FACTORY +@INFER_FACTORY.register def model_infer(runner, context, logger, eval_num_examples, print_num_examples, batch_size, dataset, postprocessing_callback, eval_callback, visualize_callback, model_augmentation_callback, @@ -60,3 +62,11 @@ def predict_function(data): img_info_per_image = [x[1] for x in dataset] visualize(probs, img_info_per_image, visualize_callback, video_outpath) return accuracy + + +@INFER_FACTORY.register +def np_infer(*args, **kwargs): + return model_infer(*args, **kwargs, np_infer=True), + + +INFER_FACTORY.register(model_infer, name="facenet_infer") diff --git a/hailo_model_zoo/core/infer/model_infer_lite.py b/hailo_model_zoo/core/infer/model_infer_lite.py index d27678aa..47e6ca5e 100644 --- a/hailo_model_zoo/core/infer/model_infer_lite.py +++ b/hailo_model_zoo/core/infer/model_infer_lite.py @@ -1,9 +1,13 @@ +from contextlib import ExitStack + import tensorflow as tf from tqdm import tqdm -from hailo_model_zoo.core.infer.infer_utils import log_accuracy, aggregate, visualize, to_numpy +from hailo_model_zoo.core.factory import INFER_FACTORY +from hailo_model_zoo.core.infer.infer_utils import WriterHook, log_accuracy, to_numpy +@INFER_FACTORY.register def model_infer_lite(runner, context, logger, eval_num_examples, print_num_examples, batch_size, dataset, postprocessing_callback, eval_callback, visualize_callback, 
model_augmentation_callback, @@ -14,40 +18,47 @@ def model_infer_lite(runner, context, logger, eval_num_examples, print_num_examp if eval_num_examples: dataset = dataset.take(eval_num_examples) batched_dataset = dataset.batch(batch_size) - logger.info('Running inference...') - with context as ctx, tqdm(total=None, desc="Processed", unit="images", - disable=None if not print_num_examples < 1 else True) as pbar: + logger.info("Running inference...") + with ExitStack() as stack: + ctx = stack.enter_context(context) + pbar = stack.enter_context( + tqdm(total=None, desc="Processed", unit="images", disable=None if not print_num_examples < 1 else True) + ) model = runner.get_keras_model(ctx) model = model_augmentation_callback(model) + writer = None if not visualize_callback else WriterHook(visualize_callback, video_outpath) + if writer: + stack.enter_context(writer) @tf.function() def predict_function(data): return model(data, training=False) num_of_images = 0 - logits = [] - gt = [] + for preprocessed_data, img_info in batched_dataset: output_tensors = predict_function(preprocessed_data) if np_infer: output_tensors = to_numpy(output_tensors) img_info = to_numpy(img_info) logits_batch = postprocessing_callback(output_tensors, gt_images=img_info) - current_batch_size = (output_tensors[0].shape[0] if isinstance(output_tensors, list) - else output_tensors.shape[0]) + current_batch_size = ( + output_tensors[0].shape[0] if isinstance(output_tensors, list) else output_tensors.shape[0] + ) num_of_images += current_batch_size - if not visualize_callback: - if "img_orig" in img_info: - del img_info["img_orig"] - if "img_resized" in img_info: - del img_info["img_resized"] + if writer: + logits_batch = to_numpy(logits_batch) + image_info = to_numpy(img_info) + writer.visualize(logits_batch, image_info) + + if "img_orig" in img_info: + del img_info["img_orig"] + if "img_resized" in img_info: + del img_info["img_resized"] image_info = to_numpy(img_info) if not visualize_callback and 
not dump_results: logits_batch = to_numpy(logits_batch) eval_metric.update_op(logits_batch, image_info) - if visualize_callback: - logits.append(logits_batch) - gt.append(image_info) pbar.update(current_batch_size) accuracy = None @@ -56,8 +67,9 @@ def predict_function(data): accuracy = eval_metric.get_accuracy() log_accuracy(logger, num_of_images, accuracy) - if visualize_callback: - probs = {k: aggregate([p[k] for p in logits]) for k in logits[0].keys()} - img_info_per_image = [x[1] for x in dataset] - visualize(probs, img_info_per_image, visualize_callback, video_outpath) return accuracy + + +@INFER_FACTORY.register +def np_infer_lite(*args, **kwargs): + return model_infer_lite(*args, **kwargs, np_infer=True) diff --git a/hailo_model_zoo/core/infer/runner_infer.py b/hailo_model_zoo/core/infer/runner_infer.py index 476d12b0..677a20f6 100644 --- a/hailo_model_zoo/core/infer/runner_infer.py +++ b/hailo_model_zoo/core/infer/runner_infer.py @@ -1,7 +1,9 @@ from hailo_model_zoo.core.infer.infer_utils import log_accuracy, write_results, aggregate, visualize +from hailo_model_zoo.core.factory import INFER_FACTORY +@INFER_FACTORY.register def runner_infer(runner, context, logger, eval_num_examples, print_num_examples, batch_size, dataset, postprocessing_callback, eval_callback, visualize_callback, model_wrapper_callback, diff --git a/hailo_model_zoo/core/infer/sd2_unet_model_infer.py b/hailo_model_zoo/core/infer/sd2_unet_model_infer.py index 1686ae68..adc372a6 100644 --- a/hailo_model_zoo/core/infer/sd2_unet_model_infer.py +++ b/hailo_model_zoo/core/infer/sd2_unet_model_infer.py @@ -3,6 +3,8 @@ import numpy as np import onnxruntime from hailo_model_zoo.core.infer.infer_utils import log_accuracy, write_results, aggregate, visualize, to_numpy + +from hailo_model_zoo.core.factory import INFER_FACTORY from hailo_model_zoo.utils.path_resolver import resolve_data_path @@ -45,6 +47,7 @@ def scheduler_step(model_output, t, sample, step_index, s_churn=0.0, s_tmin=0.0, return 
prev_sample +@INFER_FACTORY.register(name="sd2_unet_infer") def unet_infer(runner, context, logger, eval_num_examples, print_num_examples, batch_size, dataset, postprocessing_callback, eval_callback, visualize_callback, model_augmentation_callback, diff --git a/hailo_model_zoo/core/infer/tf_infer.py b/hailo_model_zoo/core/infer/tf_infer.py index 921e96d4..20fef3b4 100644 --- a/hailo_model_zoo/core/infer/tf_infer.py +++ b/hailo_model_zoo/core/infer/tf_infer.py @@ -4,6 +4,7 @@ from tqdm import tqdm from PIL import Image +from hailo_model_zoo.core.factory import INFER_FACTORY from hailo_model_zoo.core.infer.infer_utils import log_accuracy, write_results, save_image, get_logits_per_image from hailo_sdk_client import SdkFineTune @@ -37,6 +38,7 @@ def _visualize(logits_batch, img_info, num_of_images, visualize_callback, video_ return video_writer +@INFER_FACTORY.register def tf_infer(runner, target, logger, eval_num_examples, print_num_examples, batch_size, data_feed_callback, tf_graph_callback, postprocessing_callback, eval_callback, visualize_callback, video_outpath, dump_results, results_path): diff --git a/hailo_model_zoo/core/infer/tf_infer_second_stage.py b/hailo_model_zoo/core/infer/tf_infer_second_stage.py index 759c9a6f..40828a90 100644 --- a/hailo_model_zoo/core/infer/tf_infer_second_stage.py +++ b/hailo_model_zoo/core/infer/tf_infer_second_stage.py @@ -1,8 +1,10 @@ import tensorflow as tf from hailo_model_zoo.core.infer.infer_utils import log_accuracy +from hailo_model_zoo.core.factory import INFER_FACTORY +@INFER_FACTORY.register def tf_infer_second_stage(runner, target, logger, eval_num_examples, print_num_examples, batch_size, data_feed_callback, tf_graph_callback, postprocessing_callback, eval_callback, visualize_callback, video_outpath, dump_results, results_path): diff --git a/hailo_model_zoo/core/info_utils.py b/hailo_model_zoo/core/info_utils.py index 700552cd..d9801739 100644 --- a/hailo_model_zoo/core/info_utils.py +++ 
b/hailo_model_zoo/core/info_utils.py @@ -22,12 +22,12 @@ def _load_cfg(cfg_file): return config -def get_network_info(model_name, read_only=False, yaml_path=None): +def get_network_info(model_name, read_only=False, yaml_path=None, nodes=None): ''' Args: model_name: The network name to load. read_only: If set return read-only object. - The read_only mode save run-time and memroy. + The read_only mode save run-time and memory. yaml_path: Path to external YAML file for network configuration Return: OmegaConf object that represent network configuration. @@ -39,6 +39,10 @@ def get_network_info(model_name, read_only=False, yaml_path=None): if not cfg_path.is_file(): raise ValueError('cfg file is missing in {}'.format(cfg_path)) cfg = _load_cfg(cfg_path) + if nodes and nodes[0] != '': + cfg.parser.nodes[0] = nodes[0] + if nodes and nodes[1] != '': + cfg.parser.nodes[1] = nodes[1] if read_only: OmegaConf.set_readonly(cfg, True) return cfg diff --git a/hailo_model_zoo/core/main_utils.py b/hailo_model_zoo/core/main_utils.py index 31615d45..777d2793 100644 --- a/hailo_model_zoo/core/main_utils.py +++ b/hailo_model_zoo/core/main_utils.py @@ -1,3 +1,4 @@ +import json from pathlib import Path from omegaconf.listconfig import ListConfig @@ -24,11 +25,8 @@ } -def _get_input_shape(runner, network_info): - return ( - network_info.preprocessing.input_shape - or runner.get_hn_model().get_input_shapes(ignore_conversion=True)[0][1:] - ) +def _get_input_shape(runner): + return runner.get_native_hn_model().get_input_shapes(ignore_conversion=True)[0][1:3] def _get_output_shapes(runner): @@ -124,22 +122,37 @@ def load_model(runner, har_path, logger): runner.load_har(har_path) -def get_input_modifications(runner, network_info): +def get_input_modifications(runner, network_info, input_conversion_args=None, resize_args=None): + def _is_yuv2rgb(conversion_type): + return conversion_type in ['yuv_to_rgb', 'yuy2_to_rgb', 'nv12_to_rgb'] + + def _is_yuv2(conversion_type): + return conversion_type 
in ['yuy2_to_hailo_yuv', 'yuy2_to_rgb'] + + def _is_nv12(conversion_type): + return conversion_type in ['nv12_to_hailo_yuv', 'nv12_to_rgb'] + + def _is_rgbx(conversion_type): + return conversion_type == 'tf_rgbx_to_hailo_rgb' + hn_editor = network_info.hn_editor - yuv2rgb = hn_editor.yuv2rgb - yuy2 = hn_editor.yuy2 - nv12 = hn_editor.nv12 - rgbx = hn_editor.rgbx + yuv2rgb = hn_editor.yuv2rgb if not input_conversion_args else _is_yuv2rgb(input_conversion_args) + yuy2 = hn_editor.yuy2 if not input_conversion_args else _is_yuv2(input_conversion_args) + nv12 = hn_editor.nv12 if not input_conversion_args else _is_nv12(input_conversion_args) + rgbx = hn_editor.rgbx if not input_conversion_args else _is_rgbx(input_conversion_args) + if resize_args: + hn_editor.input_resize.enabled = True + hn_editor.input_resize.input_shape = [*resize_args] input_resize = hn_editor.input_resize for configs in runner.modifications_meta_data.inputs.values(): for config in configs: if config.cmd_type == 'input_conversion': if config.emulate_conversion: - yuv2rgb = yuv2rgb or config.conversion_type.value in ['yuv_to_rgb', 'yuy2_to_rgb', 'nv12_to_rgb'] - yuy2 = yuy2 or config.conversion_type.value in ['yuy2_to_hailo_yuv', 'yuy2_to_rgb'] - nv12 = nv12 or config.conversion_type.value in ['nv12_to_hailo_yuv', 'nv12_to_rgb'] - rgbx = rgbx or config.conversion_type.value == 'tf_rgbx_to_hailo_rgb' + yuv2rgb = yuv2rgb or _is_yuv2rgb(config.conversion_type.value) + yuy2 = yuy2 or _is_yuv2(config.conversion_type.value) + nv12 = nv12 or _is_nv12(config.conversion_type.value) + rgbx = rgbx or _is_rgbx(config.conversion_type.value) elif config.cmd_type == 'resize': input_resize['enabled'] = True input_resize['input_shape'] = [config.output_shape[1], config.output_shape[2]] @@ -147,18 +160,19 @@ def get_input_modifications(runner, network_info): return yuv2rgb, yuy2, nv12, rgbx, input_resize -def make_preprocessing(runner, network_info): +def make_preprocessing(runner, network_info, 
input_conversion_args=None, resize_args=None): preprocessing_args = network_info.preprocessing meta_arch = preprocessing_args.get('meta_arch') - yuv2rgb, yuy2, nv12, rgbx, input_resize = get_input_modifications(runner, network_info) + yuv2rgb, yuy2, nv12, rgbx, input_resize = get_input_modifications(runner, network_info, input_conversion_args, + resize_args) normalize_in_net, mean_list, std_list = get_normalization_params(network_info) normalization_params = [mean_list, std_list] if not normalize_in_net else None - height, width, _ = _get_input_shape(runner, network_info) + height, width = _get_input_shape(runner) flip = runner.get_hn_model().is_transposed() preproc_callback = preprocessing_factory.get_preprocessing( meta_arch, height=height, width=width, flip=flip, yuv2rgb=yuv2rgb, yuy2=yuy2, nv12=nv12, rgbx=rgbx, input_resize=input_resize, normalization_params=normalization_params, output_shapes=_get_output_shapes(runner), - **preprocessing_args) + network_name=network_info.network.network_name, **preprocessing_args) return preproc_callback @@ -212,8 +226,50 @@ def make_calibset_callback(network_info, preproc_callback, override_path): return lambda: data_feed_cb().dataset -def optimize_full_precision_model(runner, model_script, resize, input_conversion): +def _handle_classes_argument(runner, logger, classes): + script_commands = runner.model_script.split('\n') + nms_idx = ['nms_postprocess' in x for x in script_commands] + if not any(nms_idx): + logger.warning('Ignoring classes parameter since the model has no NMS post-process.') + return + + nms_idx = nms_idx.index(True) + nms_command = script_commands[nms_idx] + nms_args = nms_command[:-1].split("(", 1)[-1].split(", ") + arg_to_append = f'classes={classes}' + if 'classes' in nms_command: + classes_idx = ['classes' in x for x in nms_args].index(True) + nms_args.pop(classes_idx) + elif '.json' in nms_command: + # Duplicate the config file, edit the classes and update the path in the command. 
+ path_idx = ['.json' in x for x in nms_args].index(True) + orig_path = nms_args[path_idx].split('=')[-1].replace('"', '').replace("'", "") + with open(orig_path, 'r') as f: + nms_cfg = json.load(f) + nms_cfg['classes'] = classes + tmp_path = f"{orig_path.split('.json')[0]}_tmp.json" + with open(tmp_path, 'w') as f: + json.dump(nms_cfg, f, indent=4) + nms_args.pop(path_idx) + arg_to_append = f'config_path="{tmp_path}"' + + nms_args.append(arg_to_append) + script_commands[nms_idx] = f'nms_postprocess({", ".join(nms_args)})' + runner.load_model_script("\n".join(script_commands)) + + +def prepare_calibration_data(runner, network_info, calib_path, logger, input_conversion_args=None, resize_args=None): + logger.info('Preparing calibration data...') + preproc_callback = make_preprocessing(runner, network_info, input_conversion_args, resize_args) + calib_feed_callback = make_calibset_callback(network_info, preproc_callback, calib_path) + return calib_feed_callback + + +def optimize_full_precision_model(runner, calib_feed_callback, logger, model_script, resize, input_conversion, + classes): runner.load_model_script(model_script) + if classes is not None: + _handle_classes_argument(runner, logger, classes) input_layers = runner.get_hn_model().get_input_layers() scope_name = input_layers[0].scope if resize is not None: @@ -230,18 +286,13 @@ def optimize_full_precision_model(runner, model_script, resize, input_conversion f'{", ".join(conversion_layers)} = input_conversion({hailo_conversion_type}, emulator_support=True)', append=True ) - runner.optimize_full_precision() + runner.optimize_full_precision(calib_data=calib_feed_callback) -def optimize_model(runner, logger, network_info, calib_path, results_dir, model_script, resize=None, - input_conversion=None): - optimize_full_precision_model(runner, model_script=model_script, resize=resize, input_conversion=input_conversion) +def optimize_model(runner, calib_feed_callback, logger, network_info, results_dir, model_script, 
resize=None, + input_conversion=None, classes=None): + optimize_full_precision_model(runner, calib_feed_callback, logger, model_script, resize, input_conversion, classes) - logger.info('Preparing calibration data...') - preproc_callback = make_preprocessing(runner, network_info) - calib_feed_callback = make_calibset_callback(network_info, - preproc_callback=preproc_callback, - override_path=calib_path) runner.optimize(calib_feed_callback) model_name = network_info.network.network_name @@ -268,7 +319,7 @@ def visualize_callback(logits, image, **kwargs): def _gather_postprocessing_dictionary(runner, network_info): - height, width, _ = _get_input_shape(runner, network_info) + height, width = _get_input_shape(runner) postproc_info = dict(img_dims=(height, width)) postproc_info.update(network_info.hn_editor) postproc_info.update(network_info.evaluation) @@ -299,7 +350,7 @@ def make_eval_callback(network_info, runner): gt_json_path = network_info.evaluation.gt_json_path meta_arch = network_info.evaluation.meta_arch gt_json_path = path_resolver.resolve_data_path(gt_json_path) if gt_json_path else None - input_shape = _get_input_shape(runner, network_info) + input_shape = _get_input_shape(runner) eval_args = dict( net_name=net_name, network_type=network_type, @@ -353,11 +404,12 @@ def make_infer_callback(network_info, use_lite_inference): def infer_model_tf2(runner, network_info, target, logger, eval_num_examples, data_path, batch_size, print_num_examples=256, visualize_results=False, - video_outpath=None, use_lite_inference=False, dump_results=False): + video_outpath=None, use_lite_inference=False, dump_results=False, input_conversion_args=None, + resize_args=None): logger.info('Initializing the dataset ...') if eval_num_examples: eval_num_examples = eval_num_examples + network_info.evaluation.data_count_offset - preproc_callback = make_preprocessing(runner, network_info) + preproc_callback = make_preprocessing(runner, network_info, input_conversion_args, resize_args) # 
we do not pass batch_size, batching is now done in infer_callback dataset = make_evalset_callback(network_info, preproc_callback, data_path) # TODO refactor @@ -407,10 +459,15 @@ def get_hef_path(results_dir, model_name): return results_dir.joinpath(f"{model_name}.hef") -def compile_model(runner, network_info, results_dir, allocator_script_filename): +def compile_model(runner, network_info, results_dir, allocator_script_filename, performance=False): model_name = network_info.network.network_name + model_script_parent = None if allocator_script_filename is not None: runner.load_model_script(allocator_script_filename) + model_script_parent = allocator_script_filename.parent.name + if performance: + if model_script_parent == 'generic' or allocator_script_filename is None: + runner.load_model_script("performance_param(compiler_optimization_level=max)", append=True) hef = runner.compile() with open(get_hef_path(results_dir, model_name), "wb") as hef_out_file: diff --git a/hailo_model_zoo/core/postprocessing/age_gender_postprocessing.py b/hailo_model_zoo/core/postprocessing/age_gender_postprocessing.py index 4960137f..558620c8 100644 --- a/hailo_model_zoo/core/postprocessing/age_gender_postprocessing.py +++ b/hailo_model_zoo/core/postprocessing/age_gender_postprocessing.py @@ -1,10 +1,11 @@ -import tensorflow as tf import numpy as np +import tensorflow as tf +from PIL import Image, ImageDraw -from PIL import Image -from PIL import ImageDraw +from hailo_model_zoo.core.factory import POSTPROCESS_FACTORY, VISUALIZATION_FACTORY +@POSTPROCESS_FACTORY.register(name="age_gender") def age_gender_postprocessing(endnodes, device_pre_post_layers, **kwargs): age_predictions, gender_predictions = endnodes @@ -30,6 +31,7 @@ def age_gender_postprocessing(endnodes, device_pre_post_layers, **kwargs): return {'age': res_age, 'is_male': is_male} +@VISUALIZATION_FACTORY.register(name="age_gender") def visualize_age_gender_result(logits, img, **kwargs): gender = 'Male' if 
logits['is_male'][0] else 'Female' img_orig = Image.fromarray(img[0]) diff --git a/hailo_model_zoo/core/postprocessing/centerpose_postprocessing.py b/hailo_model_zoo/core/postprocessing/centerpose_postprocessing.py index a013f858..eb5faa07 100644 --- a/hailo_model_zoo/core/postprocessing/centerpose_postprocessing.py +++ b/hailo_model_zoo/core/postprocessing/centerpose_postprocessing.py @@ -158,13 +158,13 @@ def centerpose_postprocessing(endnodes, device_pre_post_layers=None, gt_images=N center_heatmap = _nms(center_heatmap) joint_heatmap = _nms(joint_heatmap) - bboxes, scores, keypoints, joint_scores = tf.py_function(_centerpose_postprocessing, - [center_heatmap, center_wh, - joint_heatmap, center_offset, - joint_center_offset, joint_offset, - gt_images["center"], gt_images["scale"]], - [tf.float32, tf.float32, tf.float32, tf.float32], - name='centerpose_postprocessing') + bboxes, scores, keypoints, joint_scores = tf.numpy_function(_centerpose_postprocessing, + [center_heatmap, center_wh, + joint_heatmap, center_offset, + joint_center_offset, joint_offset, + gt_images["center"], gt_images["scale"]], + [tf.float64, tf.float32, tf.float64, tf.float64], + name='centerpose_postprocessing') return { 'bboxes': bboxes, 'scores': scores, diff --git a/hailo_model_zoo/core/postprocessing/classification_postprocessing.py b/hailo_model_zoo/core/postprocessing/classification_postprocessing.py index 3418e771..46b845db 100644 --- a/hailo_model_zoo/core/postprocessing/classification_postprocessing.py +++ b/hailo_model_zoo/core/postprocessing/classification_postprocessing.py @@ -1,10 +1,11 @@ -import tensorflow as tf -import numpy as np -import os import json -from PIL import Image -from PIL import ImageDraw +import os + +import numpy as np +import tensorflow as tf +from PIL import Image, ImageDraw +from hailo_model_zoo.core.factory import POSTPROCESS_FACTORY, VISUALIZATION_FACTORY from hailo_model_zoo.utils import path_resolver @@ -20,6 +21,8 @@ def _is_logits_shape_allowed(shape, 
classes): return False +@POSTPROCESS_FACTORY.register(name="person_attr") +@POSTPROCESS_FACTORY.register(name="classification") def classification_postprocessing(endnodes, device_pre_post_layers=None, **kwargs): if device_pre_post_layers is not None and device_pre_post_layers['softmax']: probs = endnodes @@ -39,6 +42,8 @@ def _get_imagenet_labels(): return imagenet_names[1:] +@VISUALIZATION_FACTORY.register(name="classification") +@VISUALIZATION_FACTORY.register(name="zero_shot_classification") def visualize_classification_result(logits, img, **kwargs): logits = logits['predictions'] # TODO: SDK-32906 (wrong shape for classifiers) remove this when sdk is fixed @@ -54,6 +59,7 @@ def visualize_classification_result(logits, img, **kwargs): return np.array(img_orig, np.uint8) +@POSTPROCESS_FACTORY.register(name="zero_shot_classification") def zero_shot_classification_postprocessing(endnodes, device_pre_post_layers=None, **kwargs): endnodes /= tf.norm(endnodes, keepdims=True, axis=-1) path = path_resolver.resolve_data_path(kwargs['postprocess_config_file']) diff --git a/hailo_model_zoo/core/postprocessing/depth_estimation_postprocessing.py b/hailo_model_zoo/core/postprocessing/depth_estimation_postprocessing.py index 89210c62..cc05109b 100644 --- a/hailo_model_zoo/core/postprocessing/depth_estimation_postprocessing.py +++ b/hailo_model_zoo/core/postprocessing/depth_estimation_postprocessing.py @@ -1,8 +1,11 @@ import io -import numpy as np + import matplotlib.pyplot as plt +import numpy as np import tensorflow as tf +from hailo_model_zoo.core.factory import POSTPROCESS_FACTORY, VISUALIZATION_FACTORY + def scdepthv3_postprocessing(logits): depth = tf.math.reciprocal(tf.math.sigmoid(logits) * 10 + 0.009) @@ -24,6 +27,7 @@ def mono_depth_postprocessing(endnodes): return {'predictions': depth * depth_scale_factor} +@VISUALIZATION_FACTORY.register(name="depth_estimation") def visualize_depth_estimation_result(logits, image, **kwargs): logits = logits['predictions'] image = 
np.array(image, dtype=np.uint8) @@ -58,6 +62,7 @@ def _get_postprocessing_function(meta_arch): raise ValueError("Meta-architecture [{}] is not supported".format(meta_arch)) +@POSTPROCESS_FACTORY.register(name="depth_estimation") def depth_estimation_postprocessing(endnodes, device_pre_post_layers=None, **kwargs): meta_arch = kwargs["meta_arch"].lower() postprocess = _get_postprocessing_function(meta_arch) diff --git a/hailo_model_zoo/core/postprocessing/detection/centernet.py b/hailo_model_zoo/core/postprocessing/detection/centernet.py index 216b3180..4cd89170 100644 --- a/hailo_model_zoo/core/postprocessing/detection/centernet.py +++ b/hailo_model_zoo/core/postprocessing/detection/centernet.py @@ -100,9 +100,9 @@ def _generate_boxes(self, probs, coors, classes, widths, offsets, output_height, bb_probs = pad_list(bb_probs, required_zero_detections_padding) bb_classes = pad_list(bb_classes, required_zero_detections_padding) bb_boxes = pad_list(bb_boxes, required_zero_detections_padding) - return np.expand_dims(np.array(bb_probs), 0).astype('float32'),\ - np.expand_dims(np.array(bb_classes), 0).astype('int32'),\ - np.expand_dims(np.array(bb_boxes), 0).astype('float32'),\ + return np.expand_dims(np.array(bb_probs), 0).astype('float32'), \ + np.expand_dims(np.array(bb_classes), 0).astype('int32'), \ + np.expand_dims(np.array(bb_boxes), 0).astype('float32'), \ (np.ones([1]) * num_detections).astype('int32') def _centernet_postprocessing(self, box_widths, box_offsets, sparse_probs, **kwargs): diff --git a/hailo_model_zoo/core/postprocessing/detection/nanodet.py b/hailo_model_zoo/core/postprocessing/detection/nanodet.py index 5c374d16..7b52b4a0 100644 --- a/hailo_model_zoo/core/postprocessing/detection/nanodet.py +++ b/hailo_model_zoo/core/postprocessing/detection/nanodet.py @@ -22,7 +22,7 @@ def __init__(self, img_dims=(416, 416), nms_iou_thresh=0.6, labels_offset=0, self._network_arch = meta_arch self._nms_max_output_per_class = 100 if nms_max_output_per_class is None 
else nms_max_output_per_class self._nms_max_output = 100 if post_nms_topk is None else post_nms_topk - self.hpp = kwargs.get("hpp", False) + self._hpp = kwargs.get("hpp", False) self._split = { 'nanodet': self.nanodet_decode, 'nanodet_split': self.split_decode, @@ -108,20 +108,19 @@ def _box_decoding(self, raw_boxes): return tf.expand_dims(boxes, axis=2) def postprocessing(self, endnodes, *, device_pre_post_layers, **kwargs): - if self.hpp: - return tf_postproc_nms(endnodes, - labels_offset=kwargs['labels_offset'], - score_threshold=0.0, - coco_2017_to_2014=True) - - scores, raw_boxes = self._get_scores_boxes(endnodes) - - # decode score/class - if not device_pre_post_layers.sigmoid: - scores = tf.sigmoid(scores) - - # decode boxes - boxes = self._box_decoding(raw_boxes) + if self._hpp: + if kwargs.get('bbox_decoding_only', False): + endnodes = tf.squeeze(endnodes, axis=1) + boxes, scores = tf.split(endnodes, [4, self._num_classes], axis=-1) + boxes = tf.expand_dims(boxes, axis=2) + else: + return tf_postproc_nms(endnodes, labels_offset=kwargs['labels_offset'], score_threshold=0.0, + coco_2017_to_2014=True) + else: + scores, raw_boxes = self._get_scores_boxes(endnodes) + scores = tf.sigmoid(scores) if not device_pre_post_layers.sigmoid else scores + # decode boxes + boxes = self._box_decoding(raw_boxes) # nms (nmsed_boxes, nmsed_scores, nmsed_classes, num_detections) = \ @@ -137,7 +136,7 @@ def translate_coco_2017_to_2014(nmsed_classes): return np.vectorize(COCO_2017_TO_2014_TRANSLATION.get)(nmsed_classes).astype(np.int32) nmsed_classes = tf.cast(tf.add(nmsed_classes, self._labels_offset), tf.int16) - [nmsed_classes] = tf.py_function(translate_coco_2017_to_2014, [nmsed_classes], ['int32']) + [nmsed_classes] = tf.numpy_function(translate_coco_2017_to_2014, [nmsed_classes], ['int32']) nmsed_classes.set_shape((1, 100)) return {'detection_boxes': nmsed_boxes, diff --git a/hailo_model_zoo/core/postprocessing/detection/ssd_mlperf_tf.py 
b/hailo_model_zoo/core/postprocessing/detection/ssd_mlperf_tf.py index 6fe8f7ab..3428a675 100644 --- a/hailo_model_zoo/core/postprocessing/detection/ssd_mlperf_tf.py +++ b/hailo_model_zoo/core/postprocessing/detection/ssd_mlperf_tf.py @@ -198,7 +198,7 @@ def filter_bboxes(scores_pred, xmin, ymin, xmax, ymax, min_size, name): height = ymax - ymin filter_mask = tf.logical_and(width > min_size, height > min_size) filter_mask = tf.cast(filter_mask, tf.float32) - return tf.multiply(xmin, filter_mask), tf.multiply(ymin, filter_mask),\ + return tf.multiply(xmin, filter_mask), tf.multiply(ymin, filter_mask), \ tf.multiply(xmax, filter_mask), tf.multiply(ymax, filter_mask), tf.multiply(scores_pred, filter_mask) @@ -209,8 +209,8 @@ def sort_bboxes(scores_pred, xmin, ymin, xmax, ymax, keep_topk, name): xmin, ymin, xmax, ymax = tf.gather(xmin, idxes), tf.gather( ymin, idxes), tf.gather(xmax, idxes), tf.gather(ymax, idxes) paddings_scores = tf.expand_dims(tf.stack([0, tf.maximum(keep_topk - cur_bboxes, 0)], axis=0), axis=0) - return tf.pad(xmin, paddings_scores, "CONSTANT"), tf.pad(ymin, paddings_scores, "CONSTANT"),\ - tf.pad(xmax, paddings_scores, "CONSTANT"), tf.pad(ymax, paddings_scores, "CONSTANT"),\ + return tf.pad(xmin, paddings_scores, "CONSTANT"), tf.pad(ymin, paddings_scores, "CONSTANT"), \ + tf.pad(xmax, paddings_scores, "CONSTANT"), tf.pad(ymax, paddings_scores, "CONSTANT"), \ tf.pad(scores, paddings_scores, "CONSTANT") @@ -308,7 +308,7 @@ def parse_bboxes_fn(x): num_detections = tf.concat(pred_results[3], axis=0) xmin, ymin, xmax, ymax = tf.unstack(detection_bboxes, axis=-1) detection_bboxes = tf.stack([ymin, xmin, ymax, xmax], axis=-1) - [detection_classes] = tf.py_function(translate_coco_2017_to_2014, [detection_classes], ['int32']) + [detection_classes] = tf.numpy_function(translate_coco_2017_to_2014, [detection_classes], ['int32']) predictions['detection_classes'] = detection_classes predictions['detection_scores'] = detection_scores 
predictions['detection_boxes'] = detection_bboxes diff --git a/hailo_model_zoo/core/postprocessing/detection/yolo.py b/hailo_model_zoo/core/postprocessing/detection/yolo.py index 5c6f21e7..38ba0607 100644 --- a/hailo_model_zoo/core/postprocessing/detection/yolo.py +++ b/hailo_model_zoo/core/postprocessing/detection/yolo.py @@ -15,7 +15,6 @@ class YoloPostProc(object): def __init__(self, img_dims=(608, 608), nms_iou_thresh=0.45, score_threshold=0.01, anchors=None, output_scheme=None, classes=80, labels_offset=0, meta_arch="yolo_v3", should_clip=True, **kwargs): - self._network_arch = meta_arch self._image_dims = img_dims self._nms_iou_thresh = nms_iou_thresh @@ -77,11 +76,7 @@ def _yolo6_decode(raw_box_centers, raw_box_scales, objness, class_pred, anchors_ box_scales = (x2y2 - x1y1) * stride return box_centers, box_scales, objness, class_pred - def iou_nms(self, endnodes): - endnodes = tf.transpose(endnodes, [0, 3, 1, 2]) - detection_boxes = endnodes[:, :, :, :4] - detection_scores = tf.squeeze(endnodes[:, :, :, 4:], axis=3) - + def iou_nms(self, detection_boxes, detection_scores): (nmsed_boxes, nmsed_scores, nmsed_classes, num_detections) = \ combined_non_max_suppression(boxes=detection_boxes, scores=detection_scores, @@ -91,7 +86,7 @@ def iou_nms(self, endnodes): max_total_size=100) nmsed_classes = tf.cast(tf.add(nmsed_classes, self._labels_offset), tf.int16) - [nmsed_classes] = tf.py_function(translate_coco_2017_to_2014, [nmsed_classes], ['int32']) + [nmsed_classes] = tf.numpy_function(translate_coco_2017_to_2014, [nmsed_classes], ['int32']) return {'detection_boxes': nmsed_boxes, 'detection_scores': nmsed_scores, 'detection_classes': nmsed_classes, @@ -161,7 +156,7 @@ def translate_coco_2017_to_2014(nmsed_classes): return np.vectorize(COCO_2017_TO_2014_TRANSLATION.get)(nmsed_classes).astype(np.int32) nmsed_classes = tf.cast(tf.add(nmsed_classes, self._labels_offset), tf.int16) - [nmsed_classes] = tf.py_function(translate_coco_2017_to_2014, [nmsed_classes], 
['int32']) + [nmsed_classes] = tf.numpy_function(translate_coco_2017_to_2014, [nmsed_classes], ['int32']) nmsed_classes.set_shape((BS, 100)) return {'detection_boxes': nmsed_boxes, @@ -257,9 +252,9 @@ def reorganize_split_output(self, endnodes): scales = endnodes[branch_index + 1] obj = endnodes[branch_index + 2] probs = endnodes[branch_index + 3] - branch_endnodes = tf.py_function(self.reorganize_split_output_numpy, - [centers, scales, obj, probs], - ['float32'], name='yolov3_match_remodeled_output') + branch_endnodes = tf.numpy_function(self.reorganize_split_output_numpy, + [centers, scales, obj, probs], + ['float32'], name='yolov3_match_remodeled_output') reorganized_endnodes_list.append(branch_endnodes[0]) # because the py_func returns a list return reorganized_endnodes_list @@ -267,7 +262,7 @@ def reorganize_split_output(self, endnodes): def reorganize_split_output_numpy(self, centers, scales, obj, probs): num_anchors = len(self._anchors_list[0]) // 2 # the ith element in anchors_list is a list for the x,y # anchor values in the ith output layer (stride) - if obj.shape == [1, 1, 1, 2]: # yolov6 + if obj.shape == (1, 1, 1, 2): # yolov6 # Convert dummy to a ones of shape [B, h, w, 1] for objectness obj = np.ones((list(probs.shape[:3]) + [1]), dtype=np.float32) for anchor in range(num_anchors): @@ -298,7 +293,7 @@ def translate_coco_2017_to_2014(nmsed_classes): num_detections = tf.reduce_sum(tf.cast(detection_scores > 0, dtype=tf.int32), axis=1) nmsed_classes = tf.cast(tf.add(detection_classes, self._labels_offset), tf.int16) - [nmsed_classes] = tf.py_function(translate_coco_2017_to_2014, [nmsed_classes], ['int32']) + [nmsed_classes] = tf.numpy_function(translate_coco_2017_to_2014, [nmsed_classes], ['int32']) return {'detection_boxes': detection_boxes, 'detection_scores': detection_scores, @@ -307,11 +302,20 @@ def translate_coco_2017_to_2014(nmsed_classes): def postprocessing(self, endnodes, **kwargs): if self.hpp: - return tf_postproc_nms(endnodes, - 
labels_offset=kwargs['labels_offset'], - score_threshold=0.0, + if kwargs.get('bbox_decoding_only', False): + # extracts the boxes and scores from the concatenated output tensor then applies NMS + endnodes = tf.squeeze(endnodes, axis=1) + detection_boxes = endnodes[:, :, None, :4] # (B, 100, 1, 4) + # multiplies the class scores by the objectness + detection_scores = endnodes[..., 4:5] * endnodes[..., 5:] + return self.iou_nms(detection_boxes, detection_scores) + + return tf_postproc_nms(endnodes, labels_offset=kwargs['labels_offset'], score_threshold=0.0, coco_2017_to_2014=True) if self._nms_on_device: - return self.iou_nms(endnodes) + endnodes = tf.transpose(endnodes, [0, 3, 1, 2]) + detection_boxes = endnodes[:, :, :, :4] + detection_scores = tf.squeeze(endnodes[:, :, :, 4:], axis=3) + return self.iou_nms(detection_boxes, detection_scores) else: return self.yolo_postprocessing(endnodes, **kwargs) diff --git a/hailo_model_zoo/core/postprocessing/detection_3d_postprocessing.py b/hailo_model_zoo/core/postprocessing/detection_3d_postprocessing.py index fff6fda0..6e9c0635 100644 --- a/hailo_model_zoo/core/postprocessing/detection_3d_postprocessing.py +++ b/hailo_model_zoo/core/postprocessing/detection_3d_postprocessing.py @@ -1,7 +1,9 @@ -import tensorflow as tf -import numpy as np import os +import numpy as np +import tensorflow as tf + +from hailo_model_zoo.core.factory import POSTPROCESS_FACTORY, VISUALIZATION_FACTORY from hailo_model_zoo.core.postprocessing.visualize_3d import visualization3Dbox from hailo_model_zoo.utils import path_resolver @@ -30,6 +32,7 @@ def get_calibration_matrix_from_data(data): return P2 +@POSTPROCESS_FACTORY.register(name="3d_detection") def detection_3d_postprocessing(endnodes, device_pre_post_layers=None, **kwargs): output_scheme = kwargs.get('output_scheme', None) if output_scheme: @@ -368,6 +371,7 @@ def _select_point_of_interest(self, batch, index, feature_maps): return feature_maps 
+@VISUALIZATION_FACTORY.register(name="3d_detection") def visualize_3d_detection_result(logits, image, image_name=None, threshold=0.25, image_info=None, use_normalized_coordinates=True, max_boxes_to_draw=20, dataset_name='kitti_3d', channels_remove=None, **kwargs): diff --git a/hailo_model_zoo/core/postprocessing/detection_postprocessing.py b/hailo_model_zoo/core/postprocessing/detection_postprocessing.py index 3b51c071..f01f049d 100644 --- a/hailo_model_zoo/core/postprocessing/detection_postprocessing.py +++ b/hailo_model_zoo/core/postprocessing/detection_postprocessing.py @@ -2,17 +2,18 @@ import os from detection_tools.utils.visualization_utils import visualize_boxes_and_labels_on_image_array -from hailo_model_zoo.core.postprocessing.detection.ssd import SSDPostProc -from hailo_model_zoo.core.postprocessing.detection.ssd_mlperf_tf import SSDMLPerfPostProc + +from hailo_model_zoo.core.factory import POSTPROCESS_FACTORY, VISUALIZATION_FACTORY from hailo_model_zoo.core.postprocessing.detection.centernet import CenternetPostProc -from hailo_model_zoo.core.postprocessing.detection.yolo import YoloPostProc +from hailo_model_zoo.core.postprocessing.detection.detr import DetrPostProc from hailo_model_zoo.core.postprocessing.detection.efficientdet import EfficientDetPostProc from hailo_model_zoo.core.postprocessing.detection.faster_rcnn_stage1_postprocessing import FasterRCNNStage1 from hailo_model_zoo.core.postprocessing.detection.faster_rcnn_stage2_postprocessing import FasterRCNNStage2 from hailo_model_zoo.core.postprocessing.detection.nanodet import NanoDetPostProc -from hailo_model_zoo.core.postprocessing.detection.detr import DetrPostProc from hailo_model_zoo.core.postprocessing.detection.retinanet_mlperf import retinanet_postproc - +from hailo_model_zoo.core.postprocessing.detection.ssd import SSDPostProc +from hailo_model_zoo.core.postprocessing.detection.ssd_mlperf_tf import SSDMLPerfPostProc +from hailo_model_zoo.core.postprocessing.detection.yolo import 
YoloPostProc DETECTION_ARCHS = { "ssd": SSDPostProc, @@ -35,6 +36,7 @@ def _get_postprocessing_class(meta_arch): raise ValueError("Meta-architecture [{}] is not supported".format(meta_arch)) +@POSTPROCESS_FACTORY.register(name="detection") def detection_postprocessing(endnodes, device_pre_post_layers=None, **kwargs): meta_arch = kwargs["meta_arch"].lower() kwargs["anchors"] = {} if kwargs["anchors"] is None else kwargs["anchors"] @@ -73,6 +75,8 @@ def _get_face_detection_visualization_data(logits): return boxes, labels, face_landmarks +@VISUALIZATION_FACTORY.register(name="detection") +@VISUALIZATION_FACTORY.register(name="face_detection") def visualize_detection_result(logits, image, threshold=0.2, image_info=None, use_normalized_coordinates=True, max_boxes_to_draw=20, dataset_name='coco', **kwargs): diff --git a/hailo_model_zoo/core/postprocessing/face_attr_postprocessing.py b/hailo_model_zoo/core/postprocessing/face_attr_postprocessing.py index a8c09dc6..52a5a6bc 100644 --- a/hailo_model_zoo/core/postprocessing/face_attr_postprocessing.py +++ b/hailo_model_zoo/core/postprocessing/face_attr_postprocessing.py @@ -1,6 +1,9 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import POSTPROCESS_FACTORY + +@POSTPROCESS_FACTORY.register(name="face_attr") def face_attr_postprocessing(endnodes, device_pre_post_layers=None, **kwargs): preds = tf.reshape(endnodes, (-1, 40, 2)) preds = tf.cast(tf.argmax(preds, axis=-1), tf.float32) - 0.5 diff --git a/hailo_model_zoo/core/postprocessing/face_detection_postprocessing.py b/hailo_model_zoo/core/postprocessing/face_detection_postprocessing.py index 4c29da8f..070048c0 100644 --- a/hailo_model_zoo/core/postprocessing/face_detection_postprocessing.py +++ b/hailo_model_zoo/core/postprocessing/face_detection_postprocessing.py @@ -2,9 +2,9 @@ import numpy as np import tensorflow as tf - from detection_tools.core.post_processing import batch_multiclass_non_max_suppression +from hailo_model_zoo.core.factory import 
POSTPROCESS_FACTORY from hailo_model_zoo.core.postprocessing.face_detection.scrfd import SCRFDPostProc @@ -266,6 +266,7 @@ def tf_postproc(self, endnodes): DEFAULT_CLASS = FaceDetectionPostProc +@POSTPROCESS_FACTORY.register(name="face_detection") def face_detection_postprocessing(endnodes, device_pre_post_layers=None, **kwargs): meta_arch = kwargs.get('meta_arch', None) if meta_arch: diff --git a/hailo_model_zoo/core/postprocessing/face_landmarks_3d_postprocessing.py b/hailo_model_zoo/core/postprocessing/face_landmarks_3d_postprocessing.py index 2cbb857d..12897e32 100644 --- a/hailo_model_zoo/core/postprocessing/face_landmarks_3d_postprocessing.py +++ b/hailo_model_zoo/core/postprocessing/face_landmarks_3d_postprocessing.py @@ -1,8 +1,11 @@ import pickle + import cv2 import numpy as np import tensorflow as tf +from hailo_model_zoo.core.factory import (POSTPROCESS_FACTORY, + VISUALIZATION_FACTORY) from hailo_model_zoo.core.infer.infer_utils import to_numpy from hailo_model_zoo.utils.path_resolver import resolve_data_path @@ -145,6 +148,7 @@ def face_3dmm_to_landmarks_np(face_3dmm_params, img_dims, roi_box): return pts3d +@POSTPROCESS_FACTORY.register(name="face_landmark_detection_3d") def face_landmarks_3d_postprocessing(endnodes, device_pre_post_layers=None, *, img_dims=None, gt_images=None, **kwargs): assert img_dims[0] == img_dims[1], "Assumes square input" batch_size = tf.shape(endnodes)[0] @@ -158,6 +162,7 @@ def face_landmarks_3d_postprocessing(endnodes, device_pre_post_layers=None, *, i return {'predictions': ptds3d} +@VISUALIZATION_FACTORY.register(name="face_landmark_detection_3d") def visualize_face_landmarks_3d_result(logits, image, **kwargs): logits = logits['predictions'] img = to_numpy(kwargs.get('img_info', {}).get('uncropped_image', image[0])) diff --git a/hailo_model_zoo/core/postprocessing/facenet_postprocessing.py b/hailo_model_zoo/core/postprocessing/facenet_postprocessing.py index f2407659..ae1c7f2d 100644 --- 
a/hailo_model_zoo/core/postprocessing/facenet_postprocessing.py +++ b/hailo_model_zoo/core/postprocessing/facenet_postprocessing.py @@ -1,15 +1,19 @@ -import tensorflow as tf -import numpy as np +import matplotlib import matplotlib.pyplot as plt +import numpy as np +import tensorflow as tf from sklearn.manifold import TSNE -import matplotlib + +from hailo_model_zoo.core.factory import POSTPROCESS_FACTORY, VISUALIZATION_FACTORY +@POSTPROCESS_FACTORY.register(name="face_verification") def facenet_postprocessing(endnodes, device_pre_post_layers, **kwargs): embeddings = tf.nn.l2_normalize(endnodes, 1, 1e-10, name='embeddings') return {'predictions': embeddings} +@VISUALIZATION_FACTORY.register(name="face_verification") def visualize_face_result(embeddings1, embeddings2, filenames, **kwargs): matplotlib.use('TkAgg') tsne = TSNE(n_components=2, random_state=0) diff --git a/hailo_model_zoo/core/postprocessing/head_pose_estimation_postprocessing.py b/hailo_model_zoo/core/postprocessing/head_pose_estimation_postprocessing.py index 78b40fa1..f43284b6 100644 --- a/hailo_model_zoo/core/postprocessing/head_pose_estimation_postprocessing.py +++ b/hailo_model_zoo/core/postprocessing/head_pose_estimation_postprocessing.py @@ -1,9 +1,13 @@ -import tensorflow as tf -import numpy as np -import cv2 from math import cos, sin +import cv2 +import numpy as np +import tensorflow as tf + +from hailo_model_zoo.core.factory import POSTPROCESS_FACTORY, VISUALIZATION_FACTORY + +@POSTPROCESS_FACTORY.register(name="head_pose_estimation") def head_pose_estimation_postprocessing(endnodes, device_pre_post_layers, **kwargs): if device_pre_post_layers is not None and device_pre_post_layers['softmax']: probs = endnodes @@ -20,6 +24,7 @@ def head_pose_estimation_postprocessing(endnodes, device_pre_post_layers, **kwar } +@VISUALIZATION_FACTORY.register(name="head_pose_estimation") def visualize_head_pose_result(net_output, img, **kwargs): img = img[0] pitch, roll, yaw = net_output['pitch'][0], 
net_output['roll'][0], net_output['yaw'][0] diff --git a/hailo_model_zoo/core/postprocessing/image_denoising_postprocessing.py b/hailo_model_zoo/core/postprocessing/image_denoising_postprocessing.py index f3913661..9b348b1b 100644 --- a/hailo_model_zoo/core/postprocessing/image_denoising_postprocessing.py +++ b/hailo_model_zoo/core/postprocessing/image_denoising_postprocessing.py @@ -1,13 +1,17 @@ -import tensorflow as tf -import numpy as np import cv2 +import numpy as np +import tensorflow as tf + +from hailo_model_zoo.core.factory import POSTPROCESS_FACTORY, VISUALIZATION_FACTORY +@POSTPROCESS_FACTORY.register(name="image_denoising") def image_denoising_postprocessing(endnodes, device_pre_post_layers=None, **kwargs): endnodes = tf.cast(tf.math.round(tf.clip_by_value(endnodes, 0, 1) * 255.0), tf.uint8) return {'predictions': endnodes} +@VISUALIZATION_FACTORY.register(name="image_denoising") def visualize_image_denoising_result(predicted_img, orig_img, **kwargs): noised_img = kwargs['img_info']['img_noised'].numpy() if kwargs['img_info']['transpose'].numpy(): diff --git a/hailo_model_zoo/core/postprocessing/instance_segmentation_postprocessing.py b/hailo_model_zoo/core/postprocessing/instance_segmentation_postprocessing.py index 51d1f585..e2a0f6fe 100644 --- a/hailo_model_zoo/core/postprocessing/instance_segmentation_postprocessing.py +++ b/hailo_model_zoo/core/postprocessing/instance_segmentation_postprocessing.py @@ -1,11 +1,13 @@ -import numpy as np from itertools import product from math import sqrt + import cv2 +import numpy as np -from hailo_model_zoo.core.datasets.datasets_info import get_dataset_info, CLASS_NAMES_COCO -from hailo_model_zoo.utils import path_resolver +from hailo_model_zoo.core.datasets.datasets_info import CLASS_NAMES_COCO, get_dataset_info +from hailo_model_zoo.core.factory import POSTPROCESS_FACTORY, VISUALIZATION_FACTORY from hailo_model_zoo.core.postprocessing.cython_utils.cython_nms import nms as cnms +from hailo_model_zoo.utils import 
path_resolver COLORS = ((244, 67, 54), (233, 30, 99), @@ -1002,6 +1004,7 @@ def yolov8_seg_postprocess(endnodes, device_pre_post_layers=None, **kwargs): return outputs +@POSTPROCESS_FACTORY.register(name="instance_segmentation") def instance_segmentation_postprocessing(endnodes, device_pre_post_layers=None, **kwargs): meta_arch = kwargs.get('meta_arch', '') if 'sparseinst' in meta_arch: @@ -1109,6 +1112,7 @@ def visualize_yolov5_seg_results(detections, img, class_names=None, alpha=0.5, s return img_out +@VISUALIZATION_FACTORY.register(name="instance_segmentation") def visualize_instance_segmentation_result(detections, img, **kwargs): detections = detections['predictions'] meta_arch = kwargs.get('meta_arch', '') diff --git a/hailo_model_zoo/core/postprocessing/landmarks_postprocessing.py b/hailo_model_zoo/core/postprocessing/landmarks_postprocessing.py index 3825f324..eeafb458 100644 --- a/hailo_model_zoo/core/postprocessing/landmarks_postprocessing.py +++ b/hailo_model_zoo/core/postprocessing/landmarks_postprocessing.py @@ -1,17 +1,22 @@ +import cv2 import numpy as np from PIL import Image, ImageDraw -import cv2 + +from hailo_model_zoo.core.factory import POSTPROCESS_FACTORY, VISUALIZATION_FACTORY +@POSTPROCESS_FACTORY.register(name="face_landmark_detection") def face_landmarks_postprocessing(endnodes, device_pre_post_layers=None, **kwargs): shape = kwargs['img_dims'] return {'predictions': endnodes * shape[0]} +@POSTPROCESS_FACTORY.register(name="landmark_detection") def hand_landmarks_postprocessing(endnodes, device_pre_post_layers=None, **kwargs): return {'predictions': endnodes[0]} +@VISUALIZATION_FACTORY.register(name="face_landmark_detection") def visualize_face_landmarks_result(logits, image, **kwargs): logits = logits['predictions'] img = Image.fromarray(image[0]) @@ -20,6 +25,7 @@ def visualize_face_landmarks_result(logits, image, **kwargs): return np.array(img) +@VISUALIZATION_FACTORY.register(name="landmark_detection") def 
visualize_hand_landmarks_result(logits, image, **kwargs): logits = logits['predictions'][0] img = image[0] diff --git a/hailo_model_zoo/core/postprocessing/lane_detection/polylanenet.py b/hailo_model_zoo/core/postprocessing/lane_detection/polylanenet.py index fcb7a444..0f2c5022 100644 --- a/hailo_model_zoo/core/postprocessing/lane_detection/polylanenet.py +++ b/hailo_model_zoo/core/postprocessing/lane_detection/polylanenet.py @@ -58,9 +58,9 @@ def polynomize_pred(self, pred): def postprocessing(self, endnodes, device_pre_post_layers=None, output_scheme=None, **kwargs): if output_scheme and output_scheme.get('split_output', False): - endnodes = tf.py_function(self.recombine_split_endnodes, endnodes, [tf.float32]) + endnodes = tf.numpy_function(self.recombine_split_endnodes, endnodes, [tf.float32]) decoded = tf.numpy_function(self.decode, [endnodes], [tf.float32]) # network always returns 5 lane predictions. - postprocessed = tf.py_function(self.polynomize_pred, [decoded], [tf.float32]) + postprocessed = tf.numpy_function(self.polynomize_pred, [decoded], [tf.float32]) # import ipdb; ipdb.set_trace() return {'predictions': postprocessed[0]} diff --git a/hailo_model_zoo/core/postprocessing/lane_detection_postprocessing.py b/hailo_model_zoo/core/postprocessing/lane_detection_postprocessing.py index 8c0a73d2..56c6dd7a 100644 --- a/hailo_model_zoo/core/postprocessing/lane_detection_postprocessing.py +++ b/hailo_model_zoo/core/postprocessing/lane_detection_postprocessing.py @@ -1,9 +1,9 @@ -import numpy as np import cv2 +import numpy as np -from hailo_model_zoo.core.postprocessing.lane_detection.polylanenet import PolyLaneNetPostProcessHailo +from hailo_model_zoo.core.factory import POSTPROCESS_FACTORY, VISUALIZATION_FACTORY from hailo_model_zoo.core.postprocessing.lane_detection.laneaf import LaneAFPostProc - +from hailo_model_zoo.core.postprocessing.lane_detection.polylanenet import PolyLaneNetPostProcessHailo LANE_DETECTION_ARCHS = { "polylanenet": 
PolyLaneNetPostProcessHailo, @@ -11,6 +11,7 @@ } +@VISUALIZATION_FACTORY.register(name="lane_detection") def visualize_lane_detection_result(pred, im, dataset_name='tusimple', **kwargs): pred = pred['predictions'] color = [0, 255, 0] @@ -51,6 +52,7 @@ def _get_postprocessing_class(meta_arch): raise ValueError("Meta-architecture [{}] is not supported".format(meta_arch)) +@POSTPROCESS_FACTORY.register(name="lane_detection") def lane_detection_postprocessing(endnodes, device_pre_post_layers=None, **kwargs): meta_arch = kwargs["meta_arch"].lower() kwargs["anchors"] = {} if kwargs["anchors"] is None else kwargs["anchors"] diff --git a/hailo_model_zoo/core/postprocessing/low_light_enhancement_postprocessing.py b/hailo_model_zoo/core/postprocessing/low_light_enhancement_postprocessing.py index 861fe263..27c50a26 100644 --- a/hailo_model_zoo/core/postprocessing/low_light_enhancement_postprocessing.py +++ b/hailo_model_zoo/core/postprocessing/low_light_enhancement_postprocessing.py @@ -1,5 +1,7 @@ -import numpy as np import cv2 +import numpy as np + +from hailo_model_zoo.core.factory import POSTPROCESS_FACTORY, VISUALIZATION_FACTORY """ properties of patch for image visualization: @@ -7,10 +9,12 @@ """ size fit for screen comparison """ +@POSTPROCESS_FACTORY.register(name="low_light_enhancement") def low_light_enhancement_postprocessing(endnodes, device_pre_post_layers=None, **kwargs): return {'predictions': endnodes} +@VISUALIZATION_FACTORY.register(name="low_light_enhancement") def visualize_low_light_enhancement_result(logits, img, **kwargs): """ Visualizing the output of the Low-Light Enhancement network. 
diff --git a/hailo_model_zoo/core/postprocessing/mspn_postprocessing.py b/hailo_model_zoo/core/postprocessing/mspn_postprocessing.py index c49071aa..6867de24 100644 --- a/hailo_model_zoo/core/postprocessing/mspn_postprocessing.py +++ b/hailo_model_zoo/core/postprocessing/mspn_postprocessing.py @@ -1,9 +1,9 @@ -import numpy as np import cv2 +import numpy as np +from hailo_model_zoo.core.factory import POSTPROCESS_FACTORY, VISUALIZATION_FACTORY from hailo_model_zoo.core.preprocessing.affine_utils import transform_preds - pose_kpt_color = np.array([[0, 255, 0], [0, 255, 0], [0, 255, 0], @@ -156,6 +156,7 @@ def _get_default_bbox(batch_size): return default_bbox # Bx1x4 +@POSTPROCESS_FACTORY.register(name="single_person_pose_estimation") def mspn_postprocessing(endnodes, device_pre_post_layers=None, **kwargs): image_info = kwargs['gt_images'] height, width = image_info['img_resized'].shape[1:3] @@ -195,6 +196,7 @@ def mspn_postprocessing(endnodes, device_pre_post_layers=None, **kwargs): return {'predictions': all_preds} +@VISUALIZATION_FACTORY.register(name="single_person_pose_estimation") def visualize_single_person_pose_estimation_result(probs, image, kpt_score_thr=0.3, radius=8, thickness=2, **kwargs): diff --git a/hailo_model_zoo/core/postprocessing/multiple_object_tracking_postprocessing.py b/hailo_model_zoo/core/postprocessing/multiple_object_tracking_postprocessing.py index ba6d9804..6cdbbae9 100644 --- a/hailo_model_zoo/core/postprocessing/multiple_object_tracking_postprocessing.py +++ b/hailo_model_zoo/core/postprocessing/multiple_object_tracking_postprocessing.py @@ -1,10 +1,13 @@ -import tensorflow as tf import numpy as np +import tensorflow as tf +from detection_tools.utils.visualization_utils import \ + visualize_boxes_and_labels_on_image_array +from hailo_model_zoo.core.factory import POSTPROCESS_FACTORY, VISUALIZATION_FACTORY from hailo_model_zoo.core.postprocessing.detection.centernet import CenternetPostProc -from 
detection_tools.utils.visualization_utils import visualize_boxes_and_labels_on_image_array +@POSTPROCESS_FACTORY.register(name="multiple_object_tracking") def multiple_object_tracking_postprocessing(endnodes, device_pre_post_layers=None, **kwargs): kwargs['meta_arch'] = kwargs.get('meta_arch', {}) if kwargs['meta_arch'] != 'fair_mot': @@ -17,9 +20,9 @@ def multiple_object_tracking_postprocessing(endnodes, device_pre_post_layers=Non **kwargs) re_id_values = tf.nn.l2_normalize(endnodes[0], axis=-1) - top_indices = tf.py_function(_get_top_indices, [re_id_values, detection_dict['top_k_indices']], [tf.int64]) + top_indices = tf.numpy_function(_get_top_indices, [re_id_values, detection_dict['top_k_indices']], [tf.int64]) detection_dict['re_id_values'] = tf.gather_nd(re_id_values, top_indices) - return dict(**detection_dict) + return {**detection_dict} def _get_top_indices(re_id_values, top_k): @@ -30,6 +33,7 @@ def _get_top_indices(re_id_values, top_k): return top_indices_including_all_features.reshape((re_id_values.shape[0], -1, 128, top_k.shape[-1])) +@VISUALIZATION_FACTORY.register(name="multiple_object_tracking") def visualize_tracking_result(logits, image, threshold=0.4, image_info=None, use_normalized_coordinates=True, max_boxes_to_draw=20, dataset_name=None, **kwargs): boxes = logits['detection_boxes'][0] diff --git a/hailo_model_zoo/core/postprocessing/ocr_postprocessing.py b/hailo_model_zoo/core/postprocessing/ocr_postprocessing.py index 6cdf1930..baca03ce 100644 --- a/hailo_model_zoo/core/postprocessing/ocr_postprocessing.py +++ b/hailo_model_zoo/core/postprocessing/ocr_postprocessing.py @@ -1,6 +1,7 @@ import numpy as np from PIL import Image, ImageDraw +from hailo_model_zoo.core.factory import POSTPROCESS_FACTORY, VISUALIZATION_FACTORY CHARS = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '-'] @@ -8,12 +9,14 @@ # return np.exp(x) / np.expand_dims(np.sum(np.exp(x), axis=-1), axis=-1) +@POSTPROCESS_FACTORY.register(name="ocr") def 
ocr_postprocessing(endnodes, device_pre_post_layers=None, **kwargs): logits = np.mean(endnodes, axis=1) # probs = _softmax(logits) return {'predictions': np.argmax(logits, axis=2)} +@VISUALIZATION_FACTORY.register(name="ocr") def visualize_ocr_result(probs, img, text_color=(255, 0, 0), **kwrgs): probs = np.expand_dims(probs['predictions'][0], axis=0) pred = greedy_decoder(probs)[0] diff --git a/hailo_model_zoo/core/postprocessing/person_reid_postprocessing.py b/hailo_model_zoo/core/postprocessing/person_reid_postprocessing.py index c66bd926..5cfb9ce0 100644 --- a/hailo_model_zoo/core/postprocessing/person_reid_postprocessing.py +++ b/hailo_model_zoo/core/postprocessing/person_reid_postprocessing.py @@ -1,6 +1,9 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import POSTPROCESS_FACTORY + +@POSTPROCESS_FACTORY.register(name="person_reid") def person_reid_postprocessing(endnodes, device_pre_post_layers=None, **kwargs): embeddings = endnodes embeddings = tf.nn.l2_normalize(endnodes, 1, 1e-10, name='embeddings') diff --git a/hailo_model_zoo/core/postprocessing/pose_estimation_postprocessing.py b/hailo_model_zoo/core/postprocessing/pose_estimation_postprocessing.py index 36bc4309..c4548c49 100644 --- a/hailo_model_zoo/core/postprocessing/pose_estimation_postprocessing.py +++ b/hailo_model_zoo/core/postprocessing/pose_estimation_postprocessing.py @@ -1,11 +1,13 @@ import math -import numpy as np -import cv2 from operator import itemgetter -from hailo_model_zoo.core.postprocessing.instance_segmentation_postprocessing import xywh2xyxy + +import cv2 +import numpy as np + +from hailo_model_zoo.core.factory import POSTPROCESS_FACTORY, VISUALIZATION_FACTORY from hailo_model_zoo.core.postprocessing.centerpose_postprocessing import centerpose_postprocessing from hailo_model_zoo.core.postprocessing.cython_utils.cython_nms import nms as cnms - +from hailo_model_zoo.core.postprocessing.instance_segmentation_postprocessing import xywh2xyxy BODY_PARTS_KPT_IDS = [[1, 2], 
[1, 5], [2, 3], [3, 4], [5, 6], [6, 7], [1, 8], [8, 9], [9, 10], [1, 11], [11, 12], [12, 13], [1, 0], [0, 14], [14, 16], [0, 15], [15, 17], [2, 16], [5, 17]] @@ -36,6 +38,7 @@ def scale_kpts(kpts, shape, orig_shape): return kpts +@POSTPROCESS_FACTORY.register(name="pose_estimation") def pose_estimation_postprocessing(endnodes, device_pre_post_layers=None, **kwargs): if kwargs.get('meta_arch') == 'centerpose': return centerpose_postprocessing(endnodes, @@ -76,6 +79,7 @@ def pose_estimation_postprocessing(endnodes, device_pre_post_layers=None, **kwar return {'predictions': coco_result_list} +@VISUALIZATION_FACTORY.register(name="pose_estimation") def visualize_pose_estimation_result(results, img, dataset_name, *, detection_threshold=0.5, joint_threshold=0.5, **kwargs): assert dataset_name == 'cocopose' diff --git a/hailo_model_zoo/core/postprocessing/postprocessing_factory.py b/hailo_model_zoo/core/postprocessing/postprocessing_factory.py index fb3dabdf..e4f001fa 100644 --- a/hailo_model_zoo/core/postprocessing/postprocessing_factory.py +++ b/hailo_model_zoo/core/postprocessing/postprocessing_factory.py @@ -1,67 +1,16 @@ """Contains a factory for network postprocessing.""" -from hailo_model_zoo.core.postprocessing.age_gender_postprocessing import (age_gender_postprocessing, - visualize_age_gender_result) -from hailo_model_zoo.core.postprocessing.classification_postprocessing import (classification_postprocessing, - zero_shot_classification_postprocessing, - visualize_classification_result) -from hailo_model_zoo.core.postprocessing.detection_postprocessing import (detection_postprocessing, - visualize_detection_result) -from hailo_model_zoo.core.postprocessing.face_detection_postprocessing import (face_detection_postprocessing) -from hailo_model_zoo.core.postprocessing.segmentation_postprocessing import (segmentation_postprocessing, - visualize_segmentation_result) -from hailo_model_zoo.core.postprocessing.facenet_postprocessing import (facenet_postprocessing, 
visualize_face_result) -from hailo_model_zoo.core.postprocessing.instance_segmentation_postprocessing import ( - instance_segmentation_postprocessing, visualize_instance_segmentation_result) -from hailo_model_zoo.core.postprocessing.pose_estimation_postprocessing import (pose_estimation_postprocessing, - visualize_pose_estimation_result) -from hailo_model_zoo.core.postprocessing.super_resolution_postprocessing import (super_resolution_postprocessing, - visualize_super_resolution_result, - visualize_srgan_result) -from hailo_model_zoo.core.postprocessing.low_light_enhancement_postprocessing import ( - low_light_enhancement_postprocessing, - visualize_low_light_enhancement_result) -from hailo_model_zoo.core.postprocessing.head_pose_estimation_postprocessing import ( - head_pose_estimation_postprocessing, visualize_head_pose_result) -from hailo_model_zoo.core.postprocessing.lane_detection_postprocessing import (lane_detection_postprocessing, - visualize_lane_detection_result) -from hailo_model_zoo.core.postprocessing.multiple_object_tracking_postprocessing import ( - multiple_object_tracking_postprocessing, visualize_tracking_result) -from hailo_model_zoo.core.postprocessing.landmarks_postprocessing import ( - face_landmarks_postprocessing, visualize_face_landmarks_result, - visualize_hand_landmarks_result, hand_landmarks_postprocessing) -from hailo_model_zoo.core.postprocessing.face_landmarks_3d_postprocessing import ( - face_landmarks_3d_postprocessing, visualize_face_landmarks_3d_result) -from hailo_model_zoo.core.postprocessing.detection_3d_postprocessing import ( - detection_3d_postprocessing, visualize_3d_detection_result) -from hailo_model_zoo.core.postprocessing.depth_estimation_postprocessing import (depth_estimation_postprocessing, - visualize_depth_estimation_result) -from hailo_model_zoo.core.postprocessing.ocr_postprocessing import ( - ocr_postprocessing, visualize_ocr_result -) -from hailo_model_zoo.core.postprocessing.person_reid_postprocessing import ( 
- person_reid_postprocessing) -from hailo_model_zoo.core.postprocessing.face_attr_postprocessing import face_attr_postprocessing -from hailo_model_zoo.core.postprocessing.mspn_postprocessing import ( - mspn_postprocessing, visualize_single_person_pose_estimation_result -) -from hailo_model_zoo.core.postprocessing.stereonet_postprocessing import ( - stereonet_postprocessing, visualize_stereonet_result) -from hailo_model_zoo.core.postprocessing.image_denoising_postprocessing import ( - image_denoising_postprocessing, visualize_image_denoising_result -) -from hailo_model_zoo.core.postprocessing.vit_pose_postprocessing import ( - vit_pose_postprocessing -) -from hailo_model_zoo.core.postprocessing.stable_diffusion_v2_postprocessing import ( - stable_diffusion_v2_decoder_postprocessing, stable_diffusion_v2_unet_postprocessing, - visualize_stable_diffusion_v2_decoder, visualize_stable_diffusion_v2_unet -) +import importlib -try: - # THIS CODE IS EXPERIMENTAL AND IN USE ONLY FOR TAPPAS VALIDATION - from hailo_model_zoo.core.postprocessing.tappas_postprocessing import tappas_postprocessing -except ModuleNotFoundError: - tappas_postprocessing = None +import hailo_model_zoo.core.postprocessing +from hailo_model_zoo.core.factory import POSTPROCESS_FACTORY, VISUALIZATION_FACTORY +from hailo_model_zoo.utils.plugin_utils import iter_namespace + +discovered_plugins = { + name: importlib.import_module(name) + for _, name, _ + in iter_namespace(hailo_model_zoo.core.postprocessing) + if 'post' in name.split('.')[-1] # ignore roi_align module which isn't importable +} def get_visualization(name, **kwargs): @@ -82,40 +31,7 @@ def get_visualization(name, **kwargs): if name in unsupported_visualizations: raise ValueError(f'Visualization is currently not supported for {name}') - visualization_fn_map = { - 'classification': visualize_classification_result, - 'zero_shot_classification': visualize_classification_result, - 'segmentation': visualize_segmentation_result, - 'detection': 
visualize_detection_result, - 'pose_estimation': visualize_pose_estimation_result, - 'face_verification': visualize_face_result, - 'instance_segmentation': visualize_instance_segmentation_result, - 'super_resolution': visualize_super_resolution_result, - 'super_resolution_srgan': visualize_srgan_result, - 'low_light_enhancement': visualize_low_light_enhancement_result, - 'head_pose_estimation': visualize_head_pose_result, - 'age_gender': visualize_age_gender_result, - 'face_detection': visualize_detection_result, - 'multiple_object_tracking': visualize_tracking_result, - 'face_landmark_detection': visualize_face_landmarks_result, - 'landmark_detection': visualize_hand_landmarks_result, - 'lane_detection': visualize_lane_detection_result, - '3d_detection': visualize_3d_detection_result, - 'face_landmark_detection_3d': visualize_face_landmarks_3d_result, - 'ocr': visualize_ocr_result, - 'single_person_pose_estimation': visualize_single_person_pose_estimation_result, - 'stereonet': visualize_stereonet_result, - 'image_denoising': visualize_image_denoising_result, - 'depth_estimation': visualize_depth_estimation_result, - 'stable_diffusion_v2_decoder': visualize_stable_diffusion_v2_decoder, - 'stable_diffusion_v2_unet': visualize_stable_diffusion_v2_unet, - } - if name not in visualization_fn_map: - raise ValueError('Visualization name [%s] was not recognized' % name) - - def visualization_fn(endnodes, image_info, **kwargs): - return visualization_fn_map[name](endnodes, image_info, **kwargs) - + visualization_fn = VISUALIZATION_FACTORY.get(name) return visualization_fn @@ -129,44 +45,10 @@ def get_postprocessing(name, flip=False): Raises: ValueError: If postprocessing `name` is not recognized. 
""" - postprocessing_fn_map = { - 'classification': classification_postprocessing, - 'zero_shot_classification': zero_shot_classification_postprocessing, - 'segmentation': segmentation_postprocessing, - 'detection': detection_postprocessing, - 'pose_estimation': pose_estimation_postprocessing, - 'face_verification': facenet_postprocessing, - 'landmark_detection': hand_landmarks_postprocessing, - 'face_landmark_detection': face_landmarks_postprocessing, - 'instance_segmentation': instance_segmentation_postprocessing, - 'super_resolution': super_resolution_postprocessing, - 'super_resolution_srgan': super_resolution_postprocessing, - 'low_light_enhancement': low_light_enhancement_postprocessing, - 'head_pose_estimation': head_pose_estimation_postprocessing, - 'face_detection': face_detection_postprocessing, - 'age_gender': age_gender_postprocessing, - 'multiple_object_tracking': multiple_object_tracking_postprocessing, - 'lane_detection': lane_detection_postprocessing, - '3d_detection': detection_3d_postprocessing, - 'face_landmark_detection_3d': face_landmarks_3d_postprocessing, - 'ocr': ocr_postprocessing, - 'person_reid': person_reid_postprocessing, - 'person_attr': classification_postprocessing, - 'face_attr': face_attr_postprocessing, - 'single_person_pose_estimation': mspn_postprocessing, - 'vit_pose': vit_pose_postprocessing, - 'stereonet': stereonet_postprocessing, - 'tappas_postprocessing': tappas_postprocessing, - 'image_denoising': image_denoising_postprocessing, - 'depth_estimation': depth_estimation_postprocessing, - 'stable_diffusion_v2_decoder': stable_diffusion_v2_decoder_postprocessing, - 'stable_diffusion_v2_unet': stable_diffusion_v2_unet_postprocessing, - } - if name not in postprocessing_fn_map: - raise ValueError('Postprocessing name [%s] was not recognized' % name) + postprocess_callback = POSTPROCESS_FACTORY.get(name) def postprocessing_fn(endnodes, device_pre_post_layers=None, **kwargs): - return postprocessing_fn_map[name](endnodes, 
device_pre_post_layers, **kwargs) + return postprocess_callback(endnodes, device_pre_post_layers, **kwargs) return postprocessing_fn diff --git a/hailo_model_zoo/core/postprocessing/segmentation_postprocessing.py b/hailo_model_zoo/core/postprocessing/segmentation_postprocessing.py index 13a26760..e2e17d07 100644 --- a/hailo_model_zoo/core/postprocessing/segmentation_postprocessing.py +++ b/hailo_model_zoo/core/postprocessing/segmentation_postprocessing.py @@ -1,7 +1,8 @@ -import tensorflow as tf import numpy as np -from PIL import ImageFilter -from PIL import Image +import tensorflow as tf +from PIL import Image, ImageFilter + +from hailo_model_zoo.core.factory import POSTPROCESS_FACTORY, VISUALIZATION_FACTORY class PostProcessingException(Exception): @@ -71,6 +72,7 @@ def color_segment_img(orig_img, logits, dataset): return np.array(composite_backscaled, np.uint8)[:, :, :3] +@POSTPROCESS_FACTORY.register(name="segmentation") def segmentation_postprocessing(endnodes, device_pre_post_layers=None, **kwargs): device_pre_post_layers = device_pre_post_layers if device_pre_post_layers is not None else { 'bilinear': False, 'argmax': False} @@ -88,6 +90,7 @@ def segmentation_postprocessing(endnodes, device_pre_post_layers=None, **kwargs) return {'predictions': predictions} +@VISUALIZATION_FACTORY.register(name="segmentation") def visualize_segmentation_result(logits, image, **kwargs): logits = logits['predictions'] dataset = kwargs['dataset_name'] diff --git a/hailo_model_zoo/core/postprocessing/stable_diffusion_v2_postprocessing.py b/hailo_model_zoo/core/postprocessing/stable_diffusion_v2_postprocessing.py index c0b04a0a..601500c3 100644 --- a/hailo_model_zoo/core/postprocessing/stable_diffusion_v2_postprocessing.py +++ b/hailo_model_zoo/core/postprocessing/stable_diffusion_v2_postprocessing.py @@ -1,14 +1,17 @@ -import tensorflow as tf import numpy as np -from PIL import Image -from PIL import ImageDraw +import tensorflow as tf +from PIL import Image, ImageDraw + +from 
hailo_model_zoo.core.factory import POSTPROCESS_FACTORY, VISUALIZATION_FACTORY +@POSTPROCESS_FACTORY.register(name="stable_diffusion_v2_decoder") def stable_diffusion_v2_decoder_postprocessing(endnodes, device_pre_post_layers=None, **kwargs): output_image = tf.clip_by_value(endnodes / 2 + 0.5, 0.0, 1.0) return {'predictions': output_image} +@VISUALIZATION_FACTORY.register(name="stable_diffusion_v2_decoder") def visualize_stable_diffusion_v2_decoder(logits, img_gt, **kwargs): max_chars_per_line = 80 image_gen = logits['predictions'] @@ -25,11 +28,13 @@ def visualize_stable_diffusion_v2_decoder(logits, img_gt, **kwargs): return np.array(image_gen_pil, np.uint8) +@POSTPROCESS_FACTORY.register(name="stable_diffusion_v2_unet") def stable_diffusion_v2_unet_postprocessing(endnodes, device_pre_post_layers=None, **kwargs): endnodes = tf.transpose(endnodes, (0, 2, 3, 1)) return {'predictions': endnodes} +@VISUALIZATION_FACTORY.register(name="stable_diffusion_v2_unet") def visualize_stable_diffusion_v2_unet(logits, img_gt, **kwargs): max_chars_per_line = 80 image_gen = logits['predictions'] diff --git a/hailo_model_zoo/core/postprocessing/stereonet_postprocessing.py b/hailo_model_zoo/core/postprocessing/stereonet_postprocessing.py index e147fe24..c9ad702c 100644 --- a/hailo_model_zoo/core/postprocessing/stereonet_postprocessing.py +++ b/hailo_model_zoo/core/postprocessing/stereonet_postprocessing.py @@ -1,13 +1,17 @@ import io -import numpy as np import matplotlib.pyplot as plt +import numpy as np + +from hailo_model_zoo.core.factory import POSTPROCESS_FACTORY, VISUALIZATION_FACTORY +@POSTPROCESS_FACTORY.register(name="stereonet") def stereonet_postprocessing(logits, device_pre_post_layers=None, **kwargs): return {'predictions': logits} +@VISUALIZATION_FACTORY.register(name="stereonet") def visualize_stereonet_result(logits, image, **kwargs): logits = np.array(logits['predictions'])[0] fig, (ax1, ax2) = plt.subplots(nrows=2, ncols=1) diff --git 
a/hailo_model_zoo/core/postprocessing/super_resolution_postprocessing.py b/hailo_model_zoo/core/postprocessing/super_resolution_postprocessing.py index 84b57cf0..512e59eb 100644 --- a/hailo_model_zoo/core/postprocessing/super_resolution_postprocessing.py +++ b/hailo_model_zoo/core/postprocessing/super_resolution_postprocessing.py @@ -1,7 +1,8 @@ -import tensorflow as tf -import numpy as np import cv2 +import numpy as np +import tensorflow as tf +from hailo_model_zoo.core.factory import POSTPROCESS_FACTORY, VISUALIZATION_FACTORY from hailo_model_zoo.core.preprocessing.super_resolution_preprocessing import RGB2YUV_mat, RGB2YUV_offset """ @@ -17,6 +18,8 @@ [1.59602715, -0.81296805, 0.]] +@POSTPROCESS_FACTORY.register(name="super_resolution") +@POSTPROCESS_FACTORY.register(name="super_resolution_srgan") def super_resolution_postprocessing(endnodes, device_pre_post_layers=None, **kwargs): meta_arch = kwargs['meta_arch'].lower() if 'sr_resnet' in meta_arch: @@ -68,6 +71,7 @@ def focus_on_patch(image, h_center, w_center, width): return image[h_min:h_max, w_min:w_max, :] +@VISUALIZATION_FACTORY.register(name="super_resolution_srgan") def visualize_srgan_result(logits, img, **kwargs): """ Visualizing the output of the Super-Res network compared with a naive upscaling. @@ -99,6 +103,7 @@ def visualize_srgan_result(logits, img, **kwargs): return mosaic_image +@VISUALIZATION_FACTORY.register(name="super_resolution") def visualize_super_resolution_result(logits, img, **kwargs): """ Visualizing the output of the Super-Res network compared with a naive upscaling. 
diff --git a/hailo_model_zoo/core/postprocessing/vit_pose_postprocessing.py b/hailo_model_zoo/core/postprocessing/vit_pose_postprocessing.py index 01b53627..70438410 100644 --- a/hailo_model_zoo/core/postprocessing/vit_pose_postprocessing.py +++ b/hailo_model_zoo/core/postprocessing/vit_pose_postprocessing.py @@ -1,6 +1,7 @@ -import numpy as np import cv2 +import numpy as np +from hailo_model_zoo.core.factory import POSTPROCESS_FACTORY pose_kpt_color = np.array([[0, 255, 0], [0, 255, 0], @@ -141,7 +142,9 @@ def post_dark_udp(coords, batch_heatmaps, kernel=3): dxy = 0.5 * (ix1y1 - ix1 - iy1 + i_ + i_ - ix1_ - iy1_ + ix1_y1_) hessian = np.concatenate([dxx, dxy, dxy, dyy], axis=1) hessian = hessian.reshape(N, K, 2, 2) - hessian = np.linalg.inv(hessian + np.finfo(np.float32).eps * np.eye(2)) + # we factor np.eye(2) by 10^-6 because originally we had a case of a + # too small epsilon, resulting in a non inversible matrix. + hessian = np.linalg.inv(hessian + 1e-6 * np.eye(2)) coords -= np.einsum('ijmn,ijnk->ijmk', hessian, derivative).squeeze() return coords @@ -265,6 +268,7 @@ def transform_preds(coords, center, scale, output_size, use_udp=False): return target_coords +@POSTPROCESS_FACTORY.register(name="vit_pose") def vit_pose_postprocessing(endnodes, device_pre_post_layers=None, kernel=11, **kwargs): img_info = kwargs['gt_images'] center = img_info['center'] diff --git a/hailo_model_zoo/core/preprocessing/centerpose_preprocessing.py b/hailo_model_zoo/core/preprocessing/centerpose_preprocessing.py index df0968dc..7b04e834 100644 --- a/hailo_model_zoo/core/preprocessing/centerpose_preprocessing.py +++ b/hailo_model_zoo/core/preprocessing/centerpose_preprocessing.py @@ -1,6 +1,8 @@ -import tensorflow as tf import cv2 import numpy as np +import tensorflow as tf + +from hailo_model_zoo.core.factory import PREPROCESS_FACTORY from hailo_model_zoo.core.preprocessing.affine_utils import get_affine_transform @@ -20,12 +22,13 @@ def _centerpose_preprocessing(image, height, 
width): return inp_image, center, scale +@PREPROCESS_FACTORY.register(name="centerpose") def centerpose_preprocessing(image, image_info=None, height=None, width=None, **kwargs): image_info['orig_height'], image_info['orig_width'] = tf.shape(image)[0], tf.shape(image)[1] image_info['img_orig'] = tf.image.encode_jpeg(image, quality=100) - image, center, scale = tf.py_function(_centerpose_preprocessing, - [image, height, width], - [tf.float32, tf.float32, tf.float32]) + image, center, scale = tf.numpy_function(_centerpose_preprocessing, + [image, height, width], + [tf.uint8, tf.float32, tf.float64]) image.set_shape((height, width, 3)) image_info['img_resized'] = image image_info['center'], image_info['scale'] = center, scale diff --git a/hailo_model_zoo/core/preprocessing/classification_preprocessing.py b/hailo_model_zoo/core/preprocessing/classification_preprocessing.py index ba675a40..e077046b 100644 --- a/hailo_model_zoo/core/preprocessing/classification_preprocessing.py +++ b/hailo_model_zoo/core/preprocessing/classification_preprocessing.py @@ -1,10 +1,12 @@ from __future__ import division -from past.utils import old_div -import tensorflow as tf import numpy as np +import tensorflow as tf +from past.utils import old_div from PIL import Image +from hailo_model_zoo.core.factory import PREPROCESS_FACTORY + RESIZE_SIDE = 256 VIT_RESIZE_SIDE = 248 MOBILENET_CENTRAL_FRACTION = 0.875 @@ -18,6 +20,7 @@ class PatchifyException(Exception): """Patchify exception.""" +@PREPROCESS_FACTORY.register(name="mobilenet") def mobilenet_v1(image, image_info=None, height=None, width=None, **kwargs): if image.dtype != tf.float32: image = tf.image.convert_image_dtype(image, dtype=tf.float32) @@ -122,6 +125,7 @@ def _resnet_base_preprocessing(image, output_height=None, output_width=None, res return image +@PREPROCESS_FACTORY.register(name="basic_resnet") def resnet_v1_18_34(image, image_info=None, output_height=None, output_width=None, **kwargs): image = _resnet_base_preprocessing(image, 
output_height, output_width, RESIZE_SIDE) if image_info: @@ -129,6 +133,7 @@ def resnet_v1_18_34(image, image_info=None, output_height=None, output_width=Non return image, image_info +@PREPROCESS_FACTORY.register def efficientnet(image, image_info=None, output_height=None, output_width=None, **kwargs): shape = tf.shape(image) padded_center_crop_size = tf.cast(output_height / (output_height + 32) @@ -145,6 +150,7 @@ def efficientnet(image, image_info=None, output_height=None, output_width=None, return tf.cast(image_resize, tf.float32), image_info +@PREPROCESS_FACTORY.register def fastvit(image, image_info=None, output_height=None, output_width=None, **kwargs): if output_height is not None: assert output_width is not None @@ -157,6 +163,7 @@ def fastvit(image, image_info=None, output_height=None, output_width=None, **kwa return image, image_info +@PREPROCESS_FACTORY.register def resmlp(image, image_info=None, output_height=None, output_width=None, **kwargs): # Full model in chip ''' This version of preprocessing runs the base ResMLP preprocess (Resize + CenterCrop). 
@@ -181,6 +188,7 @@ def pil_resize(image, output_height, output_width): return image_numpy +@PREPROCESS_FACTORY.register def clip(image, image_info=None, output_height=None, output_width=None, **kwargs): image = tf.numpy_function(pil_resize, [image, output_height, output_width], tf.uint8) image = tf.cast(image, tf.float32) @@ -190,6 +198,7 @@ def clip(image, image_info=None, output_height=None, output_width=None, **kwargs return image, image_info +@PREPROCESS_FACTORY.register def lprnet(image, image_info=None, output_height=None, output_width=None, **kwargs): image = tf.image.resize([image], [output_height, output_width], method='bicubic')[0] image = tf.squeeze(image) @@ -198,6 +207,7 @@ def lprnet(image, image_info=None, output_height=None, output_width=None, **kwar return image, image_info +@PREPROCESS_FACTORY.register(name="vit") def vit_tiny(image, image_info=None, output_height=None, output_width=None, **kwargs): # Full model in chip if output_height is not None: assert output_width is not None @@ -210,6 +220,7 @@ def vit_tiny(image, image_info=None, output_height=None, output_width=None, **kw return image, image_info +@PREPROCESS_FACTORY.register def resnet_pruned(image, image_info=None, output_height=None, output_width=None, **kwargs): image = _resnet_base_preprocessing(image, output_height, output_width, RESIZE_SIDE, method='bilinear') if image_info: diff --git a/hailo_model_zoo/core/preprocessing/depth_estimation_preprocessing.py b/hailo_model_zoo/core/preprocessing/depth_estimation_preprocessing.py index 6bfb9f72..523e2096 100644 --- a/hailo_model_zoo/core/preprocessing/depth_estimation_preprocessing.py +++ b/hailo_model_zoo/core/preprocessing/depth_estimation_preprocessing.py @@ -1,7 +1,11 @@ from __future__ import division + import tensorflow as tf +from hailo_model_zoo.core.factory import PREPROCESS_FACTORY + +@PREPROCESS_FACTORY.register(name="mono_depth") def mono_depth_2(image, image_info=None, output_height=None, output_width=None, **kwargs): image 
= tf.image.convert_image_dtype(image, dtype=tf.float32) if output_height and output_width: @@ -21,6 +25,7 @@ def fastdepth_transform(image): return image +@PREPROCESS_FACTORY.register def fast_depth(image, image_info=None, output_height=None, output_width=None, **kwargs): image = tf.image.convert_image_dtype(image, dtype=tf.float32) # from unit8 also divides by 255... image = fastdepth_transform(image) @@ -35,6 +40,7 @@ def fast_depth(image, image_info=None, output_height=None, output_width=None, ** return image, image_info +@PREPROCESS_FACTORY.register def scdepthv3(image, image_info=None, output_height=None, output_width=None, **kwargs): image = tf.image.convert_image_dtype(image, dtype=tf.float32) image = tf.image.resize(image, (output_height, output_width)) diff --git a/hailo_model_zoo/core/preprocessing/detection_preprocessing.py b/hailo_model_zoo/core/preprocessing/detection_preprocessing.py index f5b112f0..22789b70 100644 --- a/hailo_model_zoo/core/preprocessing/detection_preprocessing.py +++ b/hailo_model_zoo/core/preprocessing/detection_preprocessing.py @@ -2,6 +2,7 @@ import cv2 import tensorflow as tf +from hailo_model_zoo.core.factory import PREPROCESS_FACTORY from hailo_model_zoo.core.preprocessing.roi_align_wrapper import ROIAlignWrapper MAX_PADDING_LENGTH = 100 @@ -37,6 +38,8 @@ def _pad_tensor(x, max_tensor_padding=MAX_PADDING_LENGTH): return tf.squeeze(tf.pad(tf.expand_dims(x, axis=0), paddings, "CONSTANT", constant_values=-1)) +@PREPROCESS_FACTORY.register(name="centernet") +@PREPROCESS_FACTORY.register(name="smoke") def centernet_resnet_v1_18_detection(image, image_info=None, height=None, width=None, max_pad=MAX_PADDING_LENGTH, **kwargs): image_info['img_orig'] = image @@ -87,6 +90,7 @@ def centernet_resnet_v1_18_detection(image, image_info=None, height=None, width= return image, image_info +@PREPROCESS_FACTORY.register def yolo_v3(image, image_info=None, height=None, width=None, **kwargs): """This is the preprocessing used by GluonCV""" image = 
tf.cast(image, tf.float32) @@ -129,6 +133,7 @@ def resize_shrink_or_other(image, height, width): return image, image_info +@PREPROCESS_FACTORY.register def yolo_v5(image, image_info=None, height=None, width=None, scope=None, padding_color=114, **kwargs): """ @@ -186,6 +191,7 @@ def yolo_v5(image, image_info=None, height=None, width=None, return image, image_info +@PREPROCESS_FACTORY.register(name="resnet_ssd") def resnet_v1_18_detection(image, image_info=None, height=None, width=None, max_pad=MAX_PADDING_LENGTH, **kwargs): image = tf.cast(image, tf.float32) @@ -209,6 +215,7 @@ def resnet_v1_18_detection(image, image_info=None, height=None, width=None, return image, image_info +@PREPROCESS_FACTORY.register def regnet_detection(image, image_info=None, height=None, width=None, max_pad=MAX_PADDING_LENGTH, **kwargs): image = tf.cast(image, tf.float32) @@ -270,11 +277,13 @@ def ssd_base(image, image_info, resize_function, height=None, width=None, return image, image_info +@PREPROCESS_FACTORY.register def mobilenet_ssd(image, image_info, height, width, **kwargs): image, image_info = ssd_base(image, image_info, _resize_bilinear_tf, height, width, **kwargs) return image, image_info +@PREPROCESS_FACTORY.register def faster_rcnn_stage2(featuremap, image_info, height=None, width=None, max_pad=MAX_PADDING_LENGTH, **kwargs): """Prepare stage2 inputs @@ -304,6 +313,7 @@ def _resize_ar_preserving(image, height, width, **kwargs): return image, height_factor, width_factor +@PREPROCESS_FACTORY.register(name="mobilenet_ssd_ar") def mobilenet_ssd_ar_preserving(image, image_info=None, height=None, width=None, max_pad=MAX_PADDING_LENGTH, **kwargs): image, image_info = ssd_base(image, image_info, _resize_ar_preserving, @@ -347,6 +357,7 @@ def _ar_preserving_resize_and_crop(image, height, width, **kwargs): padding_w) +@PREPROCESS_FACTORY.register def face_ssd(image, image_info=None, height=None, width=None, max_pad=2048, **kwargs): if image.dtype == tf.uint8: @@ -380,6 +391,7 @@ def 
face_ssd(image, image_info=None, height=None, width=None, return image, image_info +@PREPROCESS_FACTORY.register def retinaface(image, image_info=None, height=None, width=None, max_pad=2048, **kwargs): shape = tf.shape(image) @@ -432,6 +444,7 @@ def letterbox(img, height=608, width=1088, centered=True, return img, new_width, new_height +@PREPROCESS_FACTORY.register def fair_mot(image, image_info=None, height=None, width=None, max_pad=MAX_PADDING_LENGTH, **kwargs): if height and width: @@ -463,6 +476,7 @@ def fair_mot(image, image_info=None, height=None, width=None, return image, image_info +@PREPROCESS_FACTORY.register def detr(image, image_info=None, height=800, width=800, **kwargs): image = tf.cast(image, tf.float32) shape = tf.shape(image) @@ -498,6 +512,7 @@ def detr(image, image_info=None, height=800, width=800, **kwargs): return image, image_info +@PREPROCESS_FACTORY.register def retinanet_resnext50(image, image_info=None, height=800, width=800, flip=False, image_mean=[0.485, 0.456, 0.406], image_std=[0.229, 0.224, 0.225], **kwargs): shape = tf.shape(image) diff --git a/hailo_model_zoo/core/preprocessing/face_landmarks_preprocessing.py b/hailo_model_zoo/core/preprocessing/face_landmarks_preprocessing.py index b6300a6e..b32ca21b 100644 --- a/hailo_model_zoo/core/preprocessing/face_landmarks_preprocessing.py +++ b/hailo_model_zoo/core/preprocessing/face_landmarks_preprocessing.py @@ -1,6 +1,10 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import PREPROCESS_FACTORY + +@PREPROCESS_FACTORY.register +@PREPROCESS_FACTORY.register(name="face_landmark_cnn_3d") def face_landmark_cnn(image, image_info=None, output_height=None, output_width=None, **kwargs): if output_height and output_width: image = tf.expand_dims(image, axis=0) diff --git a/hailo_model_zoo/core/preprocessing/image_denoising_preprocessing.py b/hailo_model_zoo/core/preprocessing/image_denoising_preprocessing.py index 9d5a4c8f..f57eb0c0 100644 --- 
a/hailo_model_zoo/core/preprocessing/image_denoising_preprocessing.py +++ b/hailo_model_zoo/core/preprocessing/image_denoising_preprocessing.py @@ -1,9 +1,12 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import PREPROCESS_FACTORY + NOISE_MEAN = 0 NOISE_STD = 15 +@PREPROCESS_FACTORY.register def dncnn3(image, image_info, height, width, output_shapes, **kwargs): transpose = False if tf.shape(image)[0] > tf.shape(image)[1]: diff --git a/hailo_model_zoo/core/preprocessing/lane_detection_preprocessing.py b/hailo_model_zoo/core/preprocessing/lane_detection_preprocessing.py index 72489009..100bd82d 100644 --- a/hailo_model_zoo/core/preprocessing/lane_detection_preprocessing.py +++ b/hailo_model_zoo/core/preprocessing/lane_detection_preprocessing.py @@ -1,5 +1,7 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import PREPROCESS_FACTORY + def _cast_image_info_types(image_info, image): image_info['img_orig'] = tf.cast(image, tf.uint8) @@ -7,6 +9,7 @@ def _cast_image_info_types(image_info, image): return image_info +@PREPROCESS_FACTORY.register def polylanenet(image, image_info=None, height=None, width=None, **kwargs): image = tf.cast(image, tf.float32) image_info = _cast_image_info_types(image_info, image) @@ -22,6 +25,7 @@ def polylanenet(image, image_info=None, height=None, width=None, **kwargs): return image, image_info +@PREPROCESS_FACTORY.register def laneaf(image, image_info=None, height=None, width=None, **kwargs): image = tf.cast(image, tf.float32) image_info = _cast_image_info_types(image_info, image) diff --git a/hailo_model_zoo/core/preprocessing/low_light_enhancement_preprocessing.py b/hailo_model_zoo/core/preprocessing/low_light_enhancement_preprocessing.py index 1cb68ca9..aaf34441 100644 --- a/hailo_model_zoo/core/preprocessing/low_light_enhancement_preprocessing.py +++ b/hailo_model_zoo/core/preprocessing/low_light_enhancement_preprocessing.py @@ -1,6 +1,9 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import 
PREPROCESS_FACTORY + +@PREPROCESS_FACTORY.register def zero_dce(image, image_info, height, width, output_shapes, **kwargs): """ preprocessing function for zero_dce diff --git a/hailo_model_zoo/core/preprocessing/mspn_preprocessing.py b/hailo_model_zoo/core/preprocessing/mspn_preprocessing.py index 744953d7..a022865b 100644 --- a/hailo_model_zoo/core/preprocessing/mspn_preprocessing.py +++ b/hailo_model_zoo/core/preprocessing/mspn_preprocessing.py @@ -1,8 +1,10 @@ -import tensorflow as tf +import math + import cv2 import numpy as np -import math +import tensorflow as tf +from hailo_model_zoo.core.factory import PREPROCESS_FACTORY from hailo_model_zoo.core.preprocessing.affine_utils import get_affine_transform @@ -55,6 +57,7 @@ def _get_bbox_xywh(image_info): return bbox +@PREPROCESS_FACTORY.register def mspn(image, image_info=None, height=None, width=None, **kwargs): image_info['orig_height'], image_info['orig_width'] = tf.shape(image)[0], tf.shape(image)[1] image_info['img_orig'] = tf.image.encode_jpeg(image, quality=100) @@ -62,10 +65,10 @@ def mspn(image, image_info=None, height=None, width=None, **kwargs): aspect_ratio = width / height bbox = _get_bbox_xywh(image_info) - image, center, scale = tf.py_function(_mspn_preprocessing, - [image, aspect_ratio, bbox, height, width], - [tf.float32, tf.float32, tf.float32]) - + image, center, scale = tf.numpy_function(_mspn_preprocessing, + [image, aspect_ratio, bbox, height, width], + [tf.uint8, tf.float32, tf.float32]) + image = tf.cast(image, tf.float32) image.set_shape((height, width, 3)) image_info['img_resized'] = image image_info['center'], image_info['scale'] = center, scale @@ -73,6 +76,7 @@ def mspn(image, image_info=None, height=None, width=None, **kwargs): return image, image_info +@PREPROCESS_FACTORY.register def vit_pose(image, image_info=None, height=None, width=None, **kwargs): image_info['orig_height'], image_info['orig_width'] = tf.shape(image)[0], tf.shape(image)[1] image_info['img_orig'] = 
tf.image.encode_jpeg(image, quality=100) @@ -82,9 +86,9 @@ def vit_pose(image, image_info=None, height=None, width=None, **kwargs): bbox = _get_bbox_xywh(image_info) - image, center, scale = tf.py_function(_vit_pose_preprocessing, - [image, aspect_ratio, bbox, height, width], - [tf.float32, tf.float32, tf.float32]) + image, center, scale = tf.numpy_function(_vit_pose_preprocessing, + [image, aspect_ratio, bbox, height, width], + [tf.uint8, tf.float32, tf.float32]) image.set_shape((height, width, 3)) image_info['img_resized'] = image diff --git a/hailo_model_zoo/core/preprocessing/person_reid_preprocessing.py b/hailo_model_zoo/core/preprocessing/person_reid_preprocessing.py index 36595196..2e833b27 100644 --- a/hailo_model_zoo/core/preprocessing/person_reid_preprocessing.py +++ b/hailo_model_zoo/core/preprocessing/person_reid_preprocessing.py @@ -1,6 +1,9 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import PREPROCESS_FACTORY + +@PREPROCESS_FACTORY.register(name="person_reid") def market1501(image, image_info=None, height=256, width=128, **kwargs): image = tf.cast(image, tf.float32) diff --git a/hailo_model_zoo/core/preprocessing/pose_preprocessing.py b/hailo_model_zoo/core/preprocessing/pose_preprocessing.py index bd2d560c..b42d4aab 100644 --- a/hailo_model_zoo/core/preprocessing/pose_preprocessing.py +++ b/hailo_model_zoo/core/preprocessing/pose_preprocessing.py @@ -1,7 +1,10 @@ -import tensorflow as tf +import math + import cv2 import numpy as np -import math +import tensorflow as tf + +from hailo_model_zoo.core.factory import PREPROCESS_FACTORY def _openpose_padding(img, desired_dims, pad_value=0): @@ -28,6 +31,7 @@ def _openpose_preproc(img, desired_height, desired_width): return padded_img, pad +@PREPROCESS_FACTORY.register(name="openpose") def openpose_tf_preproc(img, image_info, desired_height, desired_width, **kwargs): res_tens, pad = tf.numpy_function(_openpose_preproc, [img, desired_height, desired_width], (tf.float32, tf.int64)) @@ -66,6 
+70,7 @@ def letterbox(img, height=608, width=1088, centered=True, return img, new_width, new_height +@PREPROCESS_FACTORY.register(name="yolov8_pose") def yolo_pose(image, image_info=None, height=None, width=None, scope=None, padding_color=114, **kwargs): """ diff --git a/hailo_model_zoo/core/preprocessing/preprocessing_factory.py b/hailo_model_zoo/core/preprocessing/preprocessing_factory.py index 20545072..2cf55c9d 100644 --- a/hailo_model_zoo/core/preprocessing/preprocessing_factory.py +++ b/hailo_model_zoo/core/preprocessing/preprocessing_factory.py @@ -1,22 +1,18 @@ """Contains a factory for image preprocessing.""" +import importlib + import numpy as np import tensorflow as tf -from hailo_model_zoo.core.preprocessing import classification_preprocessing -from hailo_model_zoo.core.preprocessing import segmentation_preprocessing -from hailo_model_zoo.core.preprocessing import detection_preprocessing -from hailo_model_zoo.core.preprocessing import pose_preprocessing -from hailo_model_zoo.core.preprocessing import centerpose_preprocessing -from hailo_model_zoo.core.preprocessing import super_resolution_preprocessing -from hailo_model_zoo.core.preprocessing import lane_detection_preprocessing -from hailo_model_zoo.core.preprocessing import face_landmarks_preprocessing -from hailo_model_zoo.core.preprocessing import depth_estimation_preprocessing -from hailo_model_zoo.core.preprocessing import person_reid_preprocessing -from hailo_model_zoo.core.preprocessing import mspn_preprocessing -from hailo_model_zoo.core.preprocessing import low_light_enhancement_preprocessing -from hailo_model_zoo.core.preprocessing import stereonet_preprocessing -from hailo_model_zoo.core.preprocessing import image_denoising_preprocessing -from hailo_model_zoo.core.preprocessing import stable_diffusion_v2_preprocessing +import hailo_model_zoo.core.preprocessing +from hailo_model_zoo.core.factory import PREPROCESS_FACTORY +from hailo_model_zoo.utils.plugin_utils import iter_namespace + 
+discovered_plugins = { + name: importlib.import_module(name) + for _, name, _ + in iter_namespace(hailo_model_zoo.core.preprocessing) +} def convert_rgb_to_yuv(image): @@ -85,69 +81,17 @@ def get_preprocessing(name, height, width, normalization_params, **kwargs): Raises: ValueError: If Preprocessing `name` is not recognized. """ - preprocessing_fn_map = { - 'basic_resnet': classification_preprocessing.resnet_v1_18_34, - 'fcn_resnet': segmentation_preprocessing.resnet_v1_18, - 'fcn_resnet_bw': segmentation_preprocessing.resnet_bw_18, - 'mobilenet': classification_preprocessing.mobilenet_v1, - 'fastvit': classification_preprocessing.fastvit, - 'efficientnet': classification_preprocessing.efficientnet, - 'mobilenet_ssd': detection_preprocessing.mobilenet_ssd, - 'mobilenet_ssd_ar': detection_preprocessing.mobilenet_ssd_ar_preserving, - 'resnet_ssd': detection_preprocessing.resnet_v1_18_detection, - 'regnet_detection': detection_preprocessing.regnet_detection, - 'yolo_v3': detection_preprocessing.yolo_v3, - 'yolo_v5': detection_preprocessing.yolo_v5, - 'detr': detection_preprocessing.detr, - 'faster_rcnn_stage2': detection_preprocessing.faster_rcnn_stage2, - 'centernet': detection_preprocessing.centernet_resnet_v1_18_detection, - 'retinaface': detection_preprocessing.retinaface, - 'face_ssd': detection_preprocessing.face_ssd, - 'sr_resnet': super_resolution_preprocessing.resnet, - 'srgan': super_resolution_preprocessing.srgan, - 'zero_dce': low_light_enhancement_preprocessing.zero_dce, - 'openpose': pose_preprocessing.openpose_tf_preproc, - 'yolov8_pose': pose_preprocessing.yolo_pose, - 'centerpose': centerpose_preprocessing.centerpose_preprocessing, - 'mono_depth': depth_estimation_preprocessing.mono_depth_2, - 'polylanenet': lane_detection_preprocessing.polylanenet, - 'laneaf': lane_detection_preprocessing.laneaf, - 'fair_mot': detection_preprocessing.fair_mot, - 'face_landmark_cnn': face_landmarks_preprocessing.face_landmark_cnn, - 'smoke': 
detection_preprocessing.centernet_resnet_v1_18_detection, - 'face_landmark_cnn_3d': face_landmarks_preprocessing.face_landmark_cnn, - 'resmlp': classification_preprocessing.resmlp, - 'fast_depth': depth_estimation_preprocessing.fast_depth, - 'lprnet': classification_preprocessing.lprnet, - 'clip': classification_preprocessing.clip, - 'person_reid': person_reid_preprocessing.market1501, - 'mspn': mspn_preprocessing.mspn, - 'vit_pose': mspn_preprocessing.vit_pose, - 'vit': classification_preprocessing.vit_tiny, - 'espcn': super_resolution_preprocessing.espcn, - 'retinanet_resnext50': detection_preprocessing.retinanet_resnext50, - 'sparseinst': segmentation_preprocessing.sparseinst, - 'resnet_pruned': classification_preprocessing.resnet_pruned, - 'stereonet': stereonet_preprocessing.stereonet, - 'dncnn3': image_denoising_preprocessing.dncnn3, - 'scdepthv3': depth_estimation_preprocessing.scdepthv3, - 'stable_diffusion_v2_decoder': stable_diffusion_v2_preprocessing.vae_decoder, - 'stable_diffusion_v2_unet': stable_diffusion_v2_preprocessing.unet, - } - - if name not in preprocessing_fn_map: - raise ValueError('Preprocessing name [%s] was not recognized' % name) + + preprocessing_callback = PREPROCESS_FACTORY.get(name) flip = kwargs.pop('flip', False) yuv2rgb = kwargs.pop('yuv2rgb', False) yuy2 = kwargs.pop('yuy2', False) nv12 = kwargs.pop('nv12', False) rgbx = kwargs.pop('rgbx', False) input_resize = kwargs.pop('input_resize', {}) - if flip: - height, width = width, height def preprocessing_fn(image, image_info=None): - image, image_info = preprocessing_fn_map[name](image, image_info, height, width, flip=flip, **kwargs) + image, image_info = preprocessing_callback(image, image_info, height, width, flip=flip, **kwargs) if normalization_params: image = normalize(image, normalization_params) if input_resize.get('enabled', False): diff --git a/hailo_model_zoo/core/preprocessing/segmentation_preprocessing.py b/hailo_model_zoo/core/preprocessing/segmentation_preprocessing.py 
index 76b7a328..bf329e80 100644 --- a/hailo_model_zoo/core/preprocessing/segmentation_preprocessing.py +++ b/hailo_model_zoo/core/preprocessing/segmentation_preprocessing.py @@ -1,5 +1,6 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import PREPROCESS_FACTORY from hailo_model_zoo.core.preprocessing.detection_preprocessing import MAX_PADDING_LENGTH @@ -19,26 +20,24 @@ def _resnet_base_preprocessing(image, height=None, width=None, is_mask=False): return image -def resnet_bw_18(image, image_info=None, input_height=None, input_width=None, output_shapes=None, **kwargs): +@PREPROCESS_FACTORY.register(name="fcn_resnet_bw") +def resnet_bw_18(image, image_info=None, input_height=None, input_width=None, **kwargs): image_orig = _resnet_base_preprocessing(image, height=input_height, width=input_width) image_gray = tf.image.rgb_to_grayscale(image) image_gray = _resnet_base_preprocessing(image_gray, height=input_height, width=input_width) image_gray = tf.expand_dims(image_gray, axis=-1) if image_info and 'mask' in image_info.keys(): - assert len(output_shapes) == 1, f"expects 1 output shape but got {len(output_shapes)}" - image_info['mask'] = _resnet_base_preprocessing(image_info['mask'], height=output_shapes[0][1], - width=output_shapes[0][2], is_mask=True) + image_info['mask'] = _resnet_base_preprocessing(image_info['mask'], height=input_height, width=input_width, + is_mask=True) image_info['img_orig'] = image_orig return image_gray, image_info -def resnet_v1_18(image, image_info=None, height=None, width=None, output_shapes=None, **kwargs): +@PREPROCESS_FACTORY.register(name="fcn_resnet") +def resnet_v1_18(image, image_info=None, height=None, width=None, **kwargs): image_orig = _resnet_base_preprocessing(image, height, width) - if image_info and 'mask' in image_info.keys(): - assert len(output_shapes) == 1, f"expects 1 output shape but got {len(output_shapes)}" - image_info['mask'] = _resnet_base_preprocessing(image_info['mask'], height=output_shapes[0][1], - 
width=output_shapes[0][2], is_mask=True) + image_info['mask'] = _resnet_base_preprocessing(image_info['mask'], height=height, width=width, is_mask=True) image_info['img_orig'] = image_orig return image_orig, image_info @@ -59,6 +58,7 @@ def _get_resized_shape(size, height, width): return newh, neww +@PREPROCESS_FACTORY.register def sparseinst(image, image_info=None, height=None, width=None, max_pad=MAX_PADDING_LENGTH, **kwargs): image_resized = image if height and width: diff --git a/hailo_model_zoo/core/preprocessing/stable_diffusion_v2_preprocessing.py b/hailo_model_zoo/core/preprocessing/stable_diffusion_v2_preprocessing.py index 02e8cdee..197c03c8 100644 --- a/hailo_model_zoo/core/preprocessing/stable_diffusion_v2_preprocessing.py +++ b/hailo_model_zoo/core/preprocessing/stable_diffusion_v2_preprocessing.py @@ -1,8 +1,11 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import PREPROCESS_FACTORY + VAE_CONFIG_SCALING_FACTOR = 0.18215 +@PREPROCESS_FACTORY.register(name="stable_diffusion_v2_decoder") def vae_decoder(image, iamge_info, height, width, **kwargs): image = tf.cast(image, tf.float32) image = image / VAE_CONFIG_SCALING_FACTOR @@ -11,5 +14,6 @@ def vae_decoder(image, iamge_info, height, width, **kwargs): return image, iamge_info +@PREPROCESS_FACTORY.register(name="stable_diffusion_v2_unet") def unet(image, image_info, height, width, **kwargs): return image, image_info diff --git a/hailo_model_zoo/core/preprocessing/stereonet_preprocessing.py b/hailo_model_zoo/core/preprocessing/stereonet_preprocessing.py index 56526f84..4cfd888a 100644 --- a/hailo_model_zoo/core/preprocessing/stereonet_preprocessing.py +++ b/hailo_model_zoo/core/preprocessing/stereonet_preprocessing.py @@ -1,7 +1,11 @@ from __future__ import division + import tensorflow as tf +from hailo_model_zoo.core.factory import PREPROCESS_FACTORY + +@PREPROCESS_FACTORY.register def stereonet(images, image_info=None, output_height=None, output_width=None, flip=None, **kwargs): image_l = 
images['image_l'] image_r = images['image_r'] diff --git a/hailo_model_zoo/core/preprocessing/super_resolution_preprocessing.py b/hailo_model_zoo/core/preprocessing/super_resolution_preprocessing.py index 6eeec054..d7497d02 100644 --- a/hailo_model_zoo/core/preprocessing/super_resolution_preprocessing.py +++ b/hailo_model_zoo/core/preprocessing/super_resolution_preprocessing.py @@ -1,5 +1,7 @@ import tensorflow as tf +from hailo_model_zoo.core.factory import PREPROCESS_FACTORY + TO_BLUR = True BLUR_SIZE = 5 BLUR_MEAN = 1.0 @@ -32,6 +34,7 @@ def _blur_image(image, size=5, mean=1.0, std=0.66): return blurred +@PREPROCESS_FACTORY.register(name="sr_resnet") def resnet(hr_image, image_info=None, height=136, width=260, **kwargs): hr_image = tf.cast(hr_image, tf.float32) hr_image = tf.expand_dims(hr_image, 0) @@ -49,6 +52,7 @@ def resnet(hr_image, image_info=None, height=136, width=260, **kwargs): return lr_image, hr_image +@PREPROCESS_FACTORY.register def srgan(image, image_info, height, width, output_shapes=None, **kwargs): """ preprocessing function for srgan and div2k @@ -79,6 +83,7 @@ def srgan(image, image_info, height, width, output_shapes=None, **kwargs): return image, image_info +@PREPROCESS_FACTORY.register def espcn(image, image_info, height, width, output_shapes, **kwargs): if width and height: diff --git a/hailo_model_zoo/main.py b/hailo_model_zoo/main.py index 255b6d26..89f11e15 100755 --- a/hailo_model_zoo/main.py +++ b/hailo_model_zoo/main.py @@ -1,157 +1,88 @@ #!/usr/bin/env python import argparse import importlib -from pathlib import Path -import hailo_model_zoo.plugin # we try to minimize imports to make 'main.py --help' responsive. So we only import definitions. 
- -from hailo_model_zoo.utils.cli_utils import HMZ_COMMANDS, OneResizeValueAction, add_model_name_arg -from hailo_model_zoo.utils.constants import DEVICE_NAMES, TARGETS +import hailo_model_zoo.plugin +from hailo_model_zoo.base_parsers import ( + make_evaluation_base, + make_hef_base, + make_optimization_base, + make_parsing_base, + make_profiling_base, +) +from hailo_model_zoo.utils.cli_utils import HMZ_COMMANDS from hailo_model_zoo.utils.plugin_utils import iter_namespace from hailo_model_zoo.utils.version import get_version - discovered_plugins = { - name: importlib.import_module(name) - for finder, name, ispkg - in iter_namespace(hailo_model_zoo.plugin) + name: importlib.import_module(name) for finder, name, ispkg in iter_namespace(hailo_model_zoo.plugin) } -def _make_parsing_base(): - parsing_base_parser = argparse.ArgumentParser(add_help=False) - config_group = parsing_base_parser.add_mutually_exclusive_group() - add_model_name_arg(config_group, optional=True) - config_group.add_argument( - '--yaml', type=str, default=None, dest='yaml_path', - help='Path to YAML for network configuration. By default using the default configuration') - parsing_base_parser.add_argument( - '--ckpt', type=str, default=None, dest='ckpt_path', - help='Path to onnx or ckpt to use for parsing. By default using the model cache location') - parsing_base_parser.add_argument( - '--hw-arch', type=str, default='hailo8', metavar='', choices=['hailo8', 'hailo8l', 'hailo15h'], - help='Which hw arch to run: hailo8 / hailo8l/ hailo15h. 
By default using hailo8.') - parsing_base_parser.set_defaults(results_dir=Path('./')) - return parsing_base_parser - - -def _make_optimization_base(): - optimization_base_parser = argparse.ArgumentParser(add_help=False) - optimization_base_parser.add_argument( - '--har', type=str, default=None, help='Use external har file', dest='har_path') - optimization_base_parser.add_argument( - '--calib-path', type=Path, - help='Path to external tfrecord for calibration or a directory containing \ - images in jpg or png format', - ) - optimization_base_parser.add_argument( - '--model-script', type=str, default=None, dest='model_script_path', - help='Path to model script to use. By default using the model script specified' - ' in the network YAML configuration') - optimization_base_parser.add_argument( - '--performance', action='store_true', - help='Enable flag for benchmark performance') - optimization_base_parser.add_argument( - '--resize', type=int, nargs='+', action=OneResizeValueAction, - help='Add input resize from given [h,w]') - optimization_base_parser.add_argument( - '--input-conversion', type=str, - choices=['nv12_to_rgb', 'yuy2_to_rgb', 'rgbx_to_rgb'], - help='Add input conversion from given type') - return optimization_base_parser - - -def _make_hef_base(): - hef_base_parser = argparse.ArgumentParser(add_help=False) - hef_base_parser.add_argument( - '--hef', type=str, default=None, help='Use external HEF files', dest='hef_path') - return hef_base_parser - - -def _make_profiling_base(): - profile_base_parser = argparse.ArgumentParser(add_help=False) - return profile_base_parser - - -def _make_evaluation_base(): - evaluation_base_parser = argparse.ArgumentParser(add_help=False) - targets = TARGETS - devices = ', '.join(DEVICE_NAMES) - evaluation_base_parser.add_argument( - '--target', type=str, choices=targets, metavar='', default='full_precision', - help='Which target to run: full_precision (GPU) / emulator (GPU) / hailo8 (PCIe).\n' - f'A specific hailo8 device may 
be specified. Available devices: {devices}') - - evaluation_base_parser.add_argument( - '--batch-size', type=int, - help='Batch size for INFERENCE (evaluation and pre-quant stats collection) only ' - '(feel free to increase to whatever your GPU can handle). ' - ' the quant-aware optimizers s.a. QFT & IBC use the calibration batch size parameter from the ALLS' - ) - - evaluation_base_parser.add_argument( - '--data-count', type=int, default=None, dest='eval_num_examples', - help='Maximum number of images to use for evaluation') - - evaluation_base_parser.add_argument( - '--visualize', action='store_true', dest='visualize_results', - help='Run visualization without evaluation. The default value is False', - ) - evaluation_base_parser.add_argument( - '--video-outpath', - help='Make a video from the visualizations and save it to this path', - ) - evaluation_base_parser.add_argument( - '--data-path', type=Path, - help='Path to external tfrecord for evaluation. In case you use --visualize \ - you can give a directory of images in jpg or png format', - ) - evaluation_base_parser.set_defaults(print_num_examples=1e9, - visualize_results=False, - use_lite_inference=False, - use_service=False, - ) - return evaluation_base_parser - - def _create_args_parser(): # --- create shared arguments parsers - parsing_base_parser = _make_parsing_base() - optimization_base_parser = _make_optimization_base() - hef_base_parser = _make_hef_base() - profile_base_parser = _make_profiling_base() - evaluation_base_parser = _make_evaluation_base() - version = get_version('hailo_model_zoo') + parsing_base_parser = make_parsing_base() + optimization_base_parser = make_optimization_base() + hef_base_parser = make_hef_base() + profile_base_parser = make_profiling_base() + evaluation_base_parser = make_evaluation_base() + version = get_version("hailo_model_zoo") # --- create per action subparser - parser = argparse.ArgumentParser(epilog='Example: hailomz parse resnet_v1_50') - 
parser.add_argument('--version', action='version', - version=f'Hailo Model Zoo v{version}') + parser = argparse.ArgumentParser(epilog="Example: hailomz parse resnet_v1_50") + parser.add_argument("--version", action="version", version=f"Hailo Model Zoo v{version}") # can't set the entry point for each subparser as it forces us to add imports which slow down the startup time. # instead we'll check the 'command' argument after parsing - subparsers = parser.add_subparsers(dest='command') - subparsers.add_parser('parse', parents=[parsing_base_parser], - help="model translation of the input model into Hailo's internal representation.") + subparsers = parser.add_subparsers(dest="command") + subparsers.add_parser( + "parse", + parents=[parsing_base_parser], + help="model translation of the input model into Hailo's internal representation.", + ) - subparsers.add_parser('optimize', parents=[parsing_base_parser, optimization_base_parser], - help="run model optimization which includes numeric translation of \ - the input model into a compressed integer representation.") + subparsers.add_parser( + "optimize", + parents=[parsing_base_parser, optimization_base_parser], + help="run model optimization which includes numeric translation of \ + the input model into a compressed integer representation.", + ) - compile_help = ("run the Hailo compiler to generate the Hailo Executable Format file (HEF)" - " which can be executed on the Hailo hardware.") - subparsers.add_parser('compile', parents=[parsing_base_parser, optimization_base_parser], - help=compile_help) + compile_help = ( + "run the Hailo compiler to generate the Hailo Executable Format file (HEF)" + " which can be executed on the Hailo hardware." + ) + subparsers.add_parser( + "compile", + parents=[parsing_base_parser, optimization_base_parser], + help=compile_help, + ) - profile_help = ("generate profiler report of the model." 
- " The report contains information about your model and expected performance on the Hailo hardware.") - subparsers.add_parser('profile', parents=[ - parsing_base_parser, optimization_base_parser, hef_base_parser, profile_base_parser], - help=profile_help) + profile_help = ( + "generate profiler report of the model." + " The report contains information about your model and expected performance on the Hailo hardware." + ) + subparsers.add_parser( + "profile", + parents=[ + parsing_base_parser, + optimization_base_parser, + hef_base_parser, + profile_base_parser, + ], + help=profile_help, + ) - subparsers.add_parser('eval', parents=[ - parsing_base_parser, optimization_base_parser, hef_base_parser, evaluation_base_parser], - help="infer the model using the Hailo Emulator or the Hailo hardware and produce the model accuracy.") + subparsers.add_parser( + "eval", + parents=[ + parsing_base_parser, + optimization_base_parser, + hef_base_parser, + evaluation_base_parser, + ], + help="infer the model using the Hailo Emulator or the Hailo hardware and produce the model accuracy.", + ) # add parsers for plugins for command in HMZ_COMMANDS: @@ -167,13 +98,14 @@ def run(args): return command_to_handler[args.command](args) # we make sure to only import these now to keep loading & plugins fast - from hailo_model_zoo.main_driver import parse, optimize, compile, profile, evaluate + from hailo_model_zoo.main_driver import compile, evaluate, optimize, parse, profile + handlers = { - 'parse': parse, - 'optimize': optimize, - 'compile': compile, - 'profile': profile, - 'eval': evaluate, + "parse": parse, + "optimize": optimize, + "compile": compile, + "profile": profile, + "eval": evaluate, } return handlers[args.command](args) @@ -190,5 +122,5 @@ def main(): run(args) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/hailo_model_zoo/main_driver.py b/hailo_model_zoo/main_driver.py index b9dc7bc0..6a6712b9 100644 --- a/hailo_model_zoo/main_driver.py +++ 
b/hailo_model_zoo/main_driver.py @@ -6,14 +6,15 @@ except ModuleNotFoundError: HEF_EXISTS = False +from hailo_sdk_common.targets.inference_targets import SdkFPOptimized, SdkPartialNumeric +from hailo_sdk_common.logger.logger import DeprecationVersion from hailo_sdk_client import ClientRunner, InferenceContext from hailo_sdk_client.exposed_definitions import States from hailo_sdk_client.tools.profiler.react_report_generator import ReactReportGenerator -from hailo_sdk_common.targets.inference_targets import SdkFPOptimized, SdkPartialNumeric from hailo_model_zoo.core.main_utils import (compile_model, get_hef_path, get_integrated_postprocessing, get_network_info, infer_model_tf1, infer_model_tf2, optimize_full_precision_model, optimize_model, parse_model, - resolve_alls_path) + prepare_calibration_data, resolve_alls_path) from hailo_model_zoo.utils.hw_utils import DEVICE_NAMES, DEVICES, INFERENCE_TARGETS, TARGETS from hailo_model_zoo.utils.logger import get_logger from hailo_model_zoo.utils.path_resolver import get_network_performance @@ -42,8 +43,14 @@ def _extract_model_script_path(networks_alls_script, model_script_path, hw_arch, def _ensure_compiled(runner, logger, args, network_info): if runner.state == States.COMPILED_MODEL or runner.hef: return - logger.info("Compiling the model (without inference) ...") - compile_model(runner, network_info, args.results_dir, allocator_script_filename=args.model_script_path) + logger.info("Compiling the model...") + model_name = network_info.network.network_name + model_script = _extract_model_script_path(network_info.paths.alls_script, + args.model_script_path, args.hw_arch, args.performance) + _ensure_performance(model_name, model_script, args.performance, logger) + + compile_model(runner, network_info, args.results_dir, allocator_script_filename=model_script, + performance=args.performance) def _ensure_optimized(runner, logger, args, network_info): @@ -61,8 +68,10 @@ def _ensure_optimized(runner, logger, args, network_info): 
args.hw_arch, args.performance) _ensure_performance(network_info.network.network_name, model_script, args.performance, logger) - optimize_model(runner, logger, network_info, args.calib_path, args.results_dir, - model_script=model_script, resize=args.resize, input_conversion=args.input_conversion) + calib_feed_callback = prepare_calibration_data(runner, network_info, args.calib_path, logger, + args.input_conversion, args.resize) + optimize_model(runner, calib_feed_callback, logger, network_info, args.results_dir, model_script, args.resize, + args.input_conversion, args.classes) def _ensure_parsed(runner, logger, network_info, args): @@ -90,9 +99,11 @@ def _ensure_runnable_state_tf1(args, logger, network_info, runner, target): _ensure_parsed(runner, logger, network_info, args) if isinstance(target, SdkFPOptimized) or (isinstance(target, PcieDevice) and args.hef_path is not None): if runner.state == States.HAILO_MODEL: + calib_feed_callback = prepare_calibration_data(runner, network_info, args.calib_path, logger, + args.input_conversion, args.resize) integrated_postprocessing = get_integrated_postprocessing(network_info) if integrated_postprocessing and integrated_postprocessing.enabled: - runner.optimize_full_precision() + runner.optimize_full_precision(calib_data=calib_feed_callback) return None if not args.hef_path else configure_hef_tf1(args.hef_path, target) # We intentionally use base model script and assume its modifications # compatible to the performance model script @@ -101,8 +112,8 @@ def _ensure_runnable_state_tf1(args, logger, network_info, runner, target): args.hw_arch, performance=False) - optimize_full_precision_model(runner, model_script=model_script, resize=args.resize, - input_conversion=args.input_conversion) + optimize_full_precision_model(runner, calib_feed_callback, logger, model_script, args.resize, + args.input_conversion, args.classes) return None if not args.hef_path else configure_hef_tf1(args.hef_path, target) @@ -133,8 +144,10 @@ def 
_ensure_runnable_state_tf2(args, logger, network_info, runner, target): args.model_script_path, args.hw_arch, False) - optimize_full_precision_model(runner, model_script=model_script, resize=args.resize, - input_conversion=args.input_conversion) + calib_feed_callback = prepare_calibration_data(runner, network_info, args.calib_path, logger, + args.input_conversion, args.resize) + optimize_full_precision_model(runner, calib_feed_callback, logger, model_script, args.resize, + args.input_conversion, args.classes) configure_hef_tf2(runner, args.hef_path) return @@ -156,7 +169,8 @@ def _hailo8l_warning(hw_arch, logger): def parse(args): logger = get_logger() - network_info = get_network_info(args.model_name, yaml_path=args.yaml_path) + nodes = [args.start_node_names, args.end_node_names] + network_info = get_network_info(args.model_name, yaml_path=args.yaml_path, nodes=nodes) model_name = network_info.network.network_name logger.info(f'Start run for network {model_name} ...') @@ -167,7 +181,8 @@ def parse(args): def optimize(args): logger = get_logger() - network_info = get_network_info(args.model_name, yaml_path=args.yaml_path) + nodes = [args.start_node_names, args.end_node_names] + network_info = get_network_info(args.model_name, yaml_path=args.yaml_path, nodes=nodes) model_name = network_info.network.network_name logger.info(f'Start run for network {model_name} ...') @@ -185,13 +200,16 @@ def optimize(args): args.hw_arch, args.performance) _ensure_performance(model_name, model_script, args.performance, logger) - optimize_model(runner, logger, network_info, args.calib_path, args.results_dir, - model_script=model_script, resize=args.resize, input_conversion=args.input_conversion) + calib_feed_callback = prepare_calibration_data(runner, network_info, args.calib_path, logger, + args.input_conversion, args.resize) + optimize_model(runner, calib_feed_callback, logger, network_info, args.results_dir, model_script, args.resize, + args.input_conversion, args.classes) def 
compile(args): logger = get_logger() - network_info = get_network_info(args.model_name, yaml_path=args.yaml_path) + nodes = [args.start_node_names, args.end_node_names] + network_info = get_network_info(args.model_name, yaml_path=args.yaml_path, nodes=nodes) model_name = network_info.network.network_name logger.info(f'Start run for network {model_name} ...') @@ -203,14 +221,19 @@ def compile(args): model_script = _extract_model_script_path(network_info.paths.alls_script, args.model_script_path, args.hw_arch, args.performance) _ensure_performance(model_name, model_script, args.performance, logger) - compile_model(runner, network_info, args.results_dir, model_script) + compile_model(runner, network_info, args.results_dir, model_script, performance=args.performance) logger.info(f'HEF file written to {get_hef_path(args.results_dir, network_info.network.network_name)}') def profile(args): logger = get_logger() - network_info = get_network_info(args.model_name, yaml_path=args.yaml_path) + logger.deprecation_warning(( + "'profile' command is deprecated and will be removed in future release." 
+ " Please use 'hailo profiler' tool instead."), + DeprecationVersion.FUTURE) + nodes = [args.start_node_names, args.end_node_names] + network_info = get_network_info(args.model_name, yaml_path=args.yaml_path, nodes=nodes) model_name = network_info.network.network_name logger.info(f'Start run for network {model_name} ...') @@ -225,8 +248,10 @@ def profile(args): args.hw_arch, args.performance) _ensure_performance(model_name, model_script, args.performance, logger) - optimize_full_precision_model(runner, model_script=model_script, resize=args.resize, - input_conversion=args.input_conversion) + calib_feed_callback = prepare_calibration_data(runner, network_info, args.calib_path, logger, + args.input_conversion, args.resize) + optimize_full_precision_model(runner, calib_feed_callback, logger, model_script, args.resize, + args.input_conversion, args.classes) export = runner.profile(should_use_logical_layers=True, hef_filename=args.hef_path) outpath = args.results_dir / f'{model_name}.html' @@ -239,14 +264,12 @@ def profile(args): def evaluate(args): logger = get_logger() - if args.target == "hailo8": - logger.warning("Using 'hailo8' as target will be deprecated please use 'hardware'") - if args.target in ['hailo8', 'hardware'] and not HEF_EXISTS: + if args.target == 'hardware' and not HEF_EXISTS: raise ModuleNotFoundError( f"HailoRT is not available, in case you want to run on {args.target} you should install HailoRT first") - if (args.hw_arch == 'hailo15h' and args.target in ['hailo8', 'hardware']) and not args.use_service: + if (args.hw_arch == ['hailo15h', 'hailo15m'] and args.target == 'hardware') and not args.use_service: raise ValueError("Evaluation of hw_arch hailo15h is currently not supported in the Hailo Model Zoo") if args.hef_path and not HEF_EXISTS: @@ -254,24 +277,22 @@ def evaluate(args): "HailoRT is not available, in case you want to evaluate with hef you should install HailoRT first") hardware_targets = set(DEVICE_NAMES) - 
hardware_targets.update(['hailo8', 'hardware']) + hardware_targets.update(['hardware']) if args.hef_path and args.target not in hardware_targets: raise ValueError( - f"hef is not used when evaluating with {args.target}. use --target hailo8 for evaluating with a hef.") + f"hef is not used when evaluating with {args.target}. use --target hardware for evaluating with a hef.") if args.video_outpath and not args.visualize_results: raise ValueError( "The --video-output argument requires --visualize argument") - network_info = get_network_info(args.model_name, yaml_path=args.yaml_path) + nodes = [args.start_node_names, args.end_node_names] + network_info = get_network_info(args.model_name, yaml_path=args.yaml_path, nodes=nodes) if args.data_path is None and network_info.evaluation.data_set is None: raise ValueError( "Cannot run evaluation without dataset. use --data-path to provide external dataset.") model_name = network_info.network.network_name - if args.hw_arch == "hailo15h" and args.target == "hailo8": - raise ValueError( - "Cannot run hailo15h compiled hef on hailo8.") logger.info(f'Start run for network {model_name} ...') logger.info('Initializing the runner...') @@ -303,10 +324,14 @@ def evaluate(args): _ensure_runnable_state_tf2(args, logger, network_info, runner, target) device_info = DEVICES.get(args.target) - context = runner.infer_context(target, device_info) + # overrides nms score threshold if postprocess on-host + nms_score_threshold = (network_info['postprocessing'].get('score_threshold', None) + if network_info['postprocessing']['hpp'] else None) + context = runner.infer_context(target, device_info, nms_score_threshold=nms_score_threshold) return infer_model_tf2(runner, network_info, context, logger, args.eval_num_examples, args.data_path, batch_size, args.print_num_examples, args.visualize_results, args.video_outpath, - args.use_lite_inference, dump_results=False) + args.use_lite_inference, dump_results=False, 
input_conversion_args=args.input_conversion, + resize_args=args.resize) def __get_batch_size(network_info, target): diff --git a/hailo_model_zoo/multi_main.py b/hailo_model_zoo/multi_main.py index 23a6452e..5e46447a 100644 --- a/hailo_model_zoo/multi_main.py +++ b/hailo_model_zoo/multi_main.py @@ -12,6 +12,7 @@ get_network_info, optimize_model, load_model, + prepare_calibration_data, resolve_alls_path ) from hailo_model_zoo.utils.logger import get_logger @@ -32,8 +33,8 @@ def get_quantized_model(model_name, network_info, results_dir): logger.info("Start Optimization...") model_script = resolve_alls_path(network_info.paths.alls_script, performance="base") - optimize_model(runner, logger, network_info, calib_path=None, results_dir=results_dir, - model_script=model_script) + calib_feed_callback = prepare_calibration_data(runner, network_info, None, logger) + optimize_model(runner, calib_feed_callback, logger, network_info, results_dir=results_dir, model_script=model_script) return runner diff --git a/hailo_model_zoo/utils/completions.py b/hailo_model_zoo/utils/completions.py new file mode 100644 index 00000000..8baef632 --- /dev/null +++ b/hailo_model_zoo/utils/completions.py @@ -0,0 +1,51 @@ +ALLS_COMPLETE = { + "bash": "_shtab_greeter_compgen_ALLSFiles", + "zsh": "_files -g '(*.alls|*.ALLS)'", + "tcsh": "f:*.alls", +} + +CKPT_COMPLETE = { + "bash": "_shtab_greeter_compgen_CKPTFiles", + "zsh": "", + "tcsh": "", +} +HAR_COMPLETE = { + "bash": "_shtab_greeter_compgen_HARFiles", + "zsh": "_files -g '(*.har|*.HAR)'", + "tcsh": "f:*.har", +} +HEF_COMPLETE = { + "bash": "_shtab_greeter_compgen_HEFFiles", + "zsh": "_files -g '(*.hef|*.HEF)'", + "tcsh": "f:*.hef", +} + +JSON_COMPLETE = { + "bash": "_shtab_greeter_compgen_JSONFiles", + "zsh": "_files -g '(*.json|*.JSON)'", + "tcsh": "f:*.json", +} + +TFRECORD_COMPLETE = { + "bash": "_shtab_greeter_compgen_TFRECORDFiles", + "zsh": "_files -g '(*.tfrecord|*.TFRECORD)'", + "tcsh": "f:*.tfrecord", +} + +YAML_COMPLETE = { + "bash": 
"_shtab_greeter_compgen_YAMLFiles", + "zsh": "_files -g '(*.yaml|*.YAML)'", + "tcsh": "f:*.yaml", +} + +DEVICE_COMPLETE = { + "bash": "_shtab_compgen_hailo_device", + "zsh": "", + "tcsh": "", +} + +FILE_COMPLETE = { + "bash": "_shtab_compgen_files", + "zsh": "_files", + "tcsh": "f", +} diff --git a/hailo_model_zoo/utils/constants.py b/hailo_model_zoo/utils/constants.py index e4b3fd16..1b753c48 100644 --- a/hailo_model_zoo/utils/constants.py +++ b/hailo_model_zoo/utils/constants.py @@ -2,7 +2,6 @@ TARGETS = [ - 'hailo8', 'hardware', 'full_precision', 'emulator', diff --git a/hailo_model_zoo/utils/data.py b/hailo_model_zoo/utils/data.py index 267ef89b..fce7250a 100644 --- a/hailo_model_zoo/utils/data.py +++ b/hailo_model_zoo/utils/data.py @@ -1,8 +1,8 @@ -from builtins import object import os import cv2 import numpy as np import tensorflow as tf +from pathlib import Path from hailo_model_zoo.core.datasets import dataset_factory from hailo_model_zoo.utils.video_utils import VideoCapture @@ -49,11 +49,10 @@ def _parse_video_frame(image, name): 'mask': tf.image.rgb_to_grayscale(image)} -def _video_generator(video_path): +def _video_generator(video_path: Path): def _video_generator_implementation(): - filename = os.path.basename(video_path) - base, _ = os.path.splitext(filename) - with VideoCapture(video_path) as cap: + base = video_path.stem + with VideoCapture(str(video_path)) as cap: total_frames = int(cap.get(cv2.CAP_PROP_FRAME_COUNT)) required_digits = len(str(total_frames)) number_format = '{{:0{}d}}'.format(required_digits) @@ -70,7 +69,7 @@ def _video_generator_implementation(): return _video_generator_implementation -class DataFeed(object): +class DataFeed: """DataFeed class. 
Use this class to handle input data""" def __init__(self, preprocessing_callback, batch_size=8): @@ -133,7 +132,13 @@ class VideoFeed(DataFeed): def __init__(self, preprocessing_callback, batch_size, file_path): super().__init__(preprocessing_callback, batch_size=batch_size) - dataset = tf.data.Dataset.from_generator(_video_generator(file_path), (tf.float32, tf.string)) + dataset = tf.data.Dataset.from_generator( + _video_generator(file_path), + output_signature=( + tf.TensorSpec(shape=(None, None, 3), dtype=tf.float32), + (tf.TensorSpec(shape=(), dtype=tf.string)), + ), + ) dataset = dataset.map(_parse_video_frame) if self._preproc_callback: dataset = dataset.map(self._preproc_callback) diff --git a/hailo_model_zoo/utils/factory_utils.py b/hailo_model_zoo/utils/factory_utils.py new file mode 100644 index 00000000..a5f9ca91 --- /dev/null +++ b/hailo_model_zoo/utils/factory_utils.py @@ -0,0 +1,32 @@ +from typing import Any, Optional + + +class Factory: + def __init__(self, name: str) -> None: + self._name = name + self._name_to_callable = {} + + def _register_impl(self, name: str, callable: Any) -> None: + assert name not in self._name_to_callable, f"'{name}' already registered in '{self._name}' registry!" 
+ self._name_to_callable[name] = callable + + def register(self, callable: Any = None, *, name: Optional[str] = None) -> Any: + if callable is None: + # used as a decorator + def wrapper(func_or_class: Any) -> Any: + final_name = name or func_or_class.__name__ + self._register_impl(final_name, func_or_class) + return func_or_class + + return wrapper + + # used as a function call + final_name = name or callable.__name__ + self._register_impl(final_name, callable) + return callable + + def get(self, name: str) -> Any: + ret = self._name_to_callable.get(name) + if ret is None: + raise ValueError(f"'{name}' not recognized in '{self._name}' factory ") + return ret diff --git a/hailo_model_zoo/utils/hw_utils.py b/hailo_model_zoo/utils/hw_utils.py index 881d40f3..269b64e7 100644 --- a/hailo_model_zoo/utils/hw_utils.py +++ b/hailo_model_zoo/utils/hw_utils.py @@ -8,14 +8,12 @@ from hailo_platform.pyhailort._pyhailort import HailoRTStatusException TARGETS = { - 'hailo8': Device if PLATFORM_AVAILABLE else None, 'hardware': Device if PLATFORM_AVAILABLE else None, 'full_precision': SdkFPOptimized, 'emulator': SdkPartialNumeric, } INFERENCE_TARGETS = { - 'hailo8': InferenceContext.SDK_HAILO_HW, 'hardware': InferenceContext.SDK_HAILO_HW, 'full_precision': InferenceContext.SDK_FP_OPTIMIZED, 'emulator': InferenceContext.SDK_QUANTIZED, diff --git a/hailo_model_zoo/utils/logger.py b/hailo_model_zoo/utils/logger.py index 98bc50b3..c9640b3d 100644 --- a/hailo_model_zoo/utils/logger.py +++ b/hailo_model_zoo/utils/logger.py @@ -15,6 +15,9 @@ class HailoExamplesFormatter(logging.Formatter): logging.WARNING: '33;1m', # bold yellow logging.ERROR: '31;1m', # bold red logging.CRITICAL: '41;1m', # bold white on red + # This has to match hailo_sdk_common.logger.logger.DEPRECATION_WARNING. + # Unfortunately we can't import it here because it's too slow. 
+ logging.WARNING - 1: '33;21m', # yellow } def format(self, record): diff --git a/hailo_model_zoo/utils/numpy_utils.py b/hailo_model_zoo/utils/numpy_utils.py index 812ef0e6..bcf8d964 100644 --- a/hailo_model_zoo/utils/numpy_utils.py +++ b/hailo_model_zoo/utils/numpy_utils.py @@ -5,14 +5,18 @@ def to_numpy(tensor, *, decode=False): if isinstance(tensor, np.ndarray): return tensor - if hasattr(tensor, 'numpy'): + if hasattr(tensor, "numpy"): result = tensor.numpy() if decode: - result = result.decode('utf8') + result = result.decode("utf8") return result if isinstance(tensor, str): return tensor + if isinstance(tensor, bytes): + if decode: + tensor = tensor.decode("utf8") + return tensor if isinstance(tensor, dict): - return {k: v.numpy() if hasattr(v, 'numpy') else v for k, v in tensor.items()} + return {k: v.numpy() if hasattr(v, "numpy") else v for k, v in tensor.items()} return tf.nest.map_structure(to_numpy, tensor) diff --git a/hailo_model_zoo/utils/parse_utils.py b/hailo_model_zoo/utils/parse_utils.py index 3e77f7b0..9863e059 100644 --- a/hailo_model_zoo/utils/parse_utils.py +++ b/hailo_model_zoo/utils/parse_utils.py @@ -18,9 +18,9 @@ def translate_model(runner, network_info, ckpt_path, *, tensor_shapes=None): model_name = network_info.network.network_name start_node, end_node = network_info.parser.nodes[0:2] - if type(end_node) == str: + if type(end_node) is str: end_node = [end_node] - if type(start_node) == str: + if type(start_node) is str: start_node = [start_node] ckpt_path = str(ckpt_path) diff --git a/hailo_models/license_plate_detection/docs/TRAINING_GUIDE.rst b/hailo_models/license_plate_detection/docs/TRAINING_GUIDE.rst index 572bdd66..2aa5feb2 100644 --- a/hailo_models/license_plate_detection/docs/TRAINING_GUIDE.rst +++ b/hailo_models/license_plate_detection/docs/TRAINING_GUIDE.rst @@ -11,7 +11,7 @@ Prerequisites * nvidia-docker2 (\ `installation instructions `_\ ) -**NOTE:** In case you are using the Hailo Software Suite docker, make sure to run 
all of the following instructions outside of that docker. +**NOTE:**\ In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. Environment Preparations @@ -21,13 +21,12 @@ Environment Preparations #. **Build the docker image** - .. raw:: html - :name:validation + .. code-block:: -


-      cd hailo_model_zoo/hailo_models/license_plate_detection/
+      
+      cd hailo_model_zoo/hailo_models/license_plate_detection/
       docker build --build-arg timezone=`cat /etc/timezone` -t license_plate_detection:v0 .
-      
+ * This command will build the docker image with the necessary requirements using the Dockerfile exists in this directory. @@ -35,12 +34,11 @@ Environment Preparations #. **Start your docker:** - .. raw:: html - :name:validation + .. code-block:: - - docker run --name "your_docker_name" -it --gpus all --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir license_plate_detection:v0 - + + docker run --name "your_docker_name" -it --gpus all --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir license_plate_detection:v0 + * ``docker run`` create a new docker container. @@ -79,14 +77,13 @@ Finetuning and exporting to ONNX * Start training on your dataset starting from our pre-trained weights in ``tiny_yolov4_license_plates.weights`` (or download it from `here `_\ ) - .. raw:: html - :name:validation + .. code-block:: - - ./darknet detector train data/obj.data ./cfg/tiny_yolov4_license_plates.cfg tiny_yolov4_license_plates.weights -map -clear - + + ./darknet detector train data/obj.data ./cfg/tiny_yolov4_license_plates.cfg tiny_yolov4_license_plates.weights -map -clear + - **NOTE:** If during training you get an error similar to + **NOTE:**\ If during training you get an error similar to .. code-block:: @@ -104,12 +101,11 @@ Finetuning and exporting to ONNX #. | **Export to ONNX** | Export the model to ONNX using the following command: - .. raw:: html - :name:validation + .. code-block:: - - python ../pytorch-YOLOv4/demo_darknet2onnx.py cfg/tiny_yolov4_license_plates.cfg /path/to/trained.weights /path/to/some/image.jpg 1 - + + python ../pytorch-YOLOv4/demo_darknet2onnx.py cfg/tiny_yolov4_license_plates.cfg /path/to/trained.weights /path/to/some/image.jpg 1 + ---- @@ -119,16 +115,16 @@ Compile the Model using Hailo Model Zoo You can generate an HEF file for inference on Hailo-8 from your trained ONNX model. In order to do so you need a working model-zoo environment. Choose the model YAML from our networks configuration directory, i.e. 
``hailo_model_zoo/cfg/networks/tiny_yolov4_license_plates.yaml``\ , and run compilation using the model zoo: -.. raw:: html - :name:validation +.. code-block:: - - hailomz compile --ckpt tiny_yolov4_license_plates_1_416_416.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/tiny_yolov4_license_plates.yaml - + + hailomz compile --ckpt tiny_yolov4_license_plates_1_416_416.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/tiny_yolov4_license_plates.yaml --start-node-names name1 name2 --end-node-names name1 + * | ``--ckpt`` - path to your ONNX file. * | ``--calib-path`` - path to a directory with your calibration images in JPEG/png format * | ``--yaml`` - path to your configuration YAML file. +* | ``--start-node-names`` and ``--end-node-names`` - node names for customizing parsing behavior (optional). * | The model zoo will take care of adding the input normalization to be part of the model. .. note:: diff --git a/hailo_models/license_plate_recognition/README.rst b/hailo_models/license_plate_recognition/README.rst index d5448a84..eed1715a 100644 --- a/hailo_models/license_plate_recognition/README.rst +++ b/hailo_models/license_plate_recognition/README.rst @@ -30,7 +30,7 @@ Architecture Inputs ^^^^^^ -* RGB liscense plate image with size of 75x300x3 +* RGB license plate image with size of 75x300x3 * Image normalization occurs on-chip diff --git a/hailo_models/license_plate_recognition/docs/TRAINING_GUIDE.rst b/hailo_models/license_plate_recognition/docs/TRAINING_GUIDE.rst index e5cb70de..ca85635e 100644 --- a/hailo_models/license_plate_recognition/docs/TRAINING_GUIDE.rst +++ b/hailo_models/license_plate_recognition/docs/TRAINING_GUIDE.rst @@ -11,7 +11,7 @@ Prerequisites * nvidia-docker2 (\ `installation instructions `_\ ) -**NOTE:** In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. 
+**NOTE:**\ In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. Environment Preparations @@ -21,25 +21,23 @@ Environment Preparations #. **Build the docker image** - .. raw:: html - :name:validation + .. code-block:: -

-      cd hailo_model_zoo/hailo_models/license_plate_recognition/
+      
+      cd hailo_model_zoo/hailo_models/license_plate_recognition/
       docker build  --build-arg timezone=`cat /etc/timezone` -t license_plate_recognition:v0 .
-      
+ * This command will build the docker image with the necessary requirements using the Dockerfile that exists in this directory. #. **Start your docker:** - .. raw:: html - :name:validation + .. code-block:: - - docker run --name "your_docker_name" -it --gpus all --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir license_plate_recognition:v0 - + + docker run --name "your_docker_name" -it --gpus all --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir license_plate_recognition:v0 + * ``docker run`` create a new docker container. @@ -62,7 +60,7 @@ Finetuning and exporting to ONNX * Create a folder with license plates images for training and testing. The folder should contain images whose file names correspond to the plate number, e.g. ``12345678.png``. - **NOTE:** Please make sure the file names **do not** contain characters which are not numbers or letters. + **NOTE:**\ Please make sure the file names **do not** contain characters which are not numbers or letters. * @@ -72,7 +70,7 @@ Finetuning and exporting to ONNX * Clean license plate images with no characters in the ``dataset/plates/`` folder * ``.ttf`` font files in the ``dataset/fonts/`` folder - **NOTE:** We recommend the autogenerated training set to contain at least 4 million images + **NOTE:**\ We recommend the autogenerated training set to contain at least 4 million images * Start training on your dataset: @@ -80,12 +78,11 @@ Finetuning and exporting to ONNX * Start from our pre-trained weights in ``pre_trained/lprnet.pth`` (you can also download it from `here `_\ ) - .. raw:: html - :name:validation + .. 
code-block:: - - python train_LPRNet.py --train_img_dirs path/to/train/images --test_img_dirs path/to/test/images --max_epoch 30 --train_batch_size 64 --test_batch_size 32 --resume_epoch 15 --pretrained_model pre_trained/lprnet.pth --save_folder runs/exp0/ --test_interval 2000 - + + python train_LPRNet.py --train_img_dirs path/to/train/images --test_img_dirs path/to/test/images --max_epoch 30 --train_batch_size 64 --test_batch_size 32 --resume_epoch 15 --pretrained_model pre_trained/lprnet.pth --save_folder runs/exp0/ --test_interval 2000 + * Or train from scratch @@ -96,12 +93,11 @@ Finetuning and exporting to ONNX #. | **Export to ONNX** | Export the model to ONNX using the following command: - .. raw:: html - :name:validation + .. code-block:: - - python export.py --onnx lprnet.onnx --weights /path/to/trained/model.pth - + + python export.py --onnx lprnet.onnx --weights /path/to/trained/model.pth + ---- @@ -111,16 +107,16 @@ Compile the Model using Hailo Model Zoo You can generate an HEF file for inference on Hailo-8 from your trained ONNX model. In order to do so you need a working model-zoo environment. Choose the model YAML from our networks configuration directory, i.e. ``hailo_model_zoo/cfg/networks/lprnet.yaml``\ , and run compilation using the model zoo: -.. raw:: html - :name:validation +.. code-block:: - - hailomz compile --ckpt lprnet.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/lprnet.yaml - + + hailomz compile --ckpt lprnet.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/lprnet.yaml --start-node-names name1 name2 --end-node-names name1 + * | ``--ckpt`` - path to your ONNX file. * | ``--calib-path`` - path to a directory with your calibration images in JPEG/png format * | ``--yaml`` - path to your configuration YAML file. +* | ``--start-node-names`` and ``--end-node-names`` - node names for customizing parsing behavior (optional). 
* | The model zoo will take care of adding the input normalization to be part of the model. .. note:: diff --git a/hailo_models/personface_detection/README.rst b/hailo_models/personface_detection/README.rst index 6ace06eb..0c0b7e7b 100644 --- a/hailo_models/personface_detection/README.rst +++ b/hailo_models/personface_detection/README.rst @@ -37,7 +37,7 @@ Outputs * Bounding box coordinates ((x,y) centers, height, width) * Box objectness confidence score - * Class probablity confidence score per class + * Class probability confidence score per class * The above 7 values per anchor are concatenated into the 21 output channels diff --git a/hailo_models/personface_detection/docs/TRAINING_GUIDE.rst b/hailo_models/personface_detection/docs/TRAINING_GUIDE.rst index 3ff5d883..5324ebe1 100644 --- a/hailo_models/personface_detection/docs/TRAINING_GUIDE.rst +++ b/hailo_models/personface_detection/docs/TRAINING_GUIDE.rst @@ -10,7 +10,7 @@ Prerequisites * docker (\ `installation instructions `_\ ) * nvidia-docker2 (\ `installation instructions `_\ ) -**NOTE:** In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. +**NOTE:**\ In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. Environment Preparations @@ -19,24 +19,22 @@ Environment Preparations #. **Build the docker image:** - .. raw:: html - :name:validation + .. code-block:: -

-      cd hailo_model_zoo/hailo_models/personface_detection/
+      
+      cd hailo_model_zoo/hailo_models/personface_detection/
       docker build  --build-arg timezone=`cat /etc/timezone` -t personface_detection:v0 .
-      
+ * This command will build the docker image with the necessary requirements using the Dockerfile that exists in this directory. #. **Start your docker:** - .. raw:: html - :name:validation + .. code-block:: - - docker run --name "your_docker_name" -it --gpus all --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir personface_detection:v0 - + + docker run --name "your_docker_name" -it --gpus all --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir personface_detection:v0 + * ``docker run`` create a new docker container. * ``--name `` name for your container. @@ -68,23 +66,21 @@ Finetuning and exporting to ONNX * Start training on your dataset starting from our pre-trained weights in ``weights/yolov5s_personface.pt`` (you can also download it from `here `_ - .. raw:: html - :name:validation + .. code-block:: - - python train.py --data ./data/personface_data.yaml --cfg ./models/yolov5s_personface.yaml --weights ./weights/yolov5s_personface.pt --epochs 300 --batch 128 --device 1,2,3,4 - + + python train.py --data ./data/personface_data.yaml --cfg ./models/yolov5s_personface.yaml --weights ./weights/yolov5s_personface.pt --epochs 300 --batch 128 --device 1,2,3,4 + #. **Export to ONNX** Export the model to ONNX using the following command: - .. raw:: html - :name:validation + .. code-block:: - - python models/export.py --weights ./runs/exp<#>/weights/best.pt --img-size 640 --batch-size 1 - + + python models/export.py --weights ./runs/exp<#>/weights/best.pt --img-size 640 --batch-size 1 + * | The best model's weights will be saved under the following path: ``./runs/exp<#>/weights/best.pt`` | , where <#> is the experiment number. @@ -98,16 +94,17 @@ Compile the Model using Hailo Model Zoo | You can generate an HEF file for inference on Hailo-8 from your trained ONNX model. In order to do so you need a working model-zoo environment. | Choose the model YAML from our networks configuration directory, i.e. 
``hailo_model_zoo/cfg/networks/yolov5s_personface.yaml``\ , and run compilation using the model zoo: -.. raw:: html - :name:validation +.. code-block:: - - hailomz compile --ckpt yolov5s_personface.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/yolov5s_personface.yaml - + + hailomz compile --ckpt yolov5s_personface.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/yolov5s_personface.yaml --start-node-names name1 name2 --end-node-names name1 --classes 80 + * | ``--ckpt`` - path to your ONNX file. * | ``--calib-path`` - path to a directory with your calibration images in JPEG/png format * | ``--yaml`` - path to your configuration YAML file. +* | ``--start-node-names`` and ``--end-node-names`` - node names for customizing parsing behavior (optional). +* | ``--classes`` - adjusting the number of classes in post-processing configuration (optional). * | The model zoo will take care of adding the input normalization to be part of the model. .. note:: @@ -121,13 +118,12 @@ Anchors Extraction ------------------ | The training flow will automatically try to find more fitting anchors values then the default anchors. In our TAPPAS environment we use the default anchors, but you should be aware that the resulted anchors might be different. -| The model anchors can be retrieved from the trained model using the following snnipet: +| The model anchors can be retrieved from the trained model using the following snippet: -.. raw:: html - :name:validation +.. code-block:: -

+   
    m = torch.load("last.pt")["model"]
    detect = list(m.children())[0][-1]
    print(detect.anchor_grid)
-   
+ diff --git a/hailo_models/reid/docs/TRAINING_GUIDE.rst b/hailo_models/reid/docs/TRAINING_GUIDE.rst index 00178dd2..6fa37753 100644 --- a/hailo_models/reid/docs/TRAINING_GUIDE.rst +++ b/hailo_models/reid/docs/TRAINING_GUIDE.rst @@ -10,7 +10,7 @@ Prerequisites * docker (\ `installation instructions `_\ ) * nvidia-docker2 (\ `installation instructions `_\ ) -**NOTE:** In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. +**NOTE:**\ In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. Environment Preparations ^^^^^^^^^^^^^^^^^^^^^^^^ @@ -18,13 +18,12 @@ Environment Preparations #. **Build the docker image:** - .. raw:: html - :name:validation + .. code-block:: -

-      cd hailo_model_zoo/hailo_models/reid/
+      
+      cd hailo_model_zoo/hailo_models/reid/
       docker build  --build-arg timezone=`cat /etc/timezone` -t person_reid:v0 .
-      
+ | the following optional arguments can be passed via --build-arg: @@ -40,13 +39,12 @@ Environment Preparations #. **Start your docker:** - .. raw:: html - :name:validation + .. code-block:: - - docker run --name "your_docker_name" -it --gpus all --ipc=host -v /path/to/local/drive:/path/to/docker/dir person_reid:v0 - + + docker run --name "your_docker_name" -it --gpus all --ipc=host -v /path/to/local/drive:/path/to/docker/dir person_reid:v0 + * ``docker run`` create a new docker container. @@ -71,22 +69,20 @@ Finetuning and exporting to ONNX * Start training on your dataset starting from our pre-trained weights in ``models/repvgg_a0_person_reid_512.pth`` or ``models/repvgg_a0_person_reid_2048.pth`` (you can also download it from `512-dim `_ & `2048-dim `_\ ) - to do so, you can edit the added yaml ``configs/repvgg_a0_hailo_pre_train.yaml`` and take a look at the examples in `torchreid `_. - .. raw:: html - :name:validation + .. code-block:: - + python scripts/main.py --config-file configs/repvgg_a0_hailo_pre_train.yaml - + #. | **Export to ONNX** | Export the model to ONNX using the following command: - .. raw:: html - :name:validation + .. code-block:: - - python scripts/export.py --model_name --weights /path/to/model/pth - + + python scripts/export.py --model_name --weights /path/to/model/pth + ---- @@ -96,17 +92,17 @@ Compile the Model using Hailo Model Zoo | In case you exported to onnx based on one of our provided RepVGG models, you can generate an HEF file for inference on Hailo-8 from your trained ONNX model. In order to do so you need a working model-zoo environment. | Choose the model YAML from our networks configuration directory, i.e. ``hailo_model_zoo/cfg/networks/repvgg_a0_person_reid_512.yaml`` (or 2048), and run compilation using the model zoo: -.. raw:: html - :name:validation +.. 
code-block:: - - hailomz compile --ckpt repvgg_a0_person_reid_512.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/repvgg_a0_person_reid_512.yaml - + + hailomz compile --ckpt repvgg_a0_person_reid_512.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/repvgg_a0_person_reid_512.yaml --start-node-names name1 name2 --end-node-names name1 + * | ``--ckpt`` - path to your ONNX file. * | ``--calib-path`` - path to a directory with your calibration images in JPEG/png format * | ``--yaml`` - path to your configuration YAML file. +* | ``--start-node-names`` and ``--end-node-names`` - node names for customizing parsing behavior (optional). * | The model zoo will take care of adding the input normalization to be part of the model. .. note:: diff --git a/hailo_models/vehicle_detection/docs/TRAINING_GUIDE.rst b/hailo_models/vehicle_detection/docs/TRAINING_GUIDE.rst index 40360b2a..ba645ed2 100644 --- a/hailo_models/vehicle_detection/docs/TRAINING_GUIDE.rst +++ b/hailo_models/vehicle_detection/docs/TRAINING_GUIDE.rst @@ -11,7 +11,7 @@ Prerequisites * nvidia-docker2 (\ `installation instructions `_\ ) -**NOTE:** In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. +**NOTE:**\ In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. Environment Preparations @@ -20,24 +20,22 @@ Environment Preparations #. **Build the docker image** - .. raw:: html - :name:validation + .. code-block:: -

-      cd hailo_model_zoo/hailo_models/vehicle_detection/
+      
+      cd hailo_model_zoo/hailo_models/vehicle_detection/
       docker build  --build-arg timezone=`cat /etc/timezone` -t vehicle_detection:v0 .
-      
+ * This command will build the docker image with the necessary requirements using the Dockerfile that exists in this directory. #. **Start your docker:** - .. raw:: html - :name:validation + .. code-block:: - - docker run --name "your_docker_name" -it --gpus all --ipc=host -v /path/to/local/drive:/path/to/docker/dir vehicle_detection:v0 - + + docker run --name "your_docker_name" -it --gpus all --ipc=host -v /path/to/local/drive:/path/to/docker/dir vehicle_detection:v0 + * ``docker run`` create a new docker container. * ``--name `` name for your container. @@ -72,22 +70,20 @@ Finetuning and exporting to ONNX * Start training on your dataset starting from our pre-trained weights in ``weights/yolov5m_vehicles.pt`` (you can also download it from `here `_\ ) - .. raw:: html - :name:validation + .. code-block:: - - python train.py --data ./data/vehicles.yaml --cfg ./models/yolov5m.yaml --weights ./weights/yolov5m_vehicles.pt --epochs 300 --batch 128 --device 1,2,3,4 - + + python train.py --data ./data/vehicles.yaml --cfg ./models/yolov5m.yaml --weights ./weights/yolov5m_vehicles.pt --epochs 300 --batch 128 --device 1,2,3,4 + #. **Export to ONNX** Export the model to ONNX using the following command: - .. raw:: html - :name:validation + .. code-block:: - - python models/export.py --weights ./runs/exp<#>/weights/best.pt --img 640 --batch 1 - + + python models/export.py --weights ./runs/exp<#>/weights/best.pt --img 640 --batch 1 + * The best model's weights will be saved under the following path: ``./runs/exp<#>/weights/best.pt``, where <#> is the experiment number. @@ -100,16 +96,17 @@ Compile the Model using Hailo Model Zoo | You can generate an HEF file for inference on Hailo-8 from your trained ONNX model. In order to do so you need a working model-zoo environment. | Choose the model YAML from our networks configuration directory, i.e. ``hailo_model_zoo/cfg/networks/yolov5m_vehicles.yaml``\ , and run compilation using the model zoo: -.. 
raw:: html - :name:validation +.. code-block:: - - hailomz compile --ckpt yolov5m_vehicles.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/yolov5m_vehicles.yaml - + + hailomz compile --ckpt yolov5m_vehicles.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/yolov5m_vehicles.yaml --start-node-names name1 name2 --end-node-names name1 --classes 80 + * | ``--ckpt`` - path to your ONNX file. * | ``--calib-path`` - path to a directory with your calibration images in JPEG/png format * | ``--yaml`` - path to your configuration YAML file. +* | ``--start-node-names`` and ``--end-node-names`` - node names for customizing parsing behavior (optional). +* | ``--classes`` - adjusting the number of classes in post-processing configuration (optional). * | The model zoo will take care of adding the input normalization to be part of the model. .. note:: @@ -130,13 +127,12 @@ Anchors Extraction ------------------ | The training flow will automatically try to find more fitting anchors values then the default anchors. In our TAPPAS environment we use the default anchors, but you should be aware that the resulted anchors might be different. -| The model anchors can be retrieved from the trained model using the following snnipet: +| The model anchors can be retrieved from the trained model using the following snippet: -.. raw:: html - :name:validation +.. code-block:: -

+   
    m = torch.load("last.pt")["model"]
    detect = list(m.children())[0][-1]
    print(detect.anchor_grid)
-   
+ diff --git a/setup.py b/setup.py index 27b1d811..8eedbca7 100755 --- a/setup.py +++ b/setup.py @@ -2,19 +2,11 @@ from setuptools import find_packages, setup - -import subprocess -check_dfc_installed = subprocess.run( - "pip show hailo-dataflow-compiler".split(), - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, -) -if check_dfc_installed.stderr: +import importlib +if not importlib.util.find_spec('hailo_sdk_client'): raise ModuleNotFoundError("hailo_sdk_client was not installed or you are not " "in the right virtualenv.\n" "In case you are not an Hailo customer please visit us at https://hailo.ai/") - - try: import cpuinfo cpu_flags = cpuinfo.get_cpu_info()['flags'] @@ -51,7 +43,7 @@ def main(): 'torch==1.11.0', 'torchmetrics==1.2.0'] - model_zoo_version = "2.10.0" + model_zoo_version = "2.11.0" package_data = { "hailo_model_zoo": [ diff --git a/training/arcface/README.rst b/training/arcface/README.rst index 69770940..2ce04b98 100644 --- a/training/arcface/README.rst +++ b/training/arcface/README.rst @@ -21,13 +21,12 @@ Environment Preparations #. | Build the docker image: - .. raw:: html - :name:validation + .. code-block:: -

-      cd hailo_model_zoo/training/arcface
+      
+      cd hailo_model_zoo/training/arcface
       docker build --build-arg timezone=`cat /etc/timezone` -t arcface:v0 .
-      
+ | the following optional arguments can be passed via --build-arg: @@ -39,12 +38,11 @@ Environment Preparations #. | Start your docker: - .. raw:: html - :name:validation + .. code-block:: - - docker run --name "your_docker_name" -it --gpus all -u "username" --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir arcface:v0 - + + docker run --name "your_docker_name" -it --gpus all -u "username" --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir arcface:v0 + * ``docker run`` create a new docker container. * ``--name `` name for your container. @@ -92,12 +90,11 @@ Training and exporting to ONNX | Start training with the following command: - .. raw:: html - :name:validation + .. code-block:: - - python -m torch.distributed.launch --nproc_per_node=2 --nnodes=1 --node_rank=0 --master_addr="127.0.0.1" --master_port=12581 train_v2.py /path/to/config - + + python -m torch.distributed.launch --nproc_per_node=2 --nnodes=1 --node_rank=0 --master_addr="127.0.0.1" --master_port=12581 train_v2.py /path/to/config + * nproc_per_node: number of gpu devices @@ -106,12 +103,11 @@ Training and exporting to ONNX | After finishing training run the following command: - .. raw:: html - :name:validation + .. code-block:: - - python torch2onnx.py /path/to/model.pt --network mbf --output /path/to/model.onnx --simplify true - + + python torch2onnx.py /path/to/model.pt --network mbf --output /path/to/model.onnx --simplify true + @@ -124,17 +120,17 @@ You can generate an HEF file for inference on Hailo-8 from your trained ONNX mod In order to do so you need a working model-zoo environment. Choose the corresponding YAML from our networks configuration directory, i.e. ``hailo_model_zoo/cfg/networks/arcface_mobilefacenet.yaml``\ , and run compilation using the model zoo: -.. raw:: html - :name:validation +.. 
code-block:: - - hailomz compile --ckpt arcface_s_leaky.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml /path/to/arcface_mobilefacenet.yaml - + + hailomz compile --ckpt arcface_s_leaky.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml /path/to/arcface_mobilefacenet.yaml --start-node-names name1 name2 --end-node-names name1 + * | ``--ckpt`` - path to your ONNX file. * | ``--calib-path`` - path to a directory with your calibration images in JPEG/png format * | ``--yaml`` - path to your configuration YAML file. +* | ``--start-node-names`` and ``--end-node-names`` - node names for customizing parsing behavior (optional). * | The model zoo will take care of adding the input normalization to be part of the model. .. note:: diff --git a/training/centerpose/README.rst b/training/centerpose/README.rst index 8f79e2fd..018a564a 100644 --- a/training/centerpose/README.rst +++ b/training/centerpose/README.rst @@ -13,7 +13,7 @@ Prerequisites * docker (\ `installation instructions `_\ ) * nvidia-docker2 (\ `installation instructions `_\ ) -**NOTE:** In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. +**NOTE:**\ In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. Environment Preparations ------------------------ @@ -21,13 +21,12 @@ Environment Preparations #. | Build the docker image: - .. raw:: html - :name:validation + .. code-block:: -

-      cd hailo_model_zoo/training/centerpose
+      
+      cd hailo_model_zoo/training/centerpose
       docker build -t centerpose:v0 --build-arg timezone=`cat /etc/timezone` .
-      
+ | the following optional arguments can be passed via --build-arg: @@ -40,12 +39,11 @@ Environment Preparations #. | Start your docker: - .. raw:: html - :name:validation + .. code-block:: - - docker run --name "your_docker_name" -it --gpus all -u "username" --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir centerpose:v0 - + + docker run --name "your_docker_name" -it --gpus all -u "username" --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir centerpose:v0 + * ``docker run`` create a new docker container. * ``--name `` name for your container. @@ -89,26 +87,24 @@ Training and exporting to ONNX | Configure your model in a .yaml file. We'll use /workspace/centerpose/experiments/regnet_fpn.yaml in this guide. | start training with the following command: - .. raw:: html - :name:validation + .. code-block:: -

+      
       cd /workspace/centerpose/tools
-      python -m torch.distributed.launch --nproc_per_node 4 train.py --cfg ../experiments/regnet_fpn.yaml
-      
+ python -m torch.distributed.launch --nproc_per_node 4 train.py --cfg ../experiments/regnet_fpn.yaml + | Where 4 is the number of GPUs used for training. | If using a different number, update both this and the used gpus in the .yaml configuration. #. | Exporting to onnx After training, run the following command: - .. raw:: html - :name:validation + .. code-block:: -

+      
       cd /workspace/centerpose/tools
-      python export.py --cfg ../experiments/regnet_fpn.yaml --TESTMODEL /workspace/out/regnet1_6/model_best.pth
-      
+ python export.py --cfg ../experiments/regnet_fpn.yaml --TESTMODEL /workspace/out/regnet1_6/model_best.pth + ---- @@ -119,16 +115,16 @@ You can generate an HEF file for inference on Hailo-8 from your trained ONNX mod In order to do so you need a working model-zoo environment. Choose the corresponding YAML from our networks configuration directory, i.e. ``hailo_model_zoo/cfg/networks/centerpose_regnetx_1.6gf_fpn.yaml``\ , and run compilation using the model zoo: -.. raw:: html - :name:validation +.. code-block:: - - hailomz compile --ckpt coco_pose_regnet1.6_fpn.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/centerpose_regnetx_1.6gf_fpn.yaml - + + hailomz compile --ckpt coco_pose_regnet1.6_fpn.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/centerpose_regnetx_1.6gf_fpn.yaml --start-node-names name1 name2 --end-node-names name1 + * | ``--ckpt`` - path to your ONNX file. * | ``--calib-path`` - path to a directory with your calibration images in JPEG/png format * | ``--yaml`` - path to your configuration YAML file. +* | ``--start-node-names`` and ``--end-node-names`` - node names for customizing parsing behavior (optional). * | The model zoo will take care of adding the input normalization to be part of the model. .. note:: diff --git a/training/damoyolo/README.rst b/training/damoyolo/README.rst index 3689ef50..933e19b7 100644 --- a/training/damoyolo/README.rst +++ b/training/damoyolo/README.rst @@ -13,7 +13,7 @@ Prerequisites * nvidia-docker2 (\ `nvidia docker installation instructions `_\ ) -**NOTE:** In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. +**NOTE:**\ In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. Environment Preparations ------------------------ @@ -21,13 +21,12 @@ Environment Preparations #. | Build the docker image: - .. raw:: html - :name:validation + .. 
code-block:: -

-      cd hailo_model_zoo/training/damoyolo
+      
+      cd hailo_model_zoo/training/damoyolo
       docker build --build-arg timezone=`cat /etc/timezone` -t damoyolo:v0 .
-      
+ | the following optional arguments can be passed via --build-arg: @@ -42,12 +41,11 @@ Environment Preparations #. | Start your docker: - .. raw:: html - :name:validation + .. code-block:: - - docker run --name "your_docker_name" -it --gpus all --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir damoyolo:v0 - + + docker run --name "your_docker_name" -it --gpus all --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir damoyolo:v0 + * ``docker run`` create a new docker container. * ``--name `` name for your container. @@ -70,16 +68,15 @@ Training and exporting to ONNX * | Update the symbolic link to your dataset: ln -sfn /your/coco/like/dataset/path datasets/coco * | Start training - The following command is an example for training a *damoyolo_tinynasL20_T* model. - .. raw:: html - :name:validation + .. code-block:: - + python tools/train.py -f configs/damoyolo_tinynasL20_T.py -

+                                 
                                  configs/damoyolo_tinynasL25_S.py
                                  configs/damoyolo_tinynasL35_M.py
-                                 
-
+ + * ``configs/damoyolo_tinynasL20_T.py`` - configuration file of the DAMO-YOLO variant you would like to train. In order to change the number of classes make sure you update ``num_classes`` and ``class_names`` in this file. @@ -88,12 +85,11 @@ Training and exporting to ONNX | In order to export your trained DAMO-YOLO model to ONNX run the following script: - .. raw:: html - :name:validation + .. code-block:: - - python tools/converter.py -f configs/damoyolo_tinynasL20_T.py -c /path/to/trained/model.pth --batch_size 1 --img_size 640 # export at 640x640 with batch size 1 - + + python tools/converter.py -f configs/damoyolo_tinynasL20_T.py -c /path/to/trained/model.pth --batch_size 1 --img_size 640 # export at 640x640 with batch size 1 + ---- @@ -104,16 +100,16 @@ Compile the Model using Hailo Model Zoo | In order to do so you need a working model-zoo environment. | Choose the corresponding YAML from our networks configuration directory, i.e. ``hailo_model_zoo/cfg/networks/damoyolo_tinynasL20_T.yaml``\ , and run compilation using the model zoo: -.. raw:: html - :name:validation +.. code-block:: - - hailomz compile --ckpt damoyolo_tinynasL20_T.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/damoyolo/variant.yaml - + + hailomz compile --ckpt damoyolo_tinynasL20_T.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/damoyolo/variant.yaml --start-node-names name1 name2 --end-node-names name1 + * | ``--ckpt`` - path to your ONNX file. * | ``--calib-path`` - path to a directory with your calibration images in JPEG/png format * | ``--yaml`` - path to your configuration YAML file. +* | ``--start-node-names`` and ``--end-node-names`` - node names for customizing parsing behavior (optional). * | The model zoo will take care of adding the input normalization to be part of the model. 
diff --git a/training/fcn/README.rst b/training/fcn/README.rst index f19e442e..f5dac06d 100644 --- a/training/fcn/README.rst +++ b/training/fcn/README.rst @@ -12,7 +12,7 @@ Prerequisites * docker (\ `installation instructions `_\ ) * nvidia-docker2 (\ `installation instructions `_\ ) -**NOTE:** In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. +**NOTE:**\ In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. Environment Preparations @@ -20,13 +20,12 @@ Environment Preparations #. | Build the docker image: - .. raw:: html - :name:validation + .. code-block:: -

-      cd hailo_model_zoo/training/fcn
+      
+      cd hailo_model_zoo/training/fcn
       docker build -t fcn:v0 --build-arg timezone=`cat /etc/timezone` .
-      
+ | the following optional arguments can be passed via --build-arg: @@ -38,12 +37,11 @@ Environment Preparations #. | Start your docker: - .. raw:: html - :name:validation + .. code-block:: - - docker run --name "your_docker_name" -it --gpus all -u "username" --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir fcn:v0 - + + docker run --name "your_docker_name" -it --gpus all -u "username" --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir fcn:v0 + * ``docker run`` create a new docker container. * ``--name `` name for your container. @@ -96,13 +94,12 @@ Training and exporting to ONNX | Configure your model in a .py file. We'll use /workspace/mmsegmentation/configs/fcn/fcn8_r18_hailo.py in this guide. | start training with the following command: - .. raw:: html - :name:validation + .. code-block:: -

+      
       cd /workspace/mmsegmentation
-      ./tools/dist_train.sh configs/fcn/fcn8_r18_hailo.py 2
-      
+ ./tools/dist_train.sh configs/fcn/fcn8_r18_hailo.py 2 + | Where 2 is the number of GPUs used for training. @@ -110,13 +107,12 @@ Training and exporting to ONNX | After training, run the following command: - .. raw:: html - :name:validation + .. code-block:: -

+      
       cd /workspace/mmsegmentation
       python ./tools/pytorch2onnx.py configs/fcn/fcn8_r18_hailo.py --checkpoint ./work_dirs/fcn8_r18_hailo/iter_59520.pth --shape 1024 1920 --out_name fcn.onnx
-      
+ ---- @@ -128,17 +124,17 @@ Compile the Model using Hailo Model Zoo | In order to do so you need a working model-zoo environment. | Choose the corresponding YAML from our networks configuration directory, i.e. ``hailo_model_zoo/cfg/networks/fcn8_resnet_v1_18.yaml``\ , and run compilation using the model zoo: -.. raw:: html - :name:validation +.. code-block:: - - hailomz compile --ckpt fcn.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/fcn8_resnet_v1_18.yaml - + + hailomz compile --ckpt fcn.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/fcn8_resnet_v1_18.yaml --start-node-names name1 name2 --end-node-names name1 + * | ``--ckpt`` - path to your ONNX file. * | ``--calib-path`` - path to a directory with your calibration images in JPEG/png format * | ``--yaml`` - path to your configuration YAML file. +* | ``--start-node-names`` and ``--end-node-names`` - node names for customizing parsing behavior (optional). * | The model zoo will take care of adding the input normalization to be part of the model. .. 
note:: diff --git a/training/fcn_hailo/Dockerfile b/training/fcn_hailo/Dockerfile new file mode 100644 index 00000000..a54bff92 --- /dev/null +++ b/training/fcn_hailo/Dockerfile @@ -0,0 +1,30 @@ +ARG base_image=nvcr.io/nvidia/pytorch:21.10-py3 +FROM $base_image + +# using ARG so it won't persist in user env +ARG DEBIAN_FRONTEND=noninteractive +ARG timezone="Asia/Jerusalem" +ENV TZ=$timezone +RUN apt-get update && \ + ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && \ + echo $TZ > /etc/timezone && \ + apt-get -y --no-install-recommends install vim git build-essential python3-opencv sudo tmux && \ + # solve mpi conflicts + { which mpirun && apt-get remove -y libopenmpi3 || true ; } + +ARG repo=https://github.com/hailo-ai/mmsegmentation.git +RUN git clone $repo && \ + cd mmsegmentation && \ + git checkout Hailo-2.0 && \ + pip install --upgrade pip && \ + pip install opencv-python==4.5.5.64 && \ + pip install openmim==0.3.7 && \ + mim install mmengine==0.8.5 && \ + mim install mmcv==2.0.0 && \ + pip install -e . +WORKDIR /workspace/mmsegmentation + +# Add wget of our pretrained model and pruned model +RUN pip install "sparseml==1.5.4" + +ENV NM_BYPASS_TORCH_VERSION=1 diff --git a/training/fcn_hailo/README.rst b/training/fcn_hailo/README.rst new file mode 100644 index 00000000..aad2916b --- /dev/null +++ b/training/fcn_hailo/README.rst @@ -0,0 +1,141 @@ +============== +FCN-Hailo Retraining +============== + +* To learn more about FCN look `here `_ + +------- + +Prerequisites +------------- + +* docker (\ `installation instructions `_\ ) +* nvidia-docker2 (\ `installation instructions `_\ ) + +**NOTE:** In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. + + +Environment Preparations +------------------------ + +#. | Build the docker image: + + .. code-block:: + + + cd hailo_model_zoo/training/fcn_hailo + docker build -t fcn_hailo:v0 --build-arg timezone=`cat /etc/timezone` . 
+ + + | the following optional arguments can be passed via --build-arg: + + * ``timezone`` - a string for setting up timezone. E.g. "Asia/Jerusalem" + * ``user`` - username for a local non-root user. Defaults to 'hailo'. + * ``group`` - default group for a local non-root user. Defaults to 'hailo'. + * ``uid`` - user id for a local non-root user. + * ``gid`` - group id for a local non-root user. + +#. | Start your docker: + + .. code-block:: + + + docker run --name "your_docker_name" -it --gpus all -u "username" --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir fcn_hailo:v0 + + + * ``docker run`` create a new docker container. + * ``--name `` name for your container. + * ``-u `` same username as used for building the image. + * ``-it`` runs the command interactively. + * ``--gpus all`` allows access to all GPUs. + * ``--ipc=host`` sets the IPC mode for the container. + * ``-v /path/to/local/data/dir:/path/to/docker/data/dir`` maps ``/path/to/local/data/dir`` from the host to the container. You can use this command multiple times to mount multiple directories. + * ``fcn_hailo:v0`` the name of the docker image. + +Training and exporting to ONNX +------------------------------ + + +#. | Prepare your data: + + | Data is expected to be in coco format, and by default should be in /workspace/data/. + | The expected structure is as follows: + + .. code-block:: + + /workspace + |-- mmsegmentation + `-- |-- data + `-- cityscapes + |-- gtFine + | | -- train + | | | -- aachem + | | | -- | -- *.png + | | ` -- ... + | ` -- test + | | -- berlin + | | -- | -- *.png + | ` -- ... + `-- leftImg8bit + | -- train + | -- | -- aachem + | -- | -- | -- *.png + | -- ` -- ... + ` -- test + | -- berlin + | -- | -- *.png + ` -- ... + + | more information can be found `here `_ + + +#. | Training: + + | Configure your model in a .py file. We'll use /workspace/mmsegmentation/configs/fcn/fcn_hailo_10classes.py in this guide. + | start training with the following command: + + .. 
code-block:: + + + cd /workspace/mmsegmentation + ./tools/dist_train.sh configs/fcn/fcn_hailo_10classes.py 2 + + + | Where 2 is the number of GPUs used for training. + +#. | Exporting to onnx + + | After training, run the following command: + + .. code-block:: + + + cd /workspace/mmsegmentation + python ./tools/pytorch2onnx.py configs/fcn/fcn_hailo_10classes.py --checkpoint ./work_dirs/fcn_hailo_10classes/iter_74400.pth --shape 736 960 --postprocess --soft_weights_loading --out_name fcn_hailo.onnx + + + +---- + +Compile the Model using Hailo Model Zoo +--------------------------------------- + +| You can generate an HEF file for inference on Hailo-8 from your trained ONNX model. +| In order to do so you need a working model-zoo environment. +| Choose the corresponding YAML from our networks configuration directory, i.e. ``hailo_model_zoo/cfg/networks/fcn_hailo_pruned50.yaml``\ , and run compilation using the model zoo: + +.. code-block:: + + + hailomz compile --ckpt fcn_hailo.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/fcn_hailo_pruned50.yaml --start-node-names name1 name2 --end-node-names name1 + + + +* | ``--ckpt`` - path to your ONNX file. +* | ``--calib-path`` - path to a directory with your calibration images in JPEG/png format +* | ``--yaml`` - path to your configuration YAML file. +* | ``--start-node-names`` and ``--end-node-names`` - node names for customizing parsing behavior (optional). +* | The model zoo will take care of adding the input normalization to be part of the model. + +.. note:: + More details about YAML files are presented `here <../../docs/YAML.rst>`_. 
diff --git a/training/mspn/README.rst b/training/mspn/README.rst index 297adf05..9df8c9f9 100644 --- a/training/mspn/README.rst +++ b/training/mspn/README.rst @@ -9,7 +9,7 @@ Prerequisites * docker (\ `installation instructions `_\ ) * nvidia-docker2 (\ `installation instructions `_\ ) -**NOTE:** In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. +**NOTE:**\ In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. Environment Preparations @@ -17,13 +17,12 @@ Environment Preparations #. Build the docker image: - .. raw:: html - :name:validation + .. code-block:: -

-      cd hailo_model_zoo/training/mspn
+      
+      cd hailo_model_zoo/training/mspn
       docker build -t mspn:v0 --build-arg timezone=`cat /etc/timezone` .
-      
+ | the following optional arguments can be passed via --build-arg: @@ -36,12 +35,11 @@ Environment Preparations #. Start your docker: - .. raw:: html - :name:validation + .. code-block:: - - docker run --name "your_docker_name" -it --gpus all -u "username" --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir mspn:v0 - + + docker run --name "your_docker_name" -it --gpus all -u "username" --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir mspn:v0 + * ``docker run`` create a new docker container. * ``--name `` name for your container. @@ -86,27 +84,25 @@ Training and exporting to ONNX Configure your model in a .py config file. We will use ``/workspace/mmpose/configs/body/2d_kpt_sview_rgb_img/topdown_heatmap/coco/regnetx_800mf_256x192.py`` in this guide. Start training with the following command: - .. raw:: html - :name:validation + .. code-block:: -

+      
       cd /workspace/mmpose
-      ./tools/dist_train.sh ./configs/body/2d_kpt_sview_rgb_img/topdown_heatmap/coco/regnetx_800mf_256x192.py 4 --work-dir exp0
-      
+ ./tools/dist_train.sh ./configs/body/2d_kpt_sview_rgb_img/topdown_heatmap/coco/regnetx_800mf_256x192.py 4 --work-dir exp0 + Where 4 is the number of GPUs used for training. In this example, the trained model will be saved under ``exp0`` directory. #. Export to onnx - In orded to export your trained model to ONNX run the following script: + In order to export your trained model to ONNX run the following script: - .. raw:: html - :name:validation + .. code-block:: -

+      
       cd /workspace/mmpose
-      python tools/deployment/pytorch2onnx.py ./configs/body/2d_kpt_sview_rgb_img/topdown_heatmap/coco/regnetx_800mf_256x192.py exp0/best_AP_epoch_310.pth --output-file mspn_regnetx_800mf.onnx
-      
+ python tools/deployment/pytorch2onnx.py ./configs/body/2d_kpt_sview_rgb_img/topdown_heatmap/coco/regnetx_800mf_256x192.py exp0/best_AP_epoch_310.pth --output-file mspn_regnetx_800mf.onnx + where ``exp0/best_AP_epoch_310.pth`` should be replaced by the trained model file path. @@ -119,18 +115,18 @@ Compile the Model using Hailo Model Zoo | In order to do so you need a working model-zoo environment. | Choose the corresponding YAML from our networks configuration directory, i.e. ``hailo_model_zoo/cfg/networks/mspn_regnetx_800mf.yaml``\ , and run compilation using the model zoo: -.. raw:: html - :name:validation +.. code-block:: - - hailomz compile --ckpt mspn_regnetx_800mf.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/mspn_regnetx_800mf.yaml - + + hailomz compile --ckpt mspn_regnetx_800mf.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/mspn_regnetx_800mf.yaml --start-node-names name1 name2 --end-node-names name1 + * | ``--ckpt`` - path to your ONNX file. * | ``--calib-path`` - path to a directory with your calibration images in JPEG/png format * | ``--yaml`` - path to your configuration YAML file. +* | ``--start-node-names`` and ``--end-node-names`` - node names for customizing parsing behavior (optional). * | The model zoo will take care of adding the input normalization to be part of the model. .. note:: - More details about YAML files are presented `here <../../docs/YAML.rst>`_. \ No newline at end of file + More details about YAML files are presented `here <../../docs/YAML.rst>`_. diff --git a/training/nanodet/README.rst b/training/nanodet/README.rst index c259c276..add14f45 100644 --- a/training/nanodet/README.rst +++ b/training/nanodet/README.rst @@ -12,7 +12,7 @@ Prerequisites * docker (\ `installation instructions `_\ ) * nvidia-docker2 (\ `installation instructions `_\ ) -**NOTE:** In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. 
+**NOTE:**\ In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. Environment Preparations @@ -20,13 +20,12 @@ Environment Preparations #. | Build the docker image: - .. raw:: html - :name:validation + .. code-block:: -

-      cd hailo_model_zoo/training/nanodet
+      
+      cd hailo_model_zoo/training/nanodet
       docker build -t nanodet:v0 --build-arg timezone=`cat /etc/timezone` .
-      
+ | the following optional arguments can be passed via --build-arg: @@ -39,12 +38,11 @@ Environment Preparations #. | Start your docker: - .. raw:: html - :name:validation + .. code-block:: - - docker run --name "your_docker_name" -it --gpus all -u "username" --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir nanodet:v0 - + + docker run --name "your_docker_name" -it --gpus all -u "username" --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir nanodet:v0 + * ``docker run`` create a new docker container. * ``--name `` name for your container. @@ -84,14 +82,13 @@ Training and exporting to ONNX | Start training with the following commands: - .. raw:: html - :name:validation + .. code-block:: -

-      cd /workspace/nanodet
+      
+      cd /workspace/nanodet
       ln -s /workspace/data/coco/ /coco
       python tools/train.py ./config/legacy_v0.x_configs/RepVGG/nanodet-RepVGG-A0_416.yml
-      
+ | In case you want to use the pretrained nanodet-RepVGG-A0_416.ckpt, which was predownloaded into your docker modify your configurationf file: @@ -113,14 +110,13 @@ Training and exporting to ONNX | After training, install the ONNX and ONNXruntime packages, then export the ONNX model: - .. raw:: html - :name:validation + .. code-block:: -

+      
       python tools/export_onnx.py --cfg_path ./config/legacy_v0.x_configs/RepVGG/nanodet-RepVGG-A0_416.yml --model_path /workspace/nanodet/workspace/RepVGG-A0-416/model_last.ckpt
-      
+ -**NOTE:** Your trained model will be found under the following path: /workspace/nanodet/workspace/ /model_last.ckpt, and exported onnx will be written to /workspace/nanodet/nanodet.onnx +**NOTE:**\ Your trained model will be found under the following path: /workspace/nanodet/workspace/ /model_last.ckpt, and exported onnx will be written to /workspace/nanodet/nanodet.onnx ---- @@ -132,16 +128,17 @@ Compile the Model using Hailo Model Zoo | In order to do so you need a working model-zoo environment. | Choose the corresponding YAML from our networks configuration directory, i.e. ``hailo_model_zoo/cfg/networks/nanodet_repvgg.yaml``\ , and run compilation using the model zoo: -.. raw:: html - :name:validation +.. code-block:: - - hailomz compile --ckpt nanodet.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/nanodet_repvgg.yaml - + + hailomz compile --ckpt nanodet.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/nanodet_repvgg.yaml --start-node-names name1 name2 --end-node-names name1 --classes 80 + * | ``--ckpt`` - path to your ONNX file. * | ``--calib-path`` - path to a directory with your calibration images in JPEG/png format * | ``--yaml`` - path to your configuration YAML file. +* | ``--start-node-names`` and ``--end-node-names`` - node names for customizing parsing behavior (optional). +* | ``--classes`` - adjusting the number of classes in post-processing configuration (optional). * | The model zoo will take care of adding the input normalization to be part of the model. .. note:: diff --git a/training/vit/README.rst b/training/vit/README.rst index ae9c4f83..2e0acc9f 100644 --- a/training/vit/README.rst +++ b/training/vit/README.rst @@ -14,14 +14,13 @@ Environment Preparations #. | Build the docker image: - .. raw:: html - :name:validation + .. code-block:: -

+       
+
        cd hailo_model_zoo/training/vit
-       cd hailo_model_zoo/training/vit
        docker build --build-arg timezone=`cat /etc/timezone` -t vit:v0 .
-       
+ | the following optional arguments can be passed via --build-arg: @@ -37,13 +36,12 @@ Environment Preparations #. | Start your docker: - .. raw:: html - :name:validation + .. code-block:: - - docker run --name "your_docker_name" -it --gpus all --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir vit:v0 - + + docker run --name "your_docker_name" -it --gpus all --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir vit:v0 + * ``docker run`` create a new docker container. * ``--name `` name for your container. @@ -60,16 +58,15 @@ Training and exporting to ONNX #. | Train your model: | Once the docker is started, you can start training your model. - * | Prepare your custome dataset - Follow the steps described `here ` + * | Prepare your custom dataset - Follow the steps described `here ` - * | Start training - The following commad is an example for training a *vit_tiny_un_patch16_224* model. + * | Start training - The following command is an example for training a *vit_tiny_un_patch16_224* model. - .. raw:: html - :name:validation + .. 
code-block:: - - python3 -m torch.distributed.launch --nproc_per_node=1 train.py ../data/imagenet_10000/ --model vit_tiny_un_patch16_224 --output output --experiment retrain --initial-checkpoint vit_tiny_un_patch16_224.pth.tar --epochs 1 --workers 6 --batch-size=64 --drop-path 0.1 --model-ema --model-ema-decay 0.99996 --opt adamw --opt-eps 1e-8 --weight-decay 0.05 --lr 0.00001 --aa rand-m9-mstd0.5-inc1 --train-interpolation bicubic --use-ra-sampler --reprob 0.25 --mixup 0.8 --cutmix 1.0 - + + python3 -m torch.distributed.launch --nproc_per_node=1 train.py ../data/imagenet_10000/ --model vit_tiny_un_patch16_224 --output output --experiment retrain --initial-checkpoint vit_tiny_un_patch16_224.pth.tar --epochs 1 --workers 6 --batch-size=64 --drop-path 0.1 --model-ema --model-ema-decay 0.99996 --opt adamw --opt-eps 1e-8 --weight-decay 0.05 --lr 0.00001 --aa rand-m9-mstd0.5-inc1 --train-interpolation bicubic --use-ra-sampler --reprob 0.25 --mixup 0.8 --cutmix 1.0 + * ``vit_tiny_un_patch16_224.pth.tar`` - pretrained weights. * ``--model-ema`` - use exponential moving average weights. @@ -79,12 +76,11 @@ Training and exporting to ONNX | In order to export your trained ViT model to ONNX run the following script: - .. raw:: html - :name:validation + .. code-block:: - - python export.py --model vit_tiny_un_patch16_224 --checkpoint=/path/to/trained/best.pt --use-ema - + + python export.py --model vit_tiny_un_patch16_224 --checkpoint=/path/to/trained/best.pt --use-ema + * ``--use-ema`` - optional to use if --model-ema was used during training. @@ -97,14 +93,15 @@ Compile the Model using Hailo Model Zoo | In order to do so you need a working model-zoo environment. | Choose the corresponding YAML from our networks configuration directory, i.e. 
``hailo_model_zoo/cfg/networks/vit_tiny.yaml``\ , and run compilation using the model zoo: - - hailomz compile --ckpt vit_tiny_un_patch16_224.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/vit_tiny_un_patch16_224.yaml - + + hailomz compile --ckpt vit_tiny_un_patch16_224.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/vit_tiny_un_patch16_224.yaml --start-node-names name1 name2 --end-node-names name1 + * | ``--ckpt`` - path to your ONNX file. * | ``--calib-path`` - path to a directory with your calibration images in JPEG/png format * | ``--yaml`` - path to your configuration YAML file. +* | ``--start-node-names`` and ``--end-node-names`` - node names for customizing parsing behavior (optional). * | The model zoo will take care of adding the input normalization to be part of the model. More details about YAML files are presented `here <../../docs/YAML.rst>`_. diff --git a/training/yolact/README.rst b/training/yolact/README.rst index 34651e89..563c91a6 100644 --- a/training/yolact/README.rst +++ b/training/yolact/README.rst @@ -20,13 +20,12 @@ Environment Preparations #. | Build the docker image: - .. raw:: html - :name:validation + .. code-block:: -

-      cd hailo_model_zoo/training/yolact
+      
+      cd hailo_model_zoo/training/yolact
       docker build --build-arg timezone=`cat /etc/timezone` -t yolact:v0 .
-      
+ @@ -42,12 +41,11 @@ Environment Preparations #. | Start your docker: - .. raw:: html - :name:validation + .. code-block:: - - docker run --name "your_docker_name" -it --gpus all --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir yolact:v0 - + + docker run --name "your_docker_name" -it --gpus all --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir yolact:v0 + * ``docker run`` create a new docker container. * ``--name `` name for your container. @@ -89,25 +87,23 @@ Training and exporting to ONNX | Once your dataset is prepared, create a soft link to it under the yolact/data work directory, then you can start training your model: - .. raw:: html - :name:validation + .. code-block:: -

+      
       cd /workspace/yolact
       ln -s /workspace/data/coco data/coco
       python train.py --config=yolact_regnetx_800MF_config
-      
+ * ``yolact_regnetx_800MF_config`` - configuration using the regnetx_800MF backbone. -#. | Export to ONNX: In orded to export your trained YOLACT model to ONNX run the following script: +#. | Export to ONNX: In order to export your trained YOLACT model to ONNX run the following script: - .. raw:: html - :name:validation + .. code-block:: - - python export.py --config=yolact_regnetx_800MF_config --trained_model=path/to/trained/model --export_path=path/to/export/model.onnx - + + python export.py --config=yolact_regnetx_800MF_config --trained_model=path/to/trained/model --export_path=path/to/export/model.onnx + * ``--config`` - same configuration used for training. * ``--trained_model`` - path to the weights produced by the training process. @@ -122,16 +118,16 @@ You can generate an HEF file for inference on Hailo-8 from your trained ONNX mod In order to do so you need a working model-zoo environment. Choose the corresponding YAML from our networks configuration directory, i.e. ``hailo_model_zoo/cfg/networks/yolact.yaml``\ , and run compilation using the model zoo: -.. raw:: html - :name:validation +.. code-block:: - - hailomz compile yolact --ckpt yolact.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/yolact_regnetx_800mf_20classes.yaml - + + hailomz compile yolact --ckpt yolact.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/yolact_regnetx_800mf_20classes.yaml --start-node-names name1 name2 --end-node-names name1 + * | ``--ckpt`` - path to your ONNX file. * | ``--calib-path`` - path to a directory with your calibration images in JPEG/png format * | ``--yaml`` - path to your configuration YAML file. +* | ``--start-node-names`` and ``--end-node-names`` - node names for customizing parsing behavior (optional). * | The model zoo will take care of adding the input normalization to be part of the model. .. note:: @@ -139,4 +135,4 @@ Choose the corresponding YAML from our networks configuration directory, i.e. 
`` is an example yaml where some of the classes (out of 80) were removed. If you wish to change the number of classes, the easiest way is to retrain with the exact number of classes, erase the ``channels_remove`` section (lines 18 to 437). - More details about YAML files are presented `here <../../docs/YAML.rst>`_. \ No newline at end of file + More details about YAML files are presented `here <../../docs/YAML.rst>`_. diff --git a/training/yolov3/README.rst b/training/yolov3/README.rst index a9574383..778e0999 100644 --- a/training/yolov3/README.rst +++ b/training/yolov3/README.rst @@ -21,13 +21,12 @@ Environment Preparations #. | Build the docker image: - .. raw:: html - :name:validation + .. code-block:: -

-      cd hailo_model_zoo/training/yolov3
+      
+      cd hailo_model_zoo/training/yolov3
       docker build --build-arg timezone=`cat /etc/timezone` -t yolov3:v0 .
-      
+ | the following optional arguments can be passed via --build-arg: @@ -40,12 +39,11 @@ Environment Preparations #. | Start your docker: - .. raw:: html - :name:validation + .. code-block:: - - docker run --name "your_docker_name" -it --gpus all --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir yolov3:v0 - + + docker run --name "your_docker_name" -it --gpus all --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir yolov3:v0 + * ``docker run`` create a new docker container. * ``--name `` name for your container. @@ -81,12 +79,11 @@ Training and exporting to ONNX * | Start training - The following command is an example for training the yolov3. - .. raw:: html - :name:validation + .. code-block:: - - ./darknet detector train data/obj.data cfg/yolov3.cfg yolov3.weights -map -clear - + + ./darknet detector train data/obj.data cfg/yolov3.cfg yolov3.weights -map -clear + | Final trained weights will be available in ``backup/`` directory. @@ -94,12 +91,11 @@ Training and exporting to ONNX #. | Export to ONNX: | In order to export your trained YOLOv3 model to ONNX run the following script: - .. raw:: html - :name:validation + .. code-block:: - - python ../pytorch-YOLOv4/demo_darknet2onnx.py cfg/yolov3.cfg /path/to/trained.weights /path/to/some/image.jpg 1 - + + python ../pytorch-YOLOv4/demo_darknet2onnx.py cfg/yolov3.cfg /path/to/trained.weights /path/to/some/image.jpg 1 + * The ONNX would be available in ``/workspace/darknet/`` @@ -117,12 +113,11 @@ Compile the Model using Hailo Model Zoo | | Run compilation using the model zoo: -.. raw:: html - :name:validation +.. code-block:: - - hailomz compile yolov3_416 --ckpt yolov3_1_416_416.onnx --calib-path /path/to/calibration/imgs/dir/ - + + hailomz compile yolov3_416 --ckpt yolov3_1_416_416.onnx --calib-path /path/to/calibration/imgs/dir/ + * | ``--ckpt`` - path to your ONNX file. 
* | ``--calib-path`` - path to a directory with your calibration images in JPEG/png format diff --git a/training/yolov4/README.rst b/training/yolov4/README.rst index a8531285..10c63f1b 100644 --- a/training/yolov4/README.rst +++ b/training/yolov4/README.rst @@ -12,7 +12,7 @@ Prerequisites * docker (\ `installation instructions `_\ ) * nvidia-docker2 (\ `installation instructions `_\ ) -**NOTE:** In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. +**NOTE:**\ In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. Environment Preparations @@ -20,13 +20,12 @@ Environment Preparations #. | Build the docker image: - .. raw:: html - :name:validation + .. code-block:: -

-      cd hailo_model_zoo/training/yolov4
+      
+      cd hailo_model_zoo/training/yolov4
       docker build --build-arg timezone=`cat /etc/timezone` -t yolov4:v0 .
-      
+ | the following optional arguments can be passed via --build-arg: @@ -39,12 +38,11 @@ Environment Preparations #. | Start your docker: - .. raw:: html - :name:validation + .. code-block:: - - docker run --name "your_docker_name" -it --gpus all --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir yolov4:v0 - + + docker run --name "your_docker_name" -it --gpus all --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir yolov4:v0 + * ``docker run`` create a new docker container. * ``--name `` name for your container. @@ -80,12 +78,11 @@ Training and exporting to ONNX * | Start training - The following command is an example for training the yolov4-leaky model. - .. raw:: html - :name:validation + .. code-block:: - - ./darknet detector train data/obj.data cfg/yolov4-leaky.cfg -map -clear - + + ./darknet detector train data/obj.data cfg/yolov4-leaky.cfg -map -clear + | Final trained weights will be available in ``backup/`` directory. @@ -93,12 +90,11 @@ Training and exporting to ONNX | In order to export your trained YOLOv4 model to ONNX run the following script: - .. raw:: html - :name:validation + .. code-block:: - - python ../pytorch-YOLOv4/demo_darknet2onnx.py cfg/yolov4-leaky.cfg /path/to/trained.weights /path/to/some/image.jpg 1 - + + python ../pytorch-YOLOv4/demo_darknet2onnx.py cfg/yolov4-leaky.cfg /path/to/trained.weights /path/to/some/image.jpg 1 + * | The ONNX will be available in ``/workspace/darknet/`` @@ -111,16 +107,16 @@ Compile the Model using Hailo Model Zoo | In order to do so you need a working model-zoo environment. | Choose the corresponding YAML from our networks configuration directory, i.e. ``hailo_model_zoo/cfg/networks/yolov4_leaky.yaml``\ , and run compilation using the model zoo: -.. raw:: html - :name:validation +.. 
code-block:: - - hailomz compile --ckpt yolov4_1_3_512_512.onnx --calib-path /path/to/calibration/imgs/ --yaml path/to/yolov4_leaky.yaml - + + hailomz compile --ckpt yolov4_1_3_512_512.onnx --calib-path /path/to/calibration/imgs/ --yaml path/to/yolov4_leaky.yaml --start-node-names name1 name2 --end-node-names name1 + * | ``--ckpt`` - path to your ONNX file. * | ``--calib-path`` - path to a directory with your calibration images in JPEG/png format * | ``--yaml`` - path to your configuration YAML file. +* | ``--start-node-names`` and ``--end-node-names`` - node names for customizing parsing behavior (optional). * | The model zoo will take care of adding the input normalization to be part of the model. .. note:: diff --git a/training/yolov5/README.rst b/training/yolov5/README.rst index 0556e725..558e3e4a 100644 --- a/training/yolov5/README.rst +++ b/training/yolov5/README.rst @@ -13,7 +13,7 @@ Prerequisites * nvidia-docker2 (\ `installation instructions `_\ ) -**NOTE:** In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. +**NOTE:**\ In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. Environment Preparations ------------------------ @@ -21,13 +21,12 @@ Environment Preparations #. | Build the docker image: - .. raw:: html - :name:validation + .. code-block:: -

-      cd hailo_model_zoo/training/yolov5
+      
+      cd hailo_model_zoo/training/yolov5
       docker build --build-arg timezone=`cat /etc/timezone` -t yolov5:v0 .
-      
+ | the following optional arguments can be passed via --build-arg: @@ -42,12 +41,11 @@ Environment Preparations #. | Start your docker: - .. raw:: html - :name:validation + .. code-block:: - - docker run --name "your_docker_name" -it --gpus all --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir yolov5:v0 - + + docker run --name "your_docker_name" -it --gpus all --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir yolov5:v0 + * ``docker run`` create a new docker container. * ``--name `` name for your container. @@ -72,28 +70,26 @@ Training and exporting to ONNX * | Start training - The following command is an example for training a *yolov5s* model. - .. raw:: html - :name:validation + .. code-block:: - + python train.py --img 640 --batch 16 --epochs 3 --data coco128.yaml --weights yolov5s.pt --cfg models/yolov5s.yaml - + * ``yolov5s.pt`` - pretrained weights. You can find the pretrained weights for *yolov5s*\ , *yolov5m*\ , *yolov5l*\ , *yolov5x* and *yolov5m_wo_spp* in your working directory. * ``models/yolov5s.yaml`` - configuration file of the yolov5 variant you would like to train. In order to change the number of classes make sure you update this file. - | **NOTE:**\ We recommend to use *yolov5m_wo_spp* for best performance on Hailo-8 + | **NOTE:**\ We recommend to use *yolov5m_wo_spp* for best performance on Hailo-8 #. | Export to ONNX: | In order to export your trained YOLOv5 model to ONNX run the following script: - .. raw:: html - :name:validation + .. code-block:: - - python models/export.py --weights /path/to/trained/model.pt --img 640 --batch 1 # export at 640x640 with batch size 1 - + + python models/export.py --weights /path/to/trained/model.pt --img 640 --batch 1 # export at 640x640 with batch size 1 + ---- @@ -104,16 +100,17 @@ Compile the Model using Hailo Model Zoo | In order to do so you need a working model-zoo environment. | Choose the corresponding YAML from our networks configuration directory, i.e. 
``hailo_model_zoo/cfg/networks/yolov5s.yaml``\ , and run compilation using the model zoo: -.. raw:: html - :name:validation +.. code-block:: - - hailomz compile --ckpt yolov5s.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/yolov5s.yaml - + + hailomz compile --ckpt yolov5s.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/yolov5s.yaml --start-node-names name1 name2 --end-node-names name1 --classes 80 + * | ``--ckpt`` - path to your ONNX file. * | ``--calib-path`` - path to a directory with your calibration images in JPEG/png format * | ``--yaml`` - path to your configuration YAML file. +* | ``--start-node-names`` and ``--end-node-names`` - node names for customizing parsing behavior (optional). +* | ``--classes`` - adjusting the number of classes in post-processing configuration (optional). * | The model zoo will take care of adding the input normalization to be part of the model. .. note:: @@ -127,11 +124,10 @@ Anchors Extraction | The training flow will automatically try to find more fitting anchors values then the default anchors. In our TAPPAS environment we use the default anchors, but you should be aware that the resulted anchors might be different. | The model anchors can be retrieved from the trained model using the following snippet: -.. raw:: html - :name:validation +.. code-block:: -

+   
    m = torch.load("last.pt")["model"]
    detect = list(m.children())[0][-1]
    print(detect.anchor_grid)
-   
+ diff --git a/training/yolov8/README.rst b/training/yolov8/README.rst index 6c896909..81301a98 100644 --- a/training/yolov8/README.rst +++ b/training/yolov8/README.rst @@ -13,7 +13,7 @@ Prerequisites * nvidia-docker2 (\ `installation instructions `_\ ) -**NOTE:** In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. +**NOTE:**\ In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. Environment Preparations ------------------------ @@ -21,13 +21,12 @@ Environment Preparations #. | Build the docker image: - .. raw:: html - :name:validation + .. code-block:: -

-      cd hailo_model_zoo/training/yolov8
+      
+      cd hailo_model_zoo/training/yolov8
       docker build --build-arg timezone=`cat /etc/timezone` -t yolov8:v0 .
-      
+ | the following optional arguments can be passed via --build-arg: @@ -42,12 +41,11 @@ Environment Preparations #. | Start your docker: - .. raw:: html - :name:validation + .. code-block:: - - docker run --name "your_docker_name" -it --gpus all --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir yolov8:v0 - + + docker run --name "your_docker_name" -it --gpus all --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir yolov8:v0 + * ``docker run`` create a new docker container. * ``--name `` name for your container. @@ -72,12 +70,11 @@ Training and exporting to ONNX * | Start training - The following command is an example for training a *yolov8s* model. - .. raw:: html - :name:validation + .. code-block:: - - yolo detect train data=coco128.yaml model=yolov8s.pt name=retrain_yolov8s epochs=100 batch=16 - + + yolo detect train data=coco128.yaml model=yolov8s.pt name=retrain_yolov8s epochs=100 batch=16 + * ``yolov8s.pt`` - pretrained weights. The pretrained weights for *yolov8n*\ , *yolov8s*\ , *yolov8m*\ , *yolov8l* and *yolov8x* will be downloaded to your working directory when running this command. * ``coco128.yaml`` - example file for data.yaml file. Can be found at ultralytics/ultralytics/datasets. @@ -85,20 +82,19 @@ Training and exporting to ONNX * ``epochs`` - number of epochs to run. default to 100. * ``batch`` - number of images per batch. default to 16. - **NOTE:**\ more configurable parameters can be found at https://docs.ultralytics.com/modes/train/ + **NOTE:**\ more configurable parameters can be found at https://docs.ultralytics.com/modes/train/ #. | Export to ONNX: | In order to export your trained YOLOv8 model to ONNX run the following script: - .. raw:: html - :name:validation + .. 
code-block:: - - yolo export model=/path/to/trained/best.pt imgsz=640 format=onnx opset=11 # export at 640x640 - + + yolo export model=/path/to/trained/best.pt imgsz=640 format=onnx opset=11 # export at 640x640 + - **NOTE:**\ more configurable parameters can be found at https://docs.ultralytics.com/modes/export/ + **NOTE:**\ more configurable parameters can be found at https://docs.ultralytics.com/modes/export/ ---- @@ -109,16 +105,17 @@ Compile the Model using Hailo Model Zoo | In order to do so you need a working model-zoo environment. | Choose the corresponding YAML from our networks configuration directory, i.e. ``hailo_model_zoo/cfg/networks/yolov8s.yaml``\ , and run compilation using the model zoo: -.. raw:: html - :name:validation +.. code-block:: - - hailomz compile --ckpt yolov8s.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/yolov8s.yaml - + + hailomz compile --ckpt yolov8s.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/yolov8s.yaml --start-node-names name1 name2 --end-node-names name1 --classes 80 + * | ``--ckpt`` - path to your ONNX file. * | ``--calib-path`` - path to a directory with your calibration images in JPEG/png format * | ``--yaml`` - path to your configuration YAML file. +* | ``--start-node-names`` and ``--end-node-names`` - node names for customizing parsing behavior (optional). +* | ``--classes`` - adjusting the number of classes in post-processing configuration (optional). * | The model zoo will take care of adding the input normalization to be part of the model. .. note:: diff --git a/training/yolov8_seg/README.rst b/training/yolov8_seg/README.rst index 1749af26..09c4fd75 100644 --- a/training/yolov8_seg/README.rst +++ b/training/yolov8_seg/README.rst @@ -13,7 +13,7 @@ Prerequisites * nvidia-docker2 (\ `installation instructions `_\ ) -**NOTE:** In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. 
+**NOTE:**\ In case you are using the Hailo Software Suite docker, make sure to run all of the following instructions outside of that docker. Environment Preparations ------------------------ @@ -21,13 +21,12 @@ Environment Preparations #. | Build the docker image: - .. raw:: html - :name:validation + .. code-block:: -

-      cd hailo_model_zoo/training/yolov8_seg
+      
+      cd hailo_model_zoo/training/yolov8_seg
       docker build --build-arg timezone=`cat /etc/timezone` -t yolov8_seg:v0 .
-      
+ | the following optional arguments can be passed via --build-arg: @@ -42,12 +41,11 @@ Environment Preparations #. | Start your docker: - .. raw:: html - :name:validation + .. code-block:: - - docker run --name "your_docker_name" -it --gpus all --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir yolov8_seg:v0 - + + docker run --name "your_docker_name" -it --gpus all --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir yolov8_seg:v0 + * ``docker run`` create a new docker container. * ``--name `` name for your container. @@ -72,12 +70,11 @@ Training and exporting to ONNX * | Start training - The following command is an example for training a *yolov8s-seg* model. - .. raw:: html - :name:validation + .. code-block:: - - yolo segment train data=coco128-seg.yaml model=yolov8s-seg.pt name=retrain_yolov8s_seg epochs=100 batch=16 - + + yolo segment train data=coco128-seg.yaml model=yolov8s-seg.pt name=retrain_yolov8s_seg epochs=100 batch=16 + * ``yolov8s-seg.pt`` - pretrained weights. The pretrained weights for *yolov8n-seg*\ , *yolov8s-seg*\ , *yolov8m-seg*\ , *yolov8l-seg* and *yolov8x-seg* will be downloaded to your working directory when running this command. * ``coco128-seg.yaml`` - example file for data.yaml file. Can be found at ultralytics/ultralytics/datasets. @@ -85,20 +82,19 @@ Training and exporting to ONNX * ``epochs`` - number of epochs to run. default to 100. * ``batch`` - number of images per batch. default to 16. - **NOTE:**\ more configurable parameters can be found at https://docs.ultralytics.com/modes/train/ + **NOTE:**\ more configurable parameters can be found at https://docs.ultralytics.com/modes/train/ #. | Export to ONNX: | In order to export your trained YOLOv8-seg model to ONNX run the following script: - .. raw:: html - :name:validation + .. 
code-block:: - - yolo export model=/path/to/trained/best.pt imgsz=640 format=onnx opset=11 # export at 640x640 - + + yolo export model=/path/to/trained/best.pt imgsz=640 format=onnx opset=11 # export at 640x640 + - **NOTE:**\ more configurable parameters can be found at https://docs.ultralytics.com/modes/export/ + **NOTE:**\ more configurable parameters can be found at https://docs.ultralytics.com/modes/export/ ---- @@ -109,16 +105,16 @@ Compile the Model using Hailo Model Zoo | In order to do so you need a working model-zoo environment. | Choose the corresponding YAML from our networks configuration directory, i.e. ``hailo_model_zoo/cfg/networks/yolov8s-seg.yaml``\ , and run compilation using the model zoo: -.. raw:: html - :name:validation +.. code-block:: - - hailomz compile --ckpt yolov8s-seg.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/yolov8s-seg.yaml - + + hailomz compile --ckpt yolov8s-seg.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/yolov8s-seg.yaml --start-node-names name1 name2 --end-node-names name1 + * | ``--ckpt`` - path to your ONNX file. * | ``--calib-path`` - path to a directory with your calibration images in JPEG/png format * | ``--yaml`` - path to your configuration YAML file. +* | ``--start-node-names`` and ``--end-node-names`` - node names for customizing parsing behavior (optional). * | The model zoo will take care of adding the input normalization to be part of the model. .. note:: diff --git a/training/yolox/README.rst b/training/yolox/README.rst index 574f63fd..e61ed621 100644 --- a/training/yolox/README.rst +++ b/training/yolox/README.rst @@ -21,13 +21,12 @@ Environment Preparations #. | Build the docker image: - .. raw:: html - :name:validation + .. code-block:: -

-      cd hailo_model_zoo/training/yolox
+      
+      cd hailo_model_zoo/training/yolox
       docker build --build-arg timezone=`cat /etc/timezone` -t yolox:v0 .
-      
+ | the following optional arguments can be passed via --build-arg: @@ -39,12 +38,11 @@ Environment Preparations #. | Start your docker: - .. raw:: html - :name:validation + .. code-block:: - - docker run --name "your_docker_name" -it --gpus all -u "username" --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir yolox:v0 - + + docker run --name "your_docker_name" -it --gpus all -u "username" --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir yolox:v0 + * ``docker run`` create a new docker container. * ``--name `` name for your container. @@ -65,19 +63,18 @@ Training and exporting to ONNX | Start training with the following command: - .. raw:: html - :name:validation + .. code-block:: - - python tools/train.py -f exps/default/yolox_s_leaky.py -d 8 -b 64 -c yolox_s.pth -

+      
+      python tools/train.py -f exps/default/yolox_s_leaky.py -d 8 -b 64 -c yolox_s.pth
+                              
                               exps/default/yolox_m_leaky.py
                               exps/default/yolox_l_leaky.py
                               exps/default/yolox_x_leaky.py
                               exps/default/yolox_s_wide_leaky.py
-                              
+ -
+ * -f: experiment description file @@ -96,12 +93,11 @@ Training and exporting to ONNX | After finishing training run the following command: - .. raw:: html - :name:validation + .. code-block:: - + python tools/export_onnx.py --output-name yolox_s_leaky.onnx -f ./exps/default/yolox_s_leaky.py -c YOLOX_outputs/yolox_s_leaky/best_ckpt.pth - + **NOTE:**\ Your trained model will be found under the following path: ``/workspace/YOLOX/YOLOX_outputs/yolox_s_leaky/``\ , and the exported onnx will be written to ``/workspace/YOLOX/yolox_s_leaky.onnx`` @@ -116,16 +112,17 @@ You can generate an HEF file for inference on Hailo-8 from your trained ONNX mod In order to do so you need a working model-zoo environment. Choose the corresponding YAML from our networks configuration directory, i.e. ``hailo_model_zoo/cfg/networks/yolox_s_leaky.yaml``\ , and run compilation using the model zoo: -.. raw:: html - :name:validation +.. code-block:: - - hailomz compile --ckpt yolox_s_leaky.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/yolox_s_leaky.yaml - + + hailomz compile --ckpt yolox_s_leaky.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/yolox_s_leaky.yaml --start-node-names name1 name2 --end-node-names name1 --classes 80 + * | ``--ckpt`` - path to your ONNX file. * | ``--calib-path`` - path to a directory with your calibration images in JPEG/png format * | ``--yaml`` - path to your configuration YAML file. +* | ``--start-node-names`` and ``--end-node-names`` - node names for customizing parsing behavior (optional). +* | ``--classes`` - adjusting the number of classes in post-processing configuration (optional). * | The model zoo will take care of adding the input normalization to be part of the model. .. 
note:: diff --git a/training/yolox_hailo/Dockerfile b/training/yolox_hailo/Dockerfile new file mode 100644 index 00000000..5924b38b --- /dev/null +++ b/training/yolox_hailo/Dockerfile @@ -0,0 +1,34 @@ +ARG base_image=nvcr.io/nvidia/pytorch:21.10-py3 +FROM $base_image + +# using ARG so it won't persist in user env +ARG DEBIAN_FRONTEND=noninteractive +ARG timezone="Asia/Jerusalem" +ENV TZ=$timezone + +RUN apt-get update && \ + ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && \ + echo $TZ > /etc/timezone && \ + apt-get -y --no-install-recommends install vim git build-essential python3-opencv sudo tmux && \ + # solve mpi conflicts + { which mpirun && apt-get remove -y libopenmpi3 || true ; } + +ARG repo=https://github.com/hailo-ai/YOLOX +ARG yolox_branch=yolox-hailo-model + +RUN mkdir /workspace/data && \ + git clone $repo --branch $yolox_branch && \ + cd YOLOX && pip install -r requirements.txt && pip install -e . && \ + pip install cython==0.29.24 +ENV YOLOX_DATADIR=/workspace/data + +WORKDIR /workspace/YOLOX + +RUN wget https://github.com/Megvii-BaseDetection/YOLOX/releases/download/0.1.1rc0/yolox_s.pth -q +RUN wget https://hailo-model-zoo.s3.eu-west-2.amazonaws.com/HailoNets/OD/yolox_hailo/2023-09-23/yolox_hailo_before_prune.pth -q; \ + wget https://hailo-model-zoo.s3.eu-west-2.amazonaws.com/HailoNets/OD/yolox_hailo/2023-09-23/yolox_hailo_p50.pth -q; \ + wget https://hailo-model-zoo.s3.eu-west-2.amazonaws.com/HailoNets/OD/yolox_hailo/2023-09-23/README.md -q + +RUN pip install "sparseml==1.5.4" + +ENV NM_BYPASS_TORCH_VERSION=1 diff --git a/training/yolox_hailo/GUIDE.md b/training/yolox_hailo/GUIDE.md new file mode 100644 index 00000000..850f9e57 --- /dev/null +++ b/training/yolox_hailo/GUIDE.md @@ -0,0 +1,69 @@ +To create a new image run: +docker build --build-arg timezone=`cat /etc/timezone` --no-cache -t hailo_sparseml_21_06:v0 . 
+ + +To create a container from a pre-build image: +docker run --name hailo_sparseml_21_06_v0 -it --gpus all --ipc=host -v /data:/data -v /fastdata:/fastdata -v /local:/local -v /work/users/$USER/workspace:/root/workspace hailo_sparseml_21_06:v0 + +Once you are in the container, you need to create a dataset folder. +In order to run a default yolox (vanilla) model, please prepare the COCO dataset as follows: +cd +ln -s /path/to/your/COCO ../data/COCO +In Hailo servers its: +ln -s /fastdata/coco/coco ../data/COCO + +---------------------------------------------------- +To run a different dataset, you should modify the 'Data' section in the experiment config file (yolox_hailo_prune.py) as follows: +# Data +self.num_classes = 6 +self.data_dir = '/fastdata/users/hailo_dataset' # base data directory +self.train_ann = "train.json" # train annotation file name +self.val_ann = "test.json" # validation annotation file name +self.test_ann = "test.json" # test annotation file name +self.name = 'images/train2017/' # train images folder, relative to base directory +self.eval_imgs_rpath = 'images/test2017' # validation images folder, relative to base directory + +======================================== +Vanilla Training +======================================== +1. yolox-s +CUDA_VISIBLE_DEVICES=4 python tools/eval.py -n yolox-s -d 1 -b 16 -c yolox_s.pth --conf 0.05 +CUDA_VISIBLE_DEVICES=4,5,6,7 python -m yolox.tools.train -n yolox-s -d 4 -b 32 --fp16 +CUDA_VISIBLE_DEVICES=4,5,6,7 python -m yolox.tools.train -n yolox-s -d 4 -b 32 --fp16 +2. yolox_hailo_4cls + + +3. yolox_hailo + + +======================================== +Prune Training +======================================== +1. yolox-s: +TBD + +2. 
yolox_hailo: +CUDA_VISIBLE_DEVICES=0,1,2,3 python tools/train.py -n yolox_hailo_prune -d 4 -b 32 --fp16 --recipe recipes/recipe_yolox_hailo_pruning.md -c ~/workspace/sparsity/YOLOX/yolox_hailo_pas_best_ckpt.pth -expn testtest --resume + +======================================== +Evaluation +======================================== +1. yolox-s: +CUDA_VISIBLE_DEVICES=6 python -m yolox.tools.eval -n yolox-s -d 1 -b 16 -c yolox_s.pth --conf 0.05 + +2. yolox_hailo: +# CUDA_VISIBLE_DEVICES=6 python tools/eval.py -n yolox_hailo -d 1 -b 16 -c ~/workspace/sparsity/YOLOX/yolox_hailo_pas_best_ckpt.pth --conf 0.05 +# CUDA_VISIBLE_DEVICES=6 python -m yolox.tools.eval -n yolox_hailo -d 1 -b 16 -c ~/workspace/sparsity/YOLOX/yolox_hailo_pas_best_ckpt.pth --conf 0.05 +CUDA_VISIBLE_DEVICES=6 python -m yolox.tools.eval -n yolox_hailo -d 1 -b 16 -c ~/workspace/sparsity/YOLOX/yolox_hailo_pas_best_ckpt.pth --conf 0.05 --deploy + + +======================================== +Export ONNX +======================================== +1. yolox-s: +python -m yolox.tools.export_onnx -n yolox-s --output-name yolox_s_test.onnx -o 11 -expn yolox_s_test -c yolox_s.pth + +2. 
yolox_hailo: +python -m yolox.tools.export_onnx -n yolox_hailo --output-name yolox_hailo.onnx -o 11 -expn yolox_hailo -c ~/workspace/sparsity/YOLOX/yolox_hailo_pas_best_ckpt.pth + + diff --git a/training/yolox_hailo/README.rst b/training/yolox_hailo/README.rst new file mode 100644 index 00000000..20041fb0 --- /dev/null +++ b/training/yolox_hailo/README.rst @@ -0,0 +1,113 @@ +======================== +YOLOX-hailo Retraining +======================== + +* To learn more about yolox-hailo look `here `_ + +---------------------------------------------------------------------------------------- + +Prerequisites +------------- + + +* docker (\ `installation instructions `_\ ) +* nvidia-docker2 (\ `nvidia installation instructions `_\ ) + + **NOTE:**\ In case you use Hailo Software Suite docker, make sure you are doing all the following instructions outside of this docker. + + +Environment Preparations +------------------------ + +#. | Build the docker image: + + .. code-block:: + + + cd hailo_model_zoo/training/yolox_hailo + docker build --build-arg timezone=`cat /etc/timezone` -t yolox_hailo:v0 . + + + | The following optional arguments can be passed via --build-arg: + + * ``timezone`` - a string for setting up timezone. E.g. "Asia/Jerusalem" + * ``user`` - username for a local non-root user. Defaults to 'hailo'. + * ``group`` - default group for a local non-root user. Defaults to 'hailo'. + * ``uid`` - user id for a local non-root user. + * ``gid`` - group id for a local non-root user. + +#. | Start your docker: + + .. code-block:: + + + docker run --name "your_docker_name" -it --gpus all -u "username" --ipc=host -v /path/to/local/data/dir:/path/to/docker/data/dir yolox_hailo:v0 + + + * ``docker run`` create a new docker container. + * ``--name `` name for your container. + * ``-it`` runs the command interactively. + * ``--gpus all`` allows access to all GPUs. + * ``--ipc=host`` sets the IPC mode for the container. 
+ * ``-v /path/to/local/data/dir:/path/to/docker/data/dir`` maps ``/path/to/local/data/dir`` from the host to the container. You can use this command multiple times to mount multiple directories. + * ``yolox_hailo:v0`` the name of the docker image. + +Training and exporting to ONNX +------------------------------ + +#. | Prepare your data: + + | You can use coco format, which is already supported for training on your own custom dataset. More information can be found `in this link `_ + +#. | Training: + + | Start training with the following command: + + .. code-block:: + + + python tools/train.py -n yolox_hailo -d 1 -b 8 -expn train1 --fp16 + + + * -f: experiment description file + * -d: number of gpu devices + * -b: total batch size, the recommended number for -b is num-gpu * 8 + + +#. | Exporting to onnx: + + | After finishing training run the following command: + + .. code-block:: + + + python tools/export_onnx.py -n yolox_hailo --output-name yolox_hailo.onnx -o 11 -c yolox_hailo_outputs/train1/best_ckpt.pth + + + + **NOTE:**\ Your trained model will be found under the following path: ``/workspace/YOLOX/yolox_hailo_outputs/train1/``\ , and the exported onnx will be written to ``/workspace/YOLOX/yolox_hailo.onnx`` + + +---- + +Compile the Model using Hailo Model Zoo +--------------------------------------- + +You can generate an HEF file for inference on Hailo-8 from your trained ONNX model. +In order to do so you need a working model-zoo environment. +Choose the corresponding YAML from our networks configuration directory, i.e. ``hailo_model_zoo/cfg/networks/yolox_hailo_pp.yaml``\ , and run compilation using the model zoo: + +.. code-block:: + + + hailomz compile --ckpt yolox_hailo.onnx --calib-path /path/to/calibration/imgs/dir/ --yaml path/to/yolox_hailo_pp_pruned50.yaml --start-node-names name1 name2 --end-node-names name1 + + +* | ``--ckpt`` - path to your ONNX file. 
+* | ``--calib-path`` - path to a directory with your calibration images in JPEG/png format +* | ``--yaml`` - path to your configuration YAML file. +* | ``--start-node-names`` and ``--end-node-names`` - node names for customizing parsing behavior (optional). +* | The model zoo will take care of adding the input normalization to be part of the model. + +.. note:: + More details about YAML files are `presented here <../../docs/YAML.rst>`_. From 22d5c42e05ed00b372ba0f4adeb6a52eb8beb6cc Mon Sep 17 00:00:00 2001 From: HailoModelZoo Date: Sun, 7 Apr 2024 02:26:56 +0300 Subject: [PATCH 02/17] update-to-version-2.11.0 --- .../HAILO15H/HAILO15H_classification.rst | 126 ++++++------ .../HAILO15H/HAILO15H_depth_estimation.rst | 14 +- .../HAILO15H/HAILO15H_face_attribute.rst | 10 +- .../HAILO15H/HAILO15H_face_detection.rst | 26 +-- .../HAILO15H/HAILO15H_face_recognition.rst | 14 +- .../HAILO15H_facial_landmark_detection.rst | 10 +- .../HAILO15H_hand_landmark_detection.rst | 12 +- .../HAILO15H/HAILO15H_image_denoising.rst | 14 +- .../HAILO15H_instance_segmentation.rst | 42 ++-- .../HAILO15H_low_light_enhancement.rst | 14 +- .../HAILO15H/HAILO15H_object_detection.rst | 4 +- .../HAILO15H/HAILO15H_person_attribute.rst | 10 +- .../HAILO15H/HAILO15H_person_re_id.rst | 14 +- .../HAILO15H/HAILO15H_pose_estimation.rst | 26 +-- .../HAILO15H_semantic_segmentation.rst | 30 +-- ...HAILO15H_single_person_pose_estimation.rst | 14 +- .../HAILO15H_stereo_depth_estimation.rst | 10 +- .../HAILO15H/HAILO15H_super_resolution.rst | 18 +- .../HAILO15H_zero_shot_classification.rst | 10 +- .../HAILO15M/HAILO15M_classification.rst | 126 ++++++------ .../HAILO15M/HAILO15M_depth_estimation.rst | 14 +- .../HAILO15M/HAILO15M_face_attribute.rst | 10 +- .../HAILO15M/HAILO15M_face_detection.rst | 26 +-- .../HAILO15M/HAILO15M_face_recognition.rst | 14 +- .../HAILO15M_facial_landmark_detection.rst | 10 +- .../HAILO15M_hand_landmark_detection.rst | 12 +- .../HAILO15M/HAILO15M_image_denoising.rst | 14 +- 
.../HAILO15M_instance_segmentation.rst | 42 ++-- .../HAILO15M_low_light_enhancement.rst | 14 +- .../HAILO15M/HAILO15M_object_detection.rst | 4 +- .../HAILO15M/HAILO15M_person_attribute.rst | 10 +- .../HAILO15M/HAILO15M_person_re_id.rst | 14 +- .../HAILO15M/HAILO15M_pose_estimation.rst | 26 +-- .../HAILO15M_semantic_segmentation.rst | 30 +-- ...HAILO15M_single_person_pose_estimation.rst | 14 +- .../HAILO15M_stereo_depth_estimation.rst | 10 +- .../HAILO15M/HAILO15M_super_resolution.rst | 18 +- .../HAILO15M_zero_shot_classification.rst | 10 +- .../HAILO8/HAILO8_classification.rst | 126 ++++++------ .../HAILO8/HAILO8_depth_estimation.rst | 14 +- .../HAILO8/HAILO8_face_attribute.rst | 10 +- .../HAILO8/HAILO8_face_detection.rst | 26 +-- .../HAILO8/HAILO8_face_recognition.rst | 14 +- .../HAILO8_facial_landmark_detection.rst | 10 +- .../HAILO8/HAILO8_hand_landmark_detection.rst | 12 +- .../HAILO8/HAILO8_image_denoising.rst | 14 +- .../HAILO8/HAILO8_instance_segmentation.rst | 42 ++-- .../HAILO8/HAILO8_low_light_enhancement.rst | 14 +- .../HAILO8/HAILO8_object_detection.rst | 186 +++++++++--------- .../HAILO8/HAILO8_person_attribute.rst | 10 +- .../HAILO8/HAILO8_person_re_id.rst | 14 +- .../HAILO8/HAILO8_pose_estimation.rst | 26 +-- .../HAILO8/HAILO8_semantic_segmentation.rst | 30 +-- .../HAILO8_single_person_pose_estimation.rst | 14 +- .../HAILO8/HAILO8_stereo_depth_estimation.rst | 10 +- .../HAILO8/HAILO8_super_resolution.rst | 18 +- .../HAILO8_zero_shot_classification.rst | 10 +- .../HAILO8L/HAILO8l_classificaion.rst | 126 ++++++------ .../HAILO8L/HAILO8l_depth_estimation.rst | 14 +- .../HAILO8L/HAILO8l_face_attribute.rst | 10 +- .../HAILO8L/HAILO8l_face_detection.rst | 26 +-- .../HAILO8L/HAILO8l_face_recognition.rst | 14 +- .../HAILO8l_facial_landmark_detection.rst | 10 +- .../HAILO8l_hand_landmark_detection.rst | 12 +- .../HAILO8L/HAILO8l_image_denoising.rst | 14 +- .../HAILO8L/HAILO8l_instance_segmentation.rst | 42 ++-- .../HAILO8L/HAILO8l_low_light_enhancement.rst | 
14 +- .../HAILO8L/HAILO8l_object_detection.rst | 186 +++++++++--------- .../HAILO8L/HAILO8l_person_attribute.rst | 10 +- .../HAILO8L/HAILO8l_person_re_id.rst | 14 +- .../HAILO8L/HAILO8l_pose_estimation.rst | 26 +-- .../HAILO8L/HAILO8l_semantic_segmentation.rst | 30 +-- .../HAILO8l_single_person_pose_estimation.rst | 14 +- .../HAILO8l_stereo_depth_estimation.rst | 10 +- .../HAILO8L/HAILO8l_super_resolution.rst | 18 +- .../HAILO8l_zero_shot_classification.rst | 10 +- 76 files changed, 1018 insertions(+), 1018 deletions(-) diff --git a/docs/public_models/HAILO15H/HAILO15H_classification.rst b/docs/public_models/HAILO15H/HAILO15H_classification.rst index 56ac7acb..71f7406c 100644 --- a/docs/public_models/HAILO15H/HAILO15H_classification.rst +++ b/docs/public_models/HAILO15H/HAILO15H_classification.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Classification`_ - + .. 
_Classification: @@ -42,8 +42,8 @@ ImageNet - Pretrained - Source - Compiled - - NV12 Compiled - * - efficientnet_l + - NV12 Compiled + * - efficientnet_l - 80.46 - 79.36 - 84.6289 @@ -54,8 +54,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - efficientnet_lite0 + - `download `_ + * - efficientnet_lite0 - 74.99 - 73.81 - None @@ -66,8 +66,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - efficientnet_lite1 + - `download `_ + * - efficientnet_lite1 - 76.68 - 76.21 - None @@ -78,8 +78,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - efficientnet_lite2 + - `download `_ + * - efficientnet_lite2 - 77.45 - 76.74 - None @@ -90,8 +90,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - efficientnet_lite3 + - `download `_ + * - efficientnet_lite3 - 79.29 - 78.42 - None @@ -102,8 +102,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - efficientnet_lite4 + - `download `_ + * - efficientnet_lite4 - 80.79 - 79.99 - 98.9888 @@ -114,8 +114,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - efficientnet_m + - `download `_ + * - efficientnet_m - 78.91 - 78.63 - 175.255 @@ -126,8 +126,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - efficientnet_s + - `download `_ + * - efficientnet_s - 77.64 - 77.32 - None @@ -138,8 +138,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - hardnet39ds + - `download `_ + * - hardnet39ds - 73.43 - 72.92 - 351 @@ -150,8 +150,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - hardnet68 + - `download `_ + * - hardnet68 - 75.47 - 75.04 - 150 @@ -162,8 +162,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - inception_v1 + - `download `_ + * - inception_v1 - 69.74 - 69.54 - 344 @@ -174,8 +174,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - mobilenet_v1 + - 
`download `_ + * - mobilenet_v1 - 70.97 - 70.26 - 2874 @@ -186,8 +186,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - mobilenet_v2_1.0 |rocket| + - `download `_ + * - mobilenet_v2_1.0 |rocket| - 71.78 - 71.0 - 3455 @@ -198,8 +198,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - mobilenet_v2_1.4 + - `download `_ + * - mobilenet_v2_1.4 - 74.18 - 73.18 - 580 @@ -210,8 +210,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - mobilenet_v3 + - `download `_ + * - mobilenet_v3 - 72.21 - 71.73 - 377 @@ -222,8 +222,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - mobilenet_v3_large_minimalistic + - `download `_ + * - mobilenet_v3_large_minimalistic - 72.11 - 70.61 - 2595 @@ -234,8 +234,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - regnetx_1.6gf + - `download `_ + * - regnetx_1.6gf - 77.05 - 76.75 - 362 @@ -246,8 +246,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - regnetx_800mf + - `download `_ + * - regnetx_800mf - 75.16 - 74.84 - 2559 @@ -258,8 +258,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - repvgg_a1 + - `download `_ + * - repvgg_a1 - 74.4 - 72.4 - 1783 @@ -270,8 +270,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - repvgg_a2 + - `download `_ + * - repvgg_a2 - 76.52 - 74.52 - 245 @@ -282,8 +282,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - resmlp12_relu + - `download `_ + * - resmlp12_relu - 75.26 - 74.32 - 86 @@ -294,8 +294,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - resnet_v1_18 + - `download `_ + * - resnet_v1_18 - 71.26 - 71.06 - 2031 @@ -306,8 +306,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - resnet_v1_34 + - `download `_ + * - resnet_v1_34 - 72.7 - 72.14 - 261 @@ -318,8 +318,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - 
`download `_ - * - resnet_v1_50 |rocket| |star| + - `download `_ + * - resnet_v1_50 |rocket| |star| - 75.12 - 74.47 - 246 @@ -330,8 +330,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - resnext26_32x4d + - `download `_ + * - resnext26_32x4d - 76.18 - 75.78 - 341 @@ -342,8 +342,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - resnext50_32x4d + - `download `_ + * - resnext50_32x4d - 79.31 - 78.21 - 191 @@ -354,8 +354,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - squeezenet_v1.1 + - `download `_ + * - squeezenet_v1.1 - 59.85 - 59.4 - 3327 @@ -366,8 +366,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - vit_base_bn |rocket| + - `download `_ + * - vit_base_bn |rocket| - 79.98 - 78.58 - 59 @@ -378,8 +378,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - vit_small_bn + - `download `_ + * - vit_small_bn - 78.12 - 77.02 - 117 @@ -390,8 +390,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - vit_tiny_bn + - `download `_ + * - vit_tiny_bn - 68.95 - 67.15 - 211 diff --git a/docs/public_models/HAILO15H/HAILO15H_depth_estimation.rst b/docs/public_models/HAILO15H/HAILO15H_depth_estimation.rst index d1ad814b..544d33d9 100644 --- a/docs/public_models/HAILO15H/HAILO15H_depth_estimation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_depth_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Depth Estimation`_ - + .. 
_Depth Estimation: @@ -42,8 +42,8 @@ NYU - Pretrained - Source - Compiled - - NV12 Compiled - * - fast_depth |star| + - NV12 Compiled + * - fast_depth |star| - 0.6 - 0.62 - 1379 @@ -54,8 +54,8 @@ NYU - `download `_ - `link `_ - `download `_ - - `download `_ - * - scdepthv3 + - `download `_ + * - scdepthv3 - 0.48 - 0.51 - 204 diff --git a/docs/public_models/HAILO15H/HAILO15H_face_attribute.rst b/docs/public_models/HAILO15H/HAILO15H_face_attribute.rst index 2ace0de4..ec7b8429 100644 --- a/docs/public_models/HAILO15H/HAILO15H_face_attribute.rst +++ b/docs/public_models/HAILO15H/HAILO15H_face_attribute.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Face Attribute`_ - + .. _Face Attribute: @@ -42,8 +42,8 @@ CELEBA - Pretrained - Source - Compiled - - NV12 Compiled - * - face_attr_resnet_v1_18 + - NV12 Compiled + * - face_attr_resnet_v1_18 - 81.19 - 81.09 - 2379 diff --git a/docs/public_models/HAILO15H/HAILO15H_face_detection.rst b/docs/public_models/HAILO15H/HAILO15H_face_detection.rst index 6c5031db..13b564b5 100644 --- a/docs/public_models/HAILO15H/HAILO15H_face_detection.rst +++ b/docs/public_models/HAILO15H/HAILO15H_face_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
@@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Face Detection`_ - + .. _Face Detection: @@ -42,8 +42,8 @@ WiderFace - Pretrained - Source - Compiled - - NV12 Compiled - * - lightface_slim |star| + - NV12 Compiled + * - lightface_slim |star| - 39.7 - 39.22 - 3968 @@ -54,8 +54,8 @@ WiderFace - `download `_ - `link `_ - `download `_ - - `download `_ - * - retinaface_mobilenet_v1 |star| + - `download `_ + * - retinaface_mobilenet_v1 |star| - 81.27 - 81.17 - 73 @@ -66,8 +66,8 @@ WiderFace - `download `_ - `link `_ - `download `_ - - `download `_ - * - scrfd_10g + - `download `_ + * - scrfd_10g - 82.13 - 82.03 - 134 @@ -78,8 +78,8 @@ WiderFace - `download `_ - `link `_ - `download `_ - - `download `_ - * - scrfd_2.5g + - `download `_ + * - scrfd_2.5g - 76.59 - 76.32 - 315 @@ -90,8 +90,8 @@ WiderFace - `download `_ - `link `_ - `download `_ - - `download `_ - * - scrfd_500m + - `download `_ + * - scrfd_500m - 68.98 - 68.88 - 344 diff --git a/docs/public_models/HAILO15H/HAILO15H_face_recognition.rst b/docs/public_models/HAILO15H/HAILO15H_face_recognition.rst index 66a62bb2..638f17de 100644 --- a/docs/public_models/HAILO15H/HAILO15H_face_recognition.rst +++ b/docs/public_models/HAILO15H/HAILO15H_face_recognition.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Face Recognition`_ - + .. 
_Face Recognition: @@ -42,8 +42,8 @@ LFW - Pretrained - Source - Compiled - - NV12 Compiled - * - arcface_mobilefacenet + - NV12 Compiled + * - arcface_mobilefacenet - 99.43 - 99.41 - 1924 @@ -54,8 +54,8 @@ LFW - `download `_ - `link `_ - `download `_ - - `download `_ - * - arcface_r50 + - `download `_ + * - arcface_r50 - 99.72 - 99.71 - 154 diff --git a/docs/public_models/HAILO15H/HAILO15H_facial_landmark_detection.rst b/docs/public_models/HAILO15H/HAILO15H_facial_landmark_detection.rst index 362a27a6..55af00b7 100644 --- a/docs/public_models/HAILO15H/HAILO15H_facial_landmark_detection.rst +++ b/docs/public_models/HAILO15H/HAILO15H_facial_landmark_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Facial Landmark Detection`_ - + .. _Facial Landmark Detection: @@ -42,8 +42,8 @@ AFLW2k3d - Pretrained - Source - Compiled - - NV12 Compiled - * - tddfa_mobilenet_v1 |star| + - NV12 Compiled + * - tddfa_mobilenet_v1 |star| - 3.68 - 4.05 - 8936 diff --git a/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst b/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst index 0565bae8..7e259035 100644 --- a/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst +++ b/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. 
|star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Hand Landmark detection`_ - + .. _Hand Landmark detection: @@ -26,7 +26,7 @@ Hand Landmark detection Hand Landmark ^^^^^^^^^^^^^ - + .. list-table:: :header-rows: 1 @@ -39,8 +39,8 @@ Hand Landmark - Pretrained - Source - Compiled - - NV12 Compiled - * - hand_landmark_lite + - NV12 Compiled + * - hand_landmark_lite - 1340 - 1340 - 224x224x3 diff --git a/docs/public_models/HAILO15H/HAILO15H_image_denoising.rst b/docs/public_models/HAILO15H/HAILO15H_image_denoising.rst index 63ed98d9..1f674b49 100644 --- a/docs/public_models/HAILO15H/HAILO15H_image_denoising.rst +++ b/docs/public_models/HAILO15H/HAILO15H_image_denoising.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Image Denoising`_ - + .. 
_Image Denoising: @@ -42,8 +42,8 @@ BSD68 - Pretrained - Source - Compiled - - NV12 Compiled - * - dncnn3 + - NV12 Compiled + * - dncnn3 - 31.46 - 31.26 - 44 @@ -74,8 +74,8 @@ CBSD68 - Pretrained - Source - Compiled - - NV12 Compiled - * - dncnn_color_blind + - NV12 Compiled + * - dncnn_color_blind - 33.87 - 32.97 - 33 diff --git a/docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst b/docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst index 63677445..b94e162c 100644 --- a/docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Instance Segmentation`_ - + .. 
_Instance Segmentation: @@ -42,8 +42,8 @@ COCO - Pretrained - Source - Compiled - - NV12 Compiled - * - yolact_regnetx_1.6gf + - NV12 Compiled + * - yolact_regnetx_1.6gf - 27.57 - 27.27 - 47 @@ -54,8 +54,8 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ - * - yolact_regnetx_800mf + - `download `_ + * - yolact_regnetx_800mf - 25.61 - 25.5 - 57 @@ -66,8 +66,8 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ - * - yolov5l_seg + - `download `_ + * - yolov5l_seg - 39.78 - 39.09 - 32 @@ -78,8 +78,8 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ - * - yolov5m_seg + - `download `_ + * - yolov5m_seg - 37.05 - 36.32 - 60 @@ -90,8 +90,8 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ - * - yolov5n_seg |star| + - `download `_ + * - yolov5n_seg |star| - 23.35 - 22.75 - 167 @@ -102,8 +102,8 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ - * - yolov5s_seg + - `download `_ + * - yolov5s_seg - 31.57 - 30.8 - 117 @@ -114,8 +114,8 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ - * - yolov8m_seg + - `download `_ + * - yolov8m_seg - 40.6 - 39.85 - 44 @@ -126,8 +126,8 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ - * - yolov8n_seg + - `download `_ + * - yolov8n_seg - 30.32 - 29.68 - 196 @@ -138,8 +138,8 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ - * - yolov8s_seg + - `download `_ + * - yolov8s_seg - 36.63 - 36.13 - 89 diff --git a/docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst b/docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst index b8e1474e..a0fd4716 100644 --- a/docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst +++ b/docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. 
|star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Low Light Enhancement`_ - + .. _Low Light Enhancement: @@ -42,8 +42,8 @@ LOL - Pretrained - Source - Compiled - - NV12 Compiled - * - zero_dce + - NV12 Compiled + * - zero_dce - 16.23 - 16.24 - 114 @@ -54,8 +54,8 @@ LOL - `download `_ - `link `_ - `download `_ - - `download `_ - * - zero_dce_pp + - `download `_ + * - zero_dce_pp - 15.95 - 15.82 - 29 diff --git a/docs/public_models/HAILO15H/HAILO15H_object_detection.rst b/docs/public_models/HAILO15H/HAILO15H_object_detection.rst index ac8856eb..e0d7d3b8 100644 --- a/docs/public_models/HAILO15H/HAILO15H_object_detection.rst +++ b/docs/public_models/HAILO15H/HAILO15H_object_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15H/HAILO15H_person_attribute.rst b/docs/public_models/HAILO15H/HAILO15H_person_attribute.rst index 38e4431e..ae355da8 100644 --- a/docs/public_models/HAILO15H/HAILO15H_person_attribute.rst +++ b/docs/public_models/HAILO15H/HAILO15H_person_attribute.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
@@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Person Attribute`_ - + .. _Person Attribute: @@ -42,8 +42,8 @@ PETA - Pretrained - Source - Compiled - - NV12 Compiled - * - person_attr_resnet_v1_18 + - NV12 Compiled + * - person_attr_resnet_v1_18 - 82.5 - 82.61 - 1944 diff --git a/docs/public_models/HAILO15H/HAILO15H_person_re_id.rst b/docs/public_models/HAILO15H/HAILO15H_person_re_id.rst index b60ef669..cf6129ae 100644 --- a/docs/public_models/HAILO15H/HAILO15H_person_re_id.rst +++ b/docs/public_models/HAILO15H/HAILO15H_person_re_id.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Person Re-ID`_ - + .. _Person Re-ID: @@ -42,8 +42,8 @@ Market1501 - Pretrained - Source - Compiled - - NV12 Compiled - * - osnet_x1_0 + - NV12 Compiled + * - osnet_x1_0 - 94.43 - 93.63 - 167 @@ -54,8 +54,8 @@ Market1501 - `download `_ - `link `_ - `download `_ - - `download `_ - * - repvgg_a0_person_reid_512 |star| + - `download `_ + * - repvgg_a0_person_reid_512 |star| - 89.9 - 89.3 - 5082 diff --git a/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst b/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst index 5cc4c29e..f80ba637 100644 --- a/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. 
|star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Pose Estimation`_ - + .. _Pose Estimation: @@ -42,8 +42,8 @@ COCO - Pretrained - Source - Compiled - - NV12 Compiled - * - centerpose_regnetx_1.6gf_fpn |star| + - NV12 Compiled + * - centerpose_regnetx_1.6gf_fpn |star| - 53.54 - 53.53 - 67 @@ -54,8 +54,8 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ - * - centerpose_regnetx_800mf + - `download `_ + * - centerpose_regnetx_800mf - 44.07 - 43.07 - 85 @@ -66,8 +66,8 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ - * - centerpose_repvgg_a0 |star| + - `download `_ + * - centerpose_repvgg_a0 |star| - 39.17 - 37.17 - 146 @@ -78,8 +78,8 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ - * - yolov8m_pose + - `download `_ + * - yolov8m_pose - 64.26 - 61.66 - 54 @@ -90,8 +90,8 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ - * - yolov8s_pose + - `download `_ + * - yolov8s_pose - 59.2 - 55.6 - 114 diff --git a/docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst b/docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst index 3075b15a..4c9b93d9 100644 --- a/docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Semantic Segmentation`_ - + .. 
_Semantic Segmentation: @@ -42,8 +42,8 @@ Cityscapes - Pretrained - Source - Compiled - - NV12 Compiled - * - fcn8_resnet_v1_18 |star| + - NV12 Compiled + * - fcn8_resnet_v1_18 |star| - 69.41 - 69.21 - 24 @@ -54,8 +54,8 @@ Cityscapes - `download `_ - `link `_ - `download `_ - - `download `_ - * - segformer_b0_bn + - `download `_ + * - segformer_b0_bn - 69.81 - 68.01 - None @@ -66,8 +66,8 @@ Cityscapes - `download `_ - `link `_ - `download `_ - - `download `_ - * - stdc1 |rocket| + - `download `_ + * - stdc1 |rocket| - 74.57 - 73.92 - 13 @@ -98,8 +98,8 @@ Oxford-IIIT Pet - Pretrained - Source - Compiled - - NV12 Compiled - * - unet_mobilenet_v2 + - NV12 Compiled + * - unet_mobilenet_v2 - 77.32 - 77.02 - 206 @@ -130,8 +130,8 @@ Pascal VOC - Pretrained - Source - Compiled - - NV12 Compiled - * - deeplab_v3_mobilenet_v2 + - NV12 Compiled + * - deeplab_v3_mobilenet_v2 - 76.05 - 74.8 - 90 @@ -142,8 +142,8 @@ Pascal VOC - `download `_ - `link `_ - `download `_ - - `download `_ - * - deeplab_v3_mobilenet_v2_wo_dilation + - `download `_ + * - deeplab_v3_mobilenet_v2_wo_dilation - 71.46 - 71.26 - 97 diff --git a/docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst b/docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst index 58037f2e..b0e79ba4 100644 --- a/docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Single Person Pose Estimation`_ - + .. 
_Single Person Pose Estimation: @@ -42,8 +42,8 @@ COCO - Pretrained - Source - Compiled - - NV12 Compiled - * - mspn_regnetx_800mf |star| + - NV12 Compiled + * - mspn_regnetx_800mf |star| - 70.8 - 70.3 - 305 @@ -54,8 +54,8 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ - * - vit_pose_small_bn + - `download `_ + * - vit_pose_small_bn - 72.01 - 70.81 - 82 diff --git a/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst b/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst index c6140ffa..22731c28 100644 --- a/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Stereo Depth Estimation`_ - + .. _Stereo Depth Estimation: @@ -42,8 +42,8 @@ N/A - Pretrained - Source - Compiled - - NV12 Compiled - * - stereonet + - NV12 Compiled + * - stereonet - 91.79 - 89.14 - None diff --git a/docs/public_models/HAILO15H/HAILO15H_super_resolution.rst b/docs/public_models/HAILO15H/HAILO15H_super_resolution.rst index 1393ef05..d84eaa28 100644 --- a/docs/public_models/HAILO15H/HAILO15H_super_resolution.rst +++ b/docs/public_models/HAILO15H/HAILO15H_super_resolution.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
@@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Super Resolution`_ - + .. _Super Resolution: @@ -42,8 +42,8 @@ BSD100 - Pretrained - Source - Compiled - - NV12 Compiled - * - espcn_x2 + - NV12 Compiled + * - espcn_x2 - 31.4 - 30.3 - 1637 @@ -54,8 +54,8 @@ BSD100 - `download `_ - `link `_ - `download `_ - - `download `_ - * - espcn_x3 + - `download `_ + * - espcn_x3 - 28.41 - 28.06 - 1925 @@ -66,8 +66,8 @@ BSD100 - `download `_ - `link `_ - `download `_ - - `download `_ - * - espcn_x4 + - `download `_ + * - espcn_x4 - 26.83 - 26.58 - 1891 diff --git a/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst b/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst index 91ad0b1f..f9e5c537 100644 --- a/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst +++ b/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Zero-shot Classification`_ - + .. _Zero-shot Classification: @@ -42,8 +42,8 @@ CIFAR100 - Pretrained - Source - Compiled - - NV12 Compiled - * - clip_resnet_50 + - NV12 Compiled + * - clip_resnet_50 - 42.07 - 38.57 - 139 diff --git a/docs/public_models/HAILO15M/HAILO15M_classification.rst b/docs/public_models/HAILO15M/HAILO15M_classification.rst index 82fa75f2..2226bb9d 100644 --- a/docs/public_models/HAILO15M/HAILO15M_classification.rst +++ b/docs/public_models/HAILO15M/HAILO15M_classification.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. 
|rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Classification`_ - + .. _Classification: @@ -42,8 +42,8 @@ ImageNet - Pretrained - Source - Compiled - - NV12 Compiled - * - efficientnet_l + - NV12 Compiled + * - efficientnet_l - 80.46 - 79.36 - 57.9951 @@ -54,8 +54,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - efficientnet_lite0 + - `download `_ + * - efficientnet_lite0 - 74.99 - 73.81 - None @@ -66,8 +66,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - efficientnet_lite1 + - `download `_ + * - efficientnet_lite1 - 76.68 - 76.21 - None @@ -78,8 +78,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - efficientnet_lite2 + - `download `_ + * - efficientnet_lite2 - 77.45 - 76.74 - None @@ -90,8 +90,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - efficientnet_lite3 + - `download `_ + * - efficientnet_lite3 - 79.29 - 78.42 - None @@ -102,8 +102,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - efficientnet_lite4 + - `download `_ + * - efficientnet_lite4 - 80.79 - 79.99 - 73.7215 @@ -114,8 +114,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - efficientnet_m + - `download `_ + * - efficientnet_m - 78.91 - 78.63 - 127.623 @@ -126,8 +126,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - efficientnet_s + - `download `_ + * - efficientnet_s - 77.64 - 77.32 - None @@ -138,8 +138,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - hardnet39ds + - `download `_ + * - hardnet39ds - 73.43 - 72.92 - 280 @@ -150,8 +150,8 @@ 
ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - hardnet68 + - `download `_ + * - hardnet68 - 75.47 - 75.04 - 120 @@ -162,8 +162,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - inception_v1 + - `download `_ + * - inception_v1 - 69.74 - 69.54 - 234 @@ -174,8 +174,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - mobilenet_v1 + - `download `_ + * - mobilenet_v1 - 70.97 - 70.26 - 1427 @@ -186,8 +186,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - mobilenet_v2_1.0 |rocket| + - `download `_ + * - mobilenet_v2_1.0 |rocket| - 71.78 - 71.0 - 352 @@ -198,8 +198,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - mobilenet_v2_1.4 + - `download `_ + * - mobilenet_v2_1.4 - 74.18 - 73.18 - 267 @@ -210,8 +210,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - mobilenet_v3 + - `download `_ + * - mobilenet_v3 - 72.21 - 71.73 - 323 @@ -222,8 +222,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - mobilenet_v3_large_minimalistic + - `download `_ + * - mobilenet_v3_large_minimalistic - 72.11 - 70.61 - 446 @@ -234,8 +234,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - regnetx_1.6gf + - `download `_ + * - regnetx_1.6gf - 77.05 - 76.75 - 302 @@ -246,8 +246,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - regnetx_800mf + - `download `_ + * - regnetx_800mf - 75.16 - 74.84 - 410 @@ -258,8 +258,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - repvgg_a1 + - `download `_ + * - repvgg_a1 - 74.4 - 72.4 - 319 @@ -270,8 +270,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - repvgg_a2 + - `download `_ + * - repvgg_a2 - 76.52 - 74.52 - 180 @@ -282,8 +282,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - resmlp12_relu + - `download `_ + * - resmlp12_relu - 75.26 - 74.32 
- 87 @@ -294,8 +294,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - resnet_v1_18 + - `download `_ + * - resnet_v1_18 - 71.26 - 71.06 - 381 @@ -306,8 +306,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - resnet_v1_34 + - `download `_ + * - resnet_v1_34 - 72.7 - 72.14 - 209 @@ -318,8 +318,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - resnet_v1_50 |rocket| |star| + - `download `_ + * - resnet_v1_50 |rocket| |star| - 75.12 - 74.47 - 179 @@ -330,8 +330,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - resnext26_32x4d + - `download `_ + * - resnext26_32x4d - 76.18 - 75.78 - 249 @@ -342,8 +342,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - resnext50_32x4d + - `download `_ + * - resnext50_32x4d - 79.31 - 78.21 - 156 @@ -354,8 +354,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - squeezenet_v1.1 + - `download `_ + * - squeezenet_v1.1 - 59.85 - 59.4 - 647 @@ -366,8 +366,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - vit_base_bn |rocket| + - `download `_ + * - vit_base_bn |rocket| - 79.98 - 78.58 - 43 @@ -378,8 +378,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - vit_small_bn + - `download `_ + * - vit_small_bn - 78.12 - 77.02 - 97 @@ -390,8 +390,8 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ - * - vit_tiny_bn + - `download `_ + * - vit_tiny_bn - 68.95 - 67.15 - 174 diff --git a/docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst b/docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst index 64715e78..8e809d89 100644 --- a/docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. 
|star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Depth Estimation`_ - + .. _Depth Estimation: @@ -42,8 +42,8 @@ NYU - Pretrained - Source - Compiled - - NV12 Compiled - * - fast_depth |star| + - NV12 Compiled + * - fast_depth |star| - 0.6 - 0.62 - 324 @@ -54,8 +54,8 @@ NYU - `download `_ - `link `_ - `download `_ - - `download `_ - * - scdepthv3 + - `download `_ + * - scdepthv3 - 0.48 - 0.51 - 153 diff --git a/docs/public_models/HAILO15M/HAILO15M_face_attribute.rst b/docs/public_models/HAILO15M/HAILO15M_face_attribute.rst index 66536455..cb53da27 100644 --- a/docs/public_models/HAILO15M/HAILO15M_face_attribute.rst +++ b/docs/public_models/HAILO15M/HAILO15M_face_attribute.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Face Attribute`_ - + .. _Face Attribute: @@ -42,8 +42,8 @@ CELEBA - Pretrained - Source - Compiled - - NV12 Compiled - * - face_attr_resnet_v1_18 + - NV12 Compiled + * - face_attr_resnet_v1_18 - 81.19 - 81.09 - 430 diff --git a/docs/public_models/HAILO15M/HAILO15M_face_detection.rst b/docs/public_models/HAILO15M/HAILO15M_face_detection.rst index 0f0cc631..a9beceb5 100644 --- a/docs/public_models/HAILO15M/HAILO15M_face_detection.rst +++ b/docs/public_models/HAILO15M/HAILO15M_face_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. 
|rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Face Detection`_ - + .. _Face Detection: @@ -42,8 +42,8 @@ WiderFace - Pretrained - Source - Compiled - - NV12 Compiled - * - lightface_slim |star| + - NV12 Compiled + * - lightface_slim |star| - 39.7 - 39.22 - 644 @@ -54,8 +54,8 @@ WiderFace - `download `_ - `link `_ - `download `_ - - `download `_ - * - retinaface_mobilenet_v1 |star| + - `download `_ + * - retinaface_mobilenet_v1 |star| - 81.27 - 81.17 - 49 @@ -66,8 +66,8 @@ WiderFace - `download `_ - `link `_ - `download `_ - - `download `_ - * - scrfd_10g + - `download `_ + * - scrfd_10g - 82.13 - 82.03 - 90 @@ -78,8 +78,8 @@ WiderFace - `download `_ - `link `_ - `download `_ - - `download `_ - * - scrfd_2.5g + - `download `_ + * - scrfd_2.5g - 76.59 - 76.32 - 206 @@ -90,8 +90,8 @@ WiderFace - `download `_ - `link `_ - `download `_ - - `download `_ - * - scrfd_500m + - `download `_ + * - scrfd_500m - 68.98 - 68.88 - 232 diff --git a/docs/public_models/HAILO15M/HAILO15M_face_recognition.rst b/docs/public_models/HAILO15M/HAILO15M_face_recognition.rst index 952140e3..1cb25701 100644 --- a/docs/public_models/HAILO15M/HAILO15M_face_recognition.rst +++ b/docs/public_models/HAILO15M/HAILO15M_face_recognition.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
@@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Face Recognition`_ - + .. _Face Recognition: @@ -42,8 +42,8 @@ LFW - Pretrained - Source - Compiled - - NV12 Compiled - * - arcface_mobilefacenet + - NV12 Compiled + * - arcface_mobilefacenet - 99.43 - 99.41 - 439 @@ -54,8 +54,8 @@ LFW - `download `_ - `link `_ - `download `_ - - `download `_ - * - arcface_r50 + - `download `_ + * - arcface_r50 - 99.72 - 99.71 - 113 diff --git a/docs/public_models/HAILO15M/HAILO15M_facial_landmark_detection.rst b/docs/public_models/HAILO15M/HAILO15M_facial_landmark_detection.rst index 611c8a9d..0cee9595 100644 --- a/docs/public_models/HAILO15M/HAILO15M_facial_landmark_detection.rst +++ b/docs/public_models/HAILO15M/HAILO15M_facial_landmark_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Facial Landmark Detection`_ - + .. _Facial Landmark Detection: @@ -42,8 +42,8 @@ AFLW2k3d - Pretrained - Source - Compiled - - NV12 Compiled - * - tddfa_mobilenet_v1 |star| + - NV12 Compiled + * - tddfa_mobilenet_v1 |star| - 3.68 - 4.05 - 4483 diff --git a/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst b/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst index 38000d97..f1709dae 100644 --- a/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst +++ b/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. 
|rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Hand Landmark detection`_ - + .. _Hand Landmark detection: @@ -26,7 +26,7 @@ Hand Landmark detection Hand Landmark ^^^^^^^^^^^^^ - + .. list-table:: :header-rows: 1 @@ -39,8 +39,8 @@ Hand Landmark - Pretrained - Source - Compiled - - NV12 Compiled - * - hand_landmark_lite + - NV12 Compiled + * - hand_landmark_lite - 387 - 1102 - 224x224x3 diff --git a/docs/public_models/HAILO15M/HAILO15M_image_denoising.rst b/docs/public_models/HAILO15M/HAILO15M_image_denoising.rst index a3562a12..4e27717d 100644 --- a/docs/public_models/HAILO15M/HAILO15M_image_denoising.rst +++ b/docs/public_models/HAILO15M/HAILO15M_image_denoising.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Image Denoising`_ - + .. 
_Image Denoising: @@ -42,8 +42,8 @@ BSD68 - Pretrained - Source - Compiled - - NV12 Compiled - * - dncnn3 + - NV12 Compiled + * - dncnn3 - 31.46 - 31.26 - 20 @@ -74,8 +74,8 @@ CBSD68 - Pretrained - Source - Compiled - - NV12 Compiled - * - dncnn_color_blind + - NV12 Compiled + * - dncnn_color_blind - 33.87 - 32.97 - 20 diff --git a/docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst b/docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst index 2852b7c2..baafa56b 100644 --- a/docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Instance Segmentation`_ - + .. 
_Instance Segmentation: @@ -42,8 +42,8 @@ COCO - Pretrained - Source - Compiled - - NV12 Compiled - * - yolact_regnetx_1.6gf + - NV12 Compiled + * - yolact_regnetx_1.6gf - 27.57 - 27.27 - 34 @@ -54,8 +54,8 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ - * - yolact_regnetx_800mf + - `download `_ + * - yolact_regnetx_800mf - 25.61 - 25.5 - 40 @@ -66,8 +66,8 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ - * - yolov5l_seg + - `download `_ + * - yolov5l_seg - 39.78 - 39.09 - 20 @@ -78,8 +78,8 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ - * - yolov5m_seg + - `download `_ + * - yolov5m_seg - 37.05 - 36.32 - 43 @@ -90,8 +90,8 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ - * - yolov5n_seg |star| + - `download `_ + * - yolov5n_seg |star| - 23.35 - 22.75 - 148 @@ -102,8 +102,8 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ - * - yolov5s_seg + - `download `_ + * - yolov5s_seg - 31.57 - 30.8 - 82 @@ -114,8 +114,8 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ - * - yolov8m_seg + - `download `_ + * - yolov8m_seg - 40.6 - 39.85 - 30 @@ -126,8 +126,8 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ - * - yolov8n_seg + - `download `_ + * - yolov8n_seg - 30.32 - 29.68 - 143 @@ -138,8 +138,8 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ - * - yolov8s_seg + - `download `_ + * - yolov8s_seg - 36.63 - 36.13 - 66 diff --git a/docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst b/docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst index 9c98d8f2..a0fb9ec4 100644 --- a/docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst +++ b/docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. 
|star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Low Light Enhancement`_ - + .. _Low Light Enhancement: @@ -42,8 +42,8 @@ LOL - Pretrained - Source - Compiled - - NV12 Compiled - * - zero_dce + - NV12 Compiled + * - zero_dce - 16.23 - 16.24 - 71 @@ -54,8 +54,8 @@ LOL - `download `_ - `link `_ - `download `_ - - `download `_ - * - zero_dce_pp + - `download `_ + * - zero_dce_pp - 15.95 - 15.82 - 54 diff --git a/docs/public_models/HAILO15M/HAILO15M_object_detection.rst b/docs/public_models/HAILO15M/HAILO15M_object_detection.rst index b54c16fa..54fa9cd0 100644 --- a/docs/public_models/HAILO15M/HAILO15M_object_detection.rst +++ b/docs/public_models/HAILO15M/HAILO15M_object_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15M/HAILO15M_person_attribute.rst b/docs/public_models/HAILO15M/HAILO15M_person_attribute.rst index 04802c5f..38ecf28c 100644 --- a/docs/public_models/HAILO15M/HAILO15M_person_attribute.rst +++ b/docs/public_models/HAILO15M/HAILO15M_person_attribute.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
@@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Person Attribute`_ - + .. _Person Attribute: @@ -42,8 +42,8 @@ PETA - Pretrained - Source - Compiled - - NV12 Compiled - * - person_attr_resnet_v1_18 + - NV12 Compiled + * - person_attr_resnet_v1_18 - 82.5 - 82.61 - 361 diff --git a/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst b/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst index 3c234b8a..0ac6e844 100644 --- a/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst +++ b/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Person Re-ID`_ - + .. _Person Re-ID: @@ -42,8 +42,8 @@ Market1501 - Pretrained - Source - Compiled - - NV12 Compiled - * - osnet_x1_0 + - NV12 Compiled + * - osnet_x1_0 - 94.43 - 93.63 - 114 @@ -54,8 +54,8 @@ Market1501 - `download `_ - `link `_ - `download `_ - - `download `_ - * - repvgg_a0_person_reid_512 |star| + - `download `_ + * - repvgg_a0_person_reid_512 |star| - 89.9 - 89.3 - 2632 diff --git a/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst b/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst index 0b16642b..31b336c7 100644 --- a/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. 
|star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Pose Estimation`_ - + .. _Pose Estimation: @@ -42,8 +42,8 @@ COCO - Pretrained - Source - Compiled - - NV12 Compiled - * - centerpose_regnetx_1.6gf_fpn |star| + - NV12 Compiled + * - centerpose_regnetx_1.6gf_fpn |star| - 53.54 - 53.53 - 46 @@ -54,8 +54,8 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ - * - centerpose_regnetx_800mf + - `download `_ + * - centerpose_regnetx_800mf - 44.07 - 43.07 - 56 @@ -66,8 +66,8 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ - * - centerpose_repvgg_a0 |star| + - `download `_ + * - centerpose_repvgg_a0 |star| - 39.17 - 37.17 - 95 @@ -78,8 +78,8 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ - * - yolov8m_pose + - `download `_ + * - yolov8m_pose - 64.26 - 61.66 - 37 @@ -90,8 +90,8 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ - * - yolov8s_pose + - `download `_ + * - yolov8s_pose - 59.2 - 55.6 - 79 diff --git a/docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst b/docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst index 6df69c7f..cf821320 100644 --- a/docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Semantic Segmentation`_ - + .. 
_Semantic Segmentation: @@ -42,8 +42,8 @@ Cityscapes - Pretrained - Source - Compiled - - NV12 Compiled - * - fcn8_resnet_v1_18 |star| + - NV12 Compiled + * - fcn8_resnet_v1_18 |star| - 69.41 - 69.21 - 18 @@ -54,8 +54,8 @@ Cityscapes - `download `_ - `link `_ - `download `_ - - `download `_ - * - segformer_b0_bn + - `download `_ + * - segformer_b0_bn - 69.81 - 68.01 - None @@ -66,8 +66,8 @@ Cityscapes - `download `_ - `link `_ - `download `_ - - `download `_ - * - stdc1 |rocket| + - `download `_ + * - stdc1 |rocket| - 74.57 - 73.92 - 15 @@ -98,8 +98,8 @@ Oxford-IIIT Pet - Pretrained - Source - Compiled - - NV12 Compiled - * - unet_mobilenet_v2 + - NV12 Compiled + * - unet_mobilenet_v2 - 77.32 - 77.02 - 133 @@ -130,8 +130,8 @@ Pascal VOC - Pretrained - Source - Compiled - - NV12 Compiled - * - deeplab_v3_mobilenet_v2 + - NV12 Compiled + * - deeplab_v3_mobilenet_v2 - 76.05 - 74.8 - 42 @@ -142,8 +142,8 @@ Pascal VOC - `download `_ - `link `_ - `download `_ - - `download `_ - * - deeplab_v3_mobilenet_v2_wo_dilation + - `download `_ + * - deeplab_v3_mobilenet_v2_wo_dilation - 71.46 - 71.26 - 87 diff --git a/docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst b/docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst index e7ec4403..909257db 100644 --- a/docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Single Person Pose Estimation`_ - + .. 
_Single Person Pose Estimation: @@ -42,8 +42,8 @@ COCO - Pretrained - Source - Compiled - - NV12 Compiled - * - mspn_regnetx_800mf |star| + - NV12 Compiled + * - mspn_regnetx_800mf |star| - 70.8 - 70.3 - 279 @@ -54,8 +54,8 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ - * - vit_pose_small_bn + - `download `_ + * - vit_pose_small_bn - 72.01 - 70.81 - 73 diff --git a/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst b/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst index ea70258b..d715b47d 100644 --- a/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Stereo Depth Estimation`_ - + .. _Stereo Depth Estimation: @@ -42,8 +42,8 @@ N/A - Pretrained - Source - Compiled - - NV12 Compiled - * - stereonet + - NV12 Compiled + * - stereonet - 91.79 - 89.14 - None diff --git a/docs/public_models/HAILO15M/HAILO15M_super_resolution.rst b/docs/public_models/HAILO15M/HAILO15M_super_resolution.rst index b246fd69..6c11406f 100644 --- a/docs/public_models/HAILO15M/HAILO15M_super_resolution.rst +++ b/docs/public_models/HAILO15M/HAILO15M_super_resolution.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
@@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Super Resolution`_ - + .. _Super Resolution: @@ -42,8 +42,8 @@ BSD100 - Pretrained - Source - Compiled - - NV12 Compiled - * - espcn_x2 + - NV12 Compiled + * - espcn_x2 - 31.4 - 30.3 - 1637 @@ -54,8 +54,8 @@ BSD100 - `download `_ - `link `_ - `download `_ - - `download `_ - * - espcn_x3 + - `download `_ + * - espcn_x3 - 28.41 - 28.06 - 1925 @@ -66,8 +66,8 @@ BSD100 - `download `_ - `link `_ - `download `_ - - `download `_ - * - espcn_x4 + - `download `_ + * - espcn_x4 - 26.83 - 26.58 - 1908 diff --git a/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst b/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst index ed50f045..02cf3ec8 100644 --- a/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst +++ b/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Zero-shot Classification`_ - + .. _Zero-shot Classification: @@ -42,8 +42,8 @@ CIFAR100 - Pretrained - Source - Compiled - - NV12 Compiled - * - clip_resnet_50 + - NV12 Compiled + * - clip_resnet_50 - 42.07 - 38.57 - 85 diff --git a/docs/public_models/HAILO8/HAILO8_classification.rst b/docs/public_models/HAILO8/HAILO8_classification.rst index 05913af8..d2082ae2 100644 --- a/docs/public_models/HAILO8/HAILO8_classification.rst +++ b/docs/public_models/HAILO8/HAILO8_classification.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. 
|rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Classification`_ - + .. _Classification: @@ -41,8 +41,8 @@ ImageNet - OPS (G) - Pretrained - Source - - Compiled - * - efficientnet_l + - Compiled + * - efficientnet_l - 80.46 - 79.36 - 221 @@ -52,8 +52,8 @@ ImageNet - 19.4 - `download `_ - `link `_ - - `download `_ - * - efficientnet_lite0 + - `download `_ + * - efficientnet_lite0 - 74.99 - 73.81 - 1731 @@ -63,8 +63,8 @@ ImageNet - 0.78 - `download `_ - `link `_ - - `download `_ - * - efficientnet_lite1 + - `download `_ + * - efficientnet_lite1 - 76.68 - 76.21 - 892 @@ -74,8 +74,8 @@ ImageNet - 1.22 - `download `_ - `link `_ - - `download `_ - * - efficientnet_lite2 + - `download `_ + * - efficientnet_lite2 - 77.45 - 76.74 - 433 @@ -85,8 +85,8 @@ ImageNet - 1.74 - `download `_ - `link `_ - - `download `_ - * - efficientnet_lite3 + - `download `_ + * - efficientnet_lite3 - 79.29 - 78.42 - 223 @@ -96,8 +96,8 @@ ImageNet - 2.8 - `download `_ - `link `_ - - `download `_ - * - efficientnet_lite4 + - `download `_ + * - efficientnet_lite4 - 80.79 - 79.99 - 84 @@ -107,8 +107,8 @@ ImageNet - 5.10 - `download `_ - `link `_ - - `download `_ - * - efficientnet_m |rocket| + - `download `_ + * - efficientnet_m |rocket| - 78.91 - 78.63 - 890 @@ -118,8 +118,8 @@ ImageNet - 7.32 - `download `_ - `link `_ - - `download `_ - * - efficientnet_s + - `download `_ + * - efficientnet_s - 77.64 - 77.32 - 1036 @@ -129,8 +129,8 @@ ImageNet - 4.72 - `download `_ - `link `_ - - `download `_ - * - hardnet39ds + - `download `_ + * - hardnet39ds - 73.43 - 72.92 - 313 @@ -140,8 +140,8 @@ ImageNet - 0.86 - `download `_ - `link `_ - - `download `_ - * - hardnet68 + 
- `download `_ + * - hardnet68 - 75.47 - 75.04 - 121 @@ -151,8 +151,8 @@ ImageNet - 8.5 - `download `_ - `link `_ - - `download `_ - * - inception_v1 + - `download `_ + * - inception_v1 - 69.74 - 69.54 - 928 @@ -162,8 +162,8 @@ ImageNet - 3 - `download `_ - `link `_ - - `download `_ - * - mobilenet_v1 + - `download `_ + * - mobilenet_v1 - 70.97 - 70.26 - 3489 @@ -173,8 +173,8 @@ ImageNet - 1.14 - `download `_ - `link `_ - - `download `_ - * - mobilenet_v2_1.0 |rocket| + - `download `_ + * - mobilenet_v2_1.0 |rocket| - 71.78 - 71.0 - 2434 @@ -184,8 +184,8 @@ ImageNet - 0.62 - `download `_ - `link `_ - - `download `_ - * - mobilenet_v2_1.4 + - `download `_ + * - mobilenet_v2_1.4 - 74.18 - 73.18 - 1669 @@ -195,8 +195,8 @@ ImageNet - 1.18 - `download `_ - `link `_ - - `download `_ - * - mobilenet_v3 + - `download `_ + * - mobilenet_v3 - 72.21 - 71.73 - 2415 @@ -206,8 +206,8 @@ ImageNet - 2 - `download `_ - `link `_ - - `download `_ - * - mobilenet_v3_large_minimalistic + - `download `_ + * - mobilenet_v3_large_minimalistic - 72.11 - 70.61 - 3518 @@ -217,8 +217,8 @@ ImageNet - 0.42 - `download `_ - `link `_ - - `download `_ - * - regnetx_1.6gf + - `download `_ + * - regnetx_1.6gf - 77.05 - 76.75 - 2321 @@ -228,8 +228,8 @@ ImageNet - 3.22 - `download `_ - `link `_ - - `download `_ - * - regnetx_800mf + - `download `_ + * - regnetx_800mf - 75.16 - 74.84 - 3506 @@ -239,8 +239,8 @@ ImageNet - 1.6 - `download `_ - `link `_ - - `download `_ - * - repvgg_a1 + - `download `_ + * - repvgg_a1 - 74.4 - 72.4 - 2545 @@ -250,8 +250,8 @@ ImageNet - 4.7 - `download `_ - `link `_ - - `download `_ - * - repvgg_a2 + - `download `_ + * - repvgg_a2 - 76.52 - 74.52 - 911 @@ -261,8 +261,8 @@ ImageNet - 10.2 - `download `_ - `link `_ - - `download `_ - * - resmlp12_relu + - `download `_ + * - resmlp12_relu - 75.26 - 74.32 - 1429 @@ -272,8 +272,8 @@ ImageNet - 6.04 - `download `_ - `link `_ - - `download `_ - * - resnet_v1_18 + - `download `_ + * - resnet_v1_18 - 71.26 - 71.06 - 2533 @@ -283,8 
+283,8 @@ ImageNet - 3.64 - `download `_ - `link `_ - - `download `_ - * - resnet_v1_34 + - `download `_ + * - resnet_v1_34 - 72.7 - 72.14 - 1346 @@ -294,8 +294,8 @@ ImageNet - 7.34 - `download `_ - `link `_ - - `download `_ - * - resnet_v1_50 |rocket| |star| + - `download `_ + * - resnet_v1_50 |rocket| |star| - 75.12 - 74.47 - 1394 @@ -305,8 +305,8 @@ ImageNet - 6.98 - `download `_ - `link `_ - - `download `_ - * - resnext26_32x4d + - `download `_ + * - resnext26_32x4d - 76.18 - 75.78 - 1630 @@ -316,8 +316,8 @@ ImageNet - 4.96 - `download `_ - `link `_ - - `download `_ - * - resnext50_32x4d + - `download `_ + * - resnext50_32x4d - 79.31 - 78.21 - 398 @@ -327,8 +327,8 @@ ImageNet - 8.48 - `download `_ - `link `_ - - `download `_ - * - squeezenet_v1.1 + - `download `_ + * - squeezenet_v1.1 - 59.85 - 59.4 - 3035 @@ -338,8 +338,8 @@ ImageNet - 0.78 - `download `_ - `link `_ - - `download `_ - * - vit_base_bn |rocket| + - `download `_ + * - vit_base_bn |rocket| - 79.98 - 78.58 - 36 @@ -349,8 +349,8 @@ ImageNet - 35.188 - `download `_ - `link `_ - - `download `_ - * - vit_small_bn + - `download `_ + * - vit_small_bn - 78.12 - 77.02 - 113 @@ -360,8 +360,8 @@ ImageNet - 8.62 - `download `_ - `link `_ - - `download `_ - * - vit_tiny_bn + - `download `_ + * - vit_tiny_bn - 68.95 - 67.15 - 184 diff --git a/docs/public_models/HAILO8/HAILO8_depth_estimation.rst b/docs/public_models/HAILO8/HAILO8_depth_estimation.rst index 93a8dcb2..189787bb 100644 --- a/docs/public_models/HAILO8/HAILO8_depth_estimation.rst +++ b/docs/public_models/HAILO8/HAILO8_depth_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
@@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Depth Estimation`_ - + .. _Depth Estimation: @@ -41,8 +41,8 @@ NYU - OPS (G) - Pretrained - Source - - Compiled - * - fast_depth |star| + - Compiled + * - fast_depth |star| - 0.6 - 0.62 - 1739 @@ -52,8 +52,8 @@ NYU - 0.74 - `download `_ - `link `_ - - `download `_ - * - scdepthv3 + - `download `_ + * - scdepthv3 - 0.48 - 0.51 - 777 diff --git a/docs/public_models/HAILO8/HAILO8_face_attribute.rst b/docs/public_models/HAILO8/HAILO8_face_attribute.rst index eda30aa1..fa935dba 100644 --- a/docs/public_models/HAILO8/HAILO8_face_attribute.rst +++ b/docs/public_models/HAILO8/HAILO8_face_attribute.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Face Attribute`_ - + .. _Face Attribute: @@ -41,8 +41,8 @@ CELEBA - OPS (G) - Pretrained - Source - - Compiled - * - face_attr_resnet_v1_18 + - Compiled + * - face_attr_resnet_v1_18 - 81.19 - 81.09 - 2929 diff --git a/docs/public_models/HAILO8/HAILO8_face_detection.rst b/docs/public_models/HAILO8/HAILO8_face_detection.rst index 46959fb3..c1fd8c0f 100644 --- a/docs/public_models/HAILO8/HAILO8_face_detection.rst +++ b/docs/public_models/HAILO8/HAILO8_face_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. 
|star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Face Detection`_ - + .. _Face Detection: @@ -41,8 +41,8 @@ WiderFace - OPS (G) - Pretrained - Source - - Compiled - * - lightface_slim |star| + - Compiled + * - lightface_slim |star| - 39.7 - 39.22 - 4206 @@ -52,8 +52,8 @@ WiderFace - 0.16 - `download `_ - `link `_ - - `download `_ - * - retinaface_mobilenet_v1 |star| + - `download `_ + * - retinaface_mobilenet_v1 |star| - 81.27 - 81.17 - 104 @@ -63,8 +63,8 @@ WiderFace - 25.14 - `download `_ - `link `_ - - `download `_ - * - scrfd_10g + - `download `_ + * - scrfd_10g - 82.13 - 82.03 - 303 @@ -74,8 +74,8 @@ WiderFace - 26.74 - `download `_ - `link `_ - - `download `_ - * - scrfd_2.5g + - `download `_ + * - scrfd_2.5g - 76.59 - 76.32 - 733 @@ -85,8 +85,8 @@ WiderFace - 6.88 - `download `_ - `link `_ - - `download `_ - * - scrfd_500m + - `download `_ + * - scrfd_500m - 68.98 - 68.88 - 831 diff --git a/docs/public_models/HAILO8/HAILO8_face_recognition.rst b/docs/public_models/HAILO8/HAILO8_face_recognition.rst index b557b86f..c2e097be 100644 --- a/docs/public_models/HAILO8/HAILO8_face_recognition.rst +++ b/docs/public_models/HAILO8/HAILO8_face_recognition.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Face Recognition`_ - + .. 
_Face Recognition: @@ -41,8 +41,8 @@ LFW - OPS (G) - Pretrained - Source - - Compiled - * - arcface_mobilefacenet + - Compiled + * - arcface_mobilefacenet - 99.43 - 99.41 - 3458 @@ -52,8 +52,8 @@ LFW - 0.88 - `download `_ - `link `_ - - `download `_ - * - arcface_r50 + - `download `_ + * - arcface_r50 - 99.72 - 99.71 - 108 diff --git a/docs/public_models/HAILO8/HAILO8_facial_landmark_detection.rst b/docs/public_models/HAILO8/HAILO8_facial_landmark_detection.rst index c7346266..3bed33b0 100644 --- a/docs/public_models/HAILO8/HAILO8_facial_landmark_detection.rst +++ b/docs/public_models/HAILO8/HAILO8_facial_landmark_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Facial Landmark Detection`_ - + .. _Facial Landmark Detection: @@ -41,8 +41,8 @@ AFLW2k3d - OPS (G) - Pretrained - Source - - Compiled - * - tddfa_mobilenet_v1 |star| + - Compiled + * - tddfa_mobilenet_v1 |star| - 3.68 - 4.05 - 10084 diff --git a/docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst b/docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst index 5f263026..dfd91d0a 100644 --- a/docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst +++ b/docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
@@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Hand Landmark detection`_ - + .. _Hand Landmark detection: @@ -26,7 +26,7 @@ Hand Landmark detection Hand Landmark ^^^^^^^^^^^^^ - + .. list-table:: :header-rows: 1 @@ -38,8 +38,8 @@ Hand Landmark - OPS (G) - Pretrained - Source - - Compiled - * - hand_landmark_lite + - Compiled + * - hand_landmark_lite - 2672 - 2672 - 224x224x3 diff --git a/docs/public_models/HAILO8/HAILO8_image_denoising.rst b/docs/public_models/HAILO8/HAILO8_image_denoising.rst index b1093ef8..225caec1 100644 --- a/docs/public_models/HAILO8/HAILO8_image_denoising.rst +++ b/docs/public_models/HAILO8/HAILO8_image_denoising.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Image Denoising`_ - + .. _Image Denoising: @@ -41,8 +41,8 @@ BSD68 - OPS (G) - Pretrained - Source - - Compiled - * - dncnn3 + - Compiled + * - dncnn3 - 31.46 - 31.26 - 60 @@ -71,8 +71,8 @@ CBSD68 - OPS (G) - Pretrained - Source - - Compiled - * - dncnn_color_blind + - Compiled + * - dncnn_color_blind - 33.87 - 32.97 - 60 diff --git a/docs/public_models/HAILO8/HAILO8_instance_segmentation.rst b/docs/public_models/HAILO8/HAILO8_instance_segmentation.rst index 03a5282a..eaf9ae64 100644 --- a/docs/public_models/HAILO8/HAILO8_instance_segmentation.rst +++ b/docs/public_models/HAILO8/HAILO8_instance_segmentation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. 
|star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Instance Segmentation`_ - + .. _Instance Segmentation: @@ -41,8 +41,8 @@ COCO - OPS (G) - Pretrained - Source - - Compiled - * - yolact_regnetx_1.6gf + - Compiled + * - yolact_regnetx_1.6gf - 27.57 - 27.27 - 47 @@ -52,8 +52,8 @@ COCO - 125.34 - `download `_ - `link `_ - - `download `_ - * - yolact_regnetx_800mf + - `download `_ + * - yolact_regnetx_800mf - 25.61 - 25.5 - 45 @@ -63,8 +63,8 @@ COCO - 116.75 - `download `_ - `link `_ - - `download `_ - * - yolov5l_seg + - `download `_ + * - yolov5l_seg - 39.78 - 39.09 - 32 @@ -74,8 +74,8 @@ COCO - 147.88 - `download `_ - `link `_ - - `download `_ - * - yolov5m_seg + - `download `_ + * - yolov5m_seg - 37.05 - 36.32 - 61 @@ -85,8 +85,8 @@ COCO - 70.94 - `download `_ - `link `_ - - `download `_ - * - yolov5n_seg |star| + - `download `_ + * - yolov5n_seg |star| - 23.35 - 22.75 - 184 @@ -96,8 +96,8 @@ COCO - 7.1 - `download `_ - `link `_ - - `download `_ - * - yolov5s_seg + - `download `_ + * - yolov5s_seg - 31.57 - 30.8 - 92 @@ -107,8 +107,8 @@ COCO - 26.42 - `download `_ - `link `_ - - `download `_ - * - yolov8m_seg + - `download `_ + * - yolov8m_seg - 40.6 - 39.85 - 45 @@ -118,8 +118,8 @@ COCO - 110.2 - `download `_ - `link `_ - - `download `_ - * - yolov8n_seg + - `download `_ + * - yolov8n_seg - 30.32 - 29.68 - 173 @@ -129,8 +129,8 @@ COCO - 12.04 - `download `_ - `link `_ - - `download `_ - * - yolov8s_seg + - `download `_ + * - yolov8s_seg - 36.63 - 36.13 - 103 diff --git a/docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst b/docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst index f9d104ad..a4ff41ed 100644 --- a/docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst +++ 
b/docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Low Light Enhancement`_ - + .. _Low Light Enhancement: @@ -41,8 +41,8 @@ LOL - OPS (G) - Pretrained - Source - - Compiled - * - zero_dce + - Compiled + * - zero_dce - 16.23 - 16.24 - 118 @@ -52,8 +52,8 @@ LOL - 38.2 - `download `_ - `link `_ - - `download `_ - * - zero_dce_pp + - `download `_ + * - zero_dce_pp - 15.95 - 15.82 - 101 diff --git a/docs/public_models/HAILO8/HAILO8_object_detection.rst b/docs/public_models/HAILO8/HAILO8_object_detection.rst index 70dc035c..6e693ac1 100644 --- a/docs/public_models/HAILO8/HAILO8_object_detection.rst +++ b/docs/public_models/HAILO8/HAILO8_object_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Object Detection`_ - + .. 
_Object Detection: @@ -41,8 +41,8 @@ COCO - OPS (G) - Pretrained - Source - - Compiled - * - centernet_resnet_v1_18_postprocess + - Compiled + * - centernet_resnet_v1_18_postprocess - 26.3 - 23.31 - 441 @@ -52,8 +52,8 @@ COCO - 31.21 - `download `_ - `link `_ - - `download `_ - * - centernet_resnet_v1_50_postprocess + - `download `_ + * - centernet_resnet_v1_50_postprocess - 31.78 - 29.23 - 75 @@ -63,8 +63,8 @@ COCO - 56.92 - `download `_ - `link `_ - - `download `_ - * - damoyolo_tinynasL20_T + - `download `_ + * - damoyolo_tinynasL20_T - 42.8 - 42.3 - 130 @@ -74,8 +74,8 @@ COCO - 18.02 - `download `_ - `link `_ - - `download `_ - * - damoyolo_tinynasL25_S + - `download `_ + * - damoyolo_tinynasL25_S - 46.53 - 45.34 - 228 @@ -85,8 +85,8 @@ COCO - 37.64 - `download `_ - `link `_ - - `download `_ - * - damoyolo_tinynasL35_M + - `download `_ + * - damoyolo_tinynasL35_M - 49.7 - 47.7 - 54 @@ -96,8 +96,8 @@ COCO - 61.64 - `download `_ - `link `_ - - `download `_ - * - detr_resnet_v1_18_bn + - `download `_ + * - detr_resnet_v1_18_bn - 33.91 - 30.91 - 28 @@ -107,8 +107,8 @@ COCO - 61.87 - `download `_ - `link `_ - - `download `_ - * - efficientdet_lite0 + - `download `_ + * - efficientdet_lite0 - 27.32 - 26.49 - 87 @@ -118,8 +118,8 @@ COCO - 1.94 - `download `_ - `link `_ - - `download `_ - * - efficientdet_lite1 + - `download `_ + * - efficientdet_lite1 - 32.27 - 31.72 - 61 @@ -129,8 +129,8 @@ COCO - 4 - `download `_ - `link `_ - - `download `_ - * - efficientdet_lite2 + - `download `_ + * - efficientdet_lite2 - 35.95 - 34.67 - 42 @@ -140,8 +140,8 @@ COCO - 6.84 - `download `_ - `link `_ - - `download `_ - * - nanodet_repvgg |star| + - `download `_ + * - nanodet_repvgg |star| - 29.3 - 28.53 - 820 @@ -151,8 +151,8 @@ COCO - 11.28 - `download `_ - `link `_ - - `download `_ - * - nanodet_repvgg_a12 + - `download `_ + * - nanodet_repvgg_a12 - 33.73 - 32.13 - 400 @@ -162,8 +162,8 @@ COCO - 28.23 - `download `_ - `link `_ - - `download `_ - * - nanodet_repvgg_a1_640 + - 
`download `_ + * - nanodet_repvgg_a1_640 - 33.28 - 32.88 - 280 @@ -173,8 +173,8 @@ COCO - 42.8 - `download `_ - `link `_ - - `download `_ - * - ssd_mobilenet_v1 |rocket| |star| + - `download `_ + * - ssd_mobilenet_v1 |rocket| |star| - 23.19 - 22.29 - 1015 @@ -184,8 +184,8 @@ COCO - 2.5 - `download `_ - `link `_ - - `download `_ - * - ssd_mobilenet_v2 + - `download `_ + * - ssd_mobilenet_v2 - 24.15 - 22.95 - 142 @@ -195,8 +195,8 @@ COCO - 1.52 - `download `_ - `link `_ - - `download `_ - * - tiny_yolov3 + - `download `_ + * - tiny_yolov3 - 14.66 - 14.41 - 1044 @@ -206,8 +206,8 @@ COCO - 5.58 - `download `_ - `link `_ - - `download `_ - * - tiny_yolov4 + - `download `_ + * - tiny_yolov4 - 19.18 - 17.73 - 1337 @@ -217,8 +217,8 @@ COCO - 6.92 - `download `_ - `link `_ - - `download `_ - * - yolov3 |star| + - `download `_ + * - yolov3 |star| - 38.42 - 38.37 - 33 @@ -228,8 +228,8 @@ COCO - 158.10 - `download `_ - `link `_ - - `download `_ - * - yolov3_416 + - `download `_ + * - yolov3_416 - 37.73 - 37.53 - 45 @@ -239,8 +239,8 @@ COCO - 65.94 - `download `_ - `link `_ - - `download `_ - * - yolov3_gluon |star| + - `download `_ + * - yolov3_gluon |star| - 37.28 - 35.64 - 37 @@ -250,8 +250,8 @@ COCO - 158.1 - `download `_ - `link `_ - - `download `_ - * - yolov3_gluon_416 |star| + - `download `_ + * - yolov3_gluon_416 |star| - 36.27 - 34.92 - 47 @@ -261,8 +261,8 @@ COCO - 65.94 - `download `_ - `link `_ - - `download `_ - * - yolov4_leaky |star| + - `download `_ + * - yolov4_leaky |star| - 42.37 - 41.08 - 44 @@ -272,8 +272,8 @@ COCO - 91.04 - `download `_ - `link `_ - - `download `_ - * - yolov5m + - `download `_ + * - yolov5m - 42.59 - 41.19 - 156 @@ -283,8 +283,8 @@ COCO - 52.17 - `download `_ - `link `_ - - `download `_ - * - yolov5m6_6.1 + - `download `_ + * - yolov5m6_6.1 - 50.67 - 48.97 - 25 @@ -294,8 +294,8 @@ COCO - 200.04 - `download `_ - `link `_ - - `download `_ - * - yolov5m_6.1 + - `download `_ + * - yolov5m_6.1 - 44.8 - 43.36 - 81 @@ -305,8 +305,8 @@ COCO - 
48.96 - `download `_ - `link `_ - - `download `_ - * - yolov5m_wo_spp |rocket| + - `download `_ + * - yolov5m_wo_spp |rocket| - 43.06 - 41.06 - 217.983 @@ -316,8 +316,8 @@ COCO - 52.88 - `download `_ - `link `_ - - `download `_ - * - yolov5s |star| + - `download `_ + * - yolov5s |star| - 35.33 - 33.98 - 379 @@ -327,8 +327,8 @@ COCO - 17.44 - `download `_ - `link `_ - - `download `_ - * - yolov5s_c3tr + - `download `_ + * - yolov5s_c3tr - 37.13 - 35.63 - 133 @@ -338,8 +338,8 @@ COCO - 17.02 - `download `_ - `link `_ - - `download `_ - * - yolov5xs_wo_spp + - `download `_ + * - yolov5xs_wo_spp - 33.18 - 32.2 - 168 @@ -349,8 +349,8 @@ COCO - 11.36 - `download `_ - `link `_ - - `download `_ - * - yolov5xs_wo_spp_nms_core + - `download `_ + * - yolov5xs_wo_spp_nms_core - 32.57 - 30.86 - 100 @@ -360,8 +360,8 @@ COCO - 11.36 - `download `_ - `link `_ - - `download `_ - * - yolov6n + - `download `_ + * - yolov6n - 34.28 - 32.28 - 1251 @@ -371,8 +371,8 @@ COCO - 11.12 - `download `_ - `link `_ - - `download `_ - * - yolov6n_0.2.1 + - `download `_ + * - yolov6n_0.2.1 - 35.16 - 33.87 - 805 @@ -382,8 +382,8 @@ COCO - 11.06 - `download `_ - `link `_ - - `download `_ - * - yolov7 + - `download `_ + * - yolov7 - 50.59 - 47.89 - 45 @@ -393,8 +393,8 @@ COCO - 104.51 - `download `_ - `link `_ - - `download `_ - * - yolov7_tiny + - `download `_ + * - yolov7_tiny - 37.07 - 36.07 - 373 @@ -404,8 +404,8 @@ COCO - 13.74 - `download `_ - `link `_ - - `download `_ - * - yolov7e6 + - `download `_ + * - yolov7e6 - 55.37 - 53.47 - 6 @@ -415,8 +415,8 @@ COCO - 515.12 - `download `_ - `link `_ - - `download `_ - * - yolov8l + - `download `_ + * - yolov8l - 52.44 - 51.78 - 29 @@ -426,8 +426,8 @@ COCO - 165.3 - `download `_ - `link `_ - - `download `_ - * - yolov8m + - `download `_ + * - yolov8m - 49.91 - 49.11 - 59 @@ -437,8 +437,8 @@ COCO - 78.93 - `download `_ - `link `_ - - `download `_ - * - yolov8n + - `download `_ + * - yolov8n - 37.02 - 36.32 - 1024 @@ -448,8 +448,8 @@ COCO - 8.74 - 
`download `_ - `link `_ - - `download `_ - * - yolov8s + - `download `_ + * - yolov8s - 44.58 - 43.98 - 396 @@ -459,8 +459,8 @@ COCO - 28.6 - `download `_ - `link `_ - - `download `_ - * - yolov8x + - `download `_ + * - yolov8x - 53.45 - 52.75 - 18 @@ -470,8 +470,8 @@ COCO - 258 - `download `_ - `link `_ - - `download `_ - * - yolov9c + - `download `_ + * - yolov9c - 52.8 - 50.7 - 36 @@ -481,8 +481,8 @@ COCO - 102.1 - `download `_ - `link `_ - - `download `_ - * - yolox_l_leaky |star| + - `download `_ + * - yolox_l_leaky |star| - 48.69 - 46.59 - 32 @@ -492,8 +492,8 @@ COCO - 155.3 - `download `_ - `link `_ - - `download `_ - * - yolox_s_leaky + - `download `_ + * - yolox_s_leaky - 38.12 - 37.27 - 250 @@ -503,8 +503,8 @@ COCO - 26.74 - `download `_ - `link `_ - - `download `_ - * - yolox_s_wide_leaky + - `download `_ + * - yolox_s_wide_leaky - 42.4 - 40.97 - 73 @@ -514,8 +514,8 @@ COCO - 59.46 - `download `_ - `link `_ - - `download `_ - * - yolox_tiny + - `download `_ + * - yolox_tiny - 32.64 - 31.39 - 226 @@ -544,8 +544,8 @@ VisDrone - OPS (G) - Pretrained - Source - - Compiled - * - ssd_mobilenet_v1_visdrone |star| + - Compiled + * - ssd_mobilenet_v1_visdrone |star| - 2.37 - 2.22 - 1212 diff --git a/docs/public_models/HAILO8/HAILO8_person_attribute.rst b/docs/public_models/HAILO8/HAILO8_person_attribute.rst index 1ae38aef..890eb665 100644 --- a/docs/public_models/HAILO8/HAILO8_person_attribute.rst +++ b/docs/public_models/HAILO8/HAILO8_person_attribute.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Person Attribute`_ - + .. 
_Person Attribute: @@ -41,8 +41,8 @@ PETA - OPS (G) - Pretrained - Source - - Compiled - * - person_attr_resnet_v1_18 + - Compiled + * - person_attr_resnet_v1_18 - 82.5 - 82.61 - 2523 diff --git a/docs/public_models/HAILO8/HAILO8_person_re_id.rst b/docs/public_models/HAILO8/HAILO8_person_re_id.rst index 56dbe68b..8f1e6b0b 100644 --- a/docs/public_models/HAILO8/HAILO8_person_re_id.rst +++ b/docs/public_models/HAILO8/HAILO8_person_re_id.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Person Re-ID`_ - + .. _Person Re-ID: @@ -41,8 +41,8 @@ Market1501 - OPS (G) - Pretrained - Source - - Compiled - * - osnet_x1_0 + - Compiled + * - osnet_x1_0 - 94.43 - 93.63 - 157 @@ -52,8 +52,8 @@ Market1501 - 1.98 - `download `_ - `link `_ - - `download `_ - * - repvgg_a0_person_reid_512 |star| + - `download `_ + * - repvgg_a0_person_reid_512 |star| - 89.9 - 89.3 - 5204 diff --git a/docs/public_models/HAILO8/HAILO8_pose_estimation.rst b/docs/public_models/HAILO8/HAILO8_pose_estimation.rst index 4156adc7..dde6509d 100644 --- a/docs/public_models/HAILO8/HAILO8_pose_estimation.rst +++ b/docs/public_models/HAILO8/HAILO8_pose_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
@@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Pose Estimation`_ - + .. _Pose Estimation: @@ -41,8 +41,8 @@ COCO - OPS (G) - Pretrained - Source - - Compiled - * - centerpose_regnetx_1.6gf_fpn |star| + - Compiled + * - centerpose_regnetx_1.6gf_fpn |star| - 53.54 - 53.53 - 132 @@ -52,8 +52,8 @@ COCO - 64.58 - `download `_ - `link `_ - - `download `_ - * - centerpose_regnetx_800mf + - `download `_ + * - centerpose_regnetx_800mf - 44.07 - 43.07 - 132 @@ -63,8 +63,8 @@ COCO - 86.12 - `download `_ - `link `_ - - `download `_ - * - centerpose_repvgg_a0 |star| + - `download `_ + * - centerpose_repvgg_a0 |star| - 39.17 - 37.17 - 512 @@ -74,8 +74,8 @@ COCO - 28.27 - `download `_ - `link `_ - - `download `_ - * - yolov8m_pose + - `download `_ + * - yolov8m_pose - 64.26 - 61.66 - 54 @@ -85,8 +85,8 @@ COCO - 81.02 - `download `_ - `link `_ - - `download `_ - * - yolov8s_pose + - `download `_ + * - yolov8s_pose - 59.2 - 55.6 - 125 diff --git a/docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst b/docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst index 8b3b039f..9d75a379 100644 --- a/docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst +++ b/docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Semantic Segmentation`_ - + .. 
_Semantic Segmentation: @@ -41,8 +41,8 @@ Cityscapes - OPS (G) - Pretrained - Source - - Compiled - * - fcn8_resnet_v1_18 |star| + - Compiled + * - fcn8_resnet_v1_18 |star| - 69.41 - 69.21 - 42 @@ -52,8 +52,8 @@ Cityscapes - 142.82 - `download `_ - `link `_ - - `download `_ - * - segformer_b0_bn + - `download `_ + * - segformer_b0_bn - 69.81 - 68.01 - 8 @@ -63,8 +63,8 @@ Cityscapes - 35.76 - `download `_ - `link `_ - - `download `_ - * - stdc1 |rocket| + - `download `_ + * - stdc1 |rocket| - 74.57 - 73.92 - 52 @@ -93,8 +93,8 @@ Oxford-IIIT Pet - OPS (G) - Pretrained - Source - - Compiled - * - unet_mobilenet_v2 + - Compiled + * - unet_mobilenet_v2 - 77.32 - 77.02 - 445 @@ -123,8 +123,8 @@ Pascal VOC - OPS (G) - Pretrained - Source - - Compiled - * - deeplab_v3_mobilenet_v2 + - Compiled + * - deeplab_v3_mobilenet_v2 - 76.05 - 74.8 - 122 @@ -134,8 +134,8 @@ Pascal VOC - 17.65 - `download `_ - `link `_ - - `download `_ - * - deeplab_v3_mobilenet_v2_wo_dilation + - `download `_ + * - deeplab_v3_mobilenet_v2_wo_dilation - 71.46 - 71.26 - 262 diff --git a/docs/public_models/HAILO8/HAILO8_single_person_pose_estimation.rst b/docs/public_models/HAILO8/HAILO8_single_person_pose_estimation.rst index e3e16181..fa1a34aa 100644 --- a/docs/public_models/HAILO8/HAILO8_single_person_pose_estimation.rst +++ b/docs/public_models/HAILO8/HAILO8_single_person_pose_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Single Person Pose Estimation`_ - + .. 
_Single Person Pose Estimation: @@ -41,8 +41,8 @@ COCO - OPS (G) - Pretrained - Source - - Compiled - * - mspn_regnetx_800mf |star| + - Compiled + * - mspn_regnetx_800mf |star| - 70.8 - 70.3 - 1716 @@ -52,8 +52,8 @@ COCO - 2.94 - `download `_ - `link `_ - - `download `_ - * - vit_pose_small_bn + - `download `_ + * - vit_pose_small_bn - 72.01 - 70.81 - 66 diff --git a/docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst b/docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst index d920e3eb..5404dd73 100644 --- a/docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst +++ b/docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Stereo Depth Estimation`_ - + .. _Stereo Depth Estimation: @@ -41,8 +41,8 @@ N/A - OPS (G) - Pretrained - Source - - Compiled - * - stereonet + - Compiled + * - stereonet - 91.79 - 89.14 - 5 diff --git a/docs/public_models/HAILO8/HAILO8_super_resolution.rst b/docs/public_models/HAILO8/HAILO8_super_resolution.rst index 145b6ba7..3e46c7f1 100644 --- a/docs/public_models/HAILO8/HAILO8_super_resolution.rst +++ b/docs/public_models/HAILO8/HAILO8_super_resolution.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
@@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Super Resolution`_ - + .. _Super Resolution: @@ -41,8 +41,8 @@ BSD100 - OPS (G) - Pretrained - Source - - Compiled - * - espcn_x2 + - Compiled + * - espcn_x2 - 31.4 - 30.3 - 1164 @@ -52,8 +52,8 @@ BSD100 - 1.6 - `download `_ - `link `_ - - `download `_ - * - espcn_x3 + - `download `_ + * - espcn_x3 - 28.41 - 28.06 - 2218 @@ -63,8 +63,8 @@ BSD100 - 0.76 - `download `_ - `link `_ - - `download `_ - * - espcn_x4 + - `download `_ + * - espcn_x4 - 26.83 - 26.58 - 2189 diff --git a/docs/public_models/HAILO8/HAILO8_zero_shot_classification.rst b/docs/public_models/HAILO8/HAILO8_zero_shot_classification.rst index 3a1615b0..f749d545 100644 --- a/docs/public_models/HAILO8/HAILO8_zero_shot_classification.rst +++ b/docs/public_models/HAILO8/HAILO8_zero_shot_classification.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Zero-shot Classification`_ - + .. _Zero-shot Classification: @@ -41,8 +41,8 @@ CIFAR100 - OPS (G) - Pretrained - Source - - Compiled - * - clip_resnet_50 + - Compiled + * - clip_resnet_50 - 42.07 - 38.57 - 88 diff --git a/docs/public_models/HAILO8L/HAILO8l_classificaion.rst b/docs/public_models/HAILO8L/HAILO8l_classificaion.rst index eb5196cd..066359e4 100644 --- a/docs/public_models/HAILO8L/HAILO8l_classificaion.rst +++ b/docs/public_models/HAILO8L/HAILO8l_classificaion.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. 
|rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Classification`_ - + .. _Classification: @@ -41,8 +41,8 @@ ImageNet - OPS (G) - Pretrained - Source - - Compiled - * - efficientnet_l + - Compiled + * - efficientnet_l - 80.46 - 79.36 - 56 @@ -52,8 +52,8 @@ ImageNet - 19.4 - `download `_ - `link `_ - - `download `_ - * - efficientnet_lite0 + - `download `_ + * - efficientnet_lite0 - 74.99 - 73.81 - 202 @@ -63,8 +63,8 @@ ImageNet - 0.78 - `download `_ - `link `_ - - `download `_ - * - efficientnet_lite1 + - `download `_ + * - efficientnet_lite1 - 76.68 - 76.21 - 148 @@ -74,8 +74,8 @@ ImageNet - 1.22 - `download `_ - `link `_ - - `download `_ - * - efficientnet_lite2 + - `download `_ + * - efficientnet_lite2 - 77.45 - 76.74 - 106 @@ -85,8 +85,8 @@ ImageNet - 1.74 - `download `_ - `link `_ - - `download `_ - * - efficientnet_lite3 + - `download `_ + * - efficientnet_lite3 - 79.29 - 78.42 - 83 @@ -96,8 +96,8 @@ ImageNet - 2.8 - `download `_ - `link `_ - - `download `_ - * - efficientnet_lite4 + - `download `_ + * - efficientnet_lite4 - 80.79 - 79.99 - 60 @@ -107,8 +107,8 @@ ImageNet - 5.10 - `download `_ - `link `_ - - `download `_ - * - efficientnet_m |rocket| + - `download `_ + * - efficientnet_m |rocket| - 78.91 - 78.63 - 113 @@ -118,8 +118,8 @@ ImageNet - 7.32 - `download `_ - `link `_ - - `download `_ - * - efficientnet_s + - `download `_ + * - efficientnet_s - 77.64 - 77.32 - 158 @@ -129,8 +129,8 @@ ImageNet - 4.72 - `download `_ - `link `_ - - `download `_ - * - hardnet39ds + - `download `_ + * - hardnet39ds - 73.43 - 72.92 - 247 @@ -140,8 +140,8 @@ ImageNet - 0.86 - `download `_ - `link `_ - - `download `_ - * - hardnet68 + - `download `_ + * - hardnet68 - 75.47 - 
75.04 - 90 @@ -151,8 +151,8 @@ ImageNet - 8.5 - `download `_ - `link `_ - - `download `_ - * - inception_v1 + - `download `_ + * - inception_v1 - 69.74 - 69.54 - 230 @@ -162,8 +162,8 @@ ImageNet - 3 - `download `_ - `link `_ - - `download `_ - * - mobilenet_v1 + - `download `_ + * - mobilenet_v1 - 70.97 - 70.26 - 1866 @@ -173,8 +173,8 @@ ImageNet - 1.14 - `download `_ - `link `_ - - `download `_ - * - mobilenet_v2_1.0 |rocket| + - `download `_ + * - mobilenet_v2_1.0 |rocket| - 71.78 - 71.0 - 1738 @@ -184,8 +184,8 @@ ImageNet - 0.62 - `download `_ - `link `_ - - `download `_ - * - mobilenet_v2_1.4 + - `download `_ + * - mobilenet_v2_1.4 - 74.18 - 73.18 - 185 @@ -195,8 +195,8 @@ ImageNet - 1.18 - `download `_ - `link `_ - - `download `_ - * - mobilenet_v3 + - `download `_ + * - mobilenet_v3 - 72.21 - 71.73 - 220 @@ -206,8 +206,8 @@ ImageNet - 2 - `download `_ - `link `_ - - `download `_ - * - mobilenet_v3_large_minimalistic + - `download `_ + * - mobilenet_v3_large_minimalistic - 72.11 - 70.61 - 378 @@ -217,8 +217,8 @@ ImageNet - 0.42 - `download `_ - `link `_ - - `download `_ - * - regnetx_1.6gf + - `download `_ + * - regnetx_1.6gf - 77.05 - 76.75 - 223 @@ -228,8 +228,8 @@ ImageNet - 3.22 - `download `_ - `link `_ - - `download `_ - * - regnetx_800mf + - `download `_ + * - regnetx_800mf - 75.16 - 74.84 - 280 @@ -239,8 +239,8 @@ ImageNet - 1.6 - `download `_ - `link `_ - - `download `_ - * - repvgg_a1 + - `download `_ + * - repvgg_a1 - 74.4 - 72.4 - 233 @@ -250,8 +250,8 @@ ImageNet - 4.7 - `download `_ - `link `_ - - `download `_ - * - repvgg_a2 + - `download `_ + * - repvgg_a2 - 76.52 - 74.52 - 121 @@ -261,8 +261,8 @@ ImageNet - 10.2 - `download `_ - `link `_ - - `download `_ - * - resmlp12_relu + - `download `_ + * - resmlp12_relu - 75.26 - 74.32 - 45 @@ -272,8 +272,8 @@ ImageNet - 6.04 - `download `_ - `link `_ - - `download `_ - * - resnet_v1_18 + - `download `_ + * - resnet_v1_18 - 71.26 - 71.06 - 915 @@ -283,8 +283,8 @@ ImageNet - 3.64 - `download `_ - `link `_ 
- - `download `_ - * - resnet_v1_34 + - `download `_ + * - resnet_v1_34 - 72.7 - 72.14 - 131 @@ -294,8 +294,8 @@ ImageNet - 7.34 - `download `_ - `link `_ - - `download `_ - * - resnet_v1_50 |rocket| |star| + - `download `_ + * - resnet_v1_50 |rocket| |star| - 75.12 - 74.47 - 120 @@ -305,8 +305,8 @@ ImageNet - 6.98 - `download `_ - `link `_ - - `download `_ - * - resnext26_32x4d + - `download `_ + * - resnext26_32x4d - 76.18 - 75.78 - 194 @@ -316,8 +316,8 @@ ImageNet - 4.96 - `download `_ - `link `_ - - `download `_ - * - resnext50_32x4d + - `download `_ + * - resnext50_32x4d - 79.31 - 78.21 - 104 @@ -327,8 +327,8 @@ ImageNet - 8.48 - `download `_ - `link `_ - - `download `_ - * - squeezenet_v1.1 + - `download `_ + * - squeezenet_v1.1 - 59.85 - 59.4 - 1730 @@ -338,8 +338,8 @@ ImageNet - 0.78 - `download `_ - `link `_ - - `download `_ - * - vit_base_bn + - `download `_ + * - vit_base_bn - 79.98 - 78.58 - 29 @@ -349,8 +349,8 @@ ImageNet - 35.188 - `download `_ - `link `_ - - `download `_ - * - vit_small_bn + - `download `_ + * - vit_small_bn - 78.12 - 77.02 - 86 @@ -360,8 +360,8 @@ ImageNet - 8.62 - `download `_ - `link `_ - - `download `_ - * - vit_tiny_bn + - `download `_ + * - vit_tiny_bn - 68.95 - 67.15 - 126 diff --git a/docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst b/docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst index a44650a0..99d0ac93 100644 --- a/docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
@@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Depth Estimation`_ - + .. _Depth Estimation: @@ -41,8 +41,8 @@ NYU - OPS (G) - Pretrained - Source - - Compiled - * - fast_depth |star| + - Compiled + * - fast_depth |star| - 0.6 - 0.62 - 299 @@ -52,8 +52,8 @@ NYU - 0.74 - `download `_ - `link `_ - - `download `_ - * - scdepthv3 + - `download `_ + * - scdepthv3 - 0.48 - 0.51 - 114 diff --git a/docs/public_models/HAILO8L/HAILO8l_face_attribute.rst b/docs/public_models/HAILO8L/HAILO8l_face_attribute.rst index a2bbd86e..e1069ccb 100644 --- a/docs/public_models/HAILO8L/HAILO8l_face_attribute.rst +++ b/docs/public_models/HAILO8L/HAILO8l_face_attribute.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Face Attribute`_ - + .. _Face Attribute: @@ -41,8 +41,8 @@ CELEBA - OPS (G) - Pretrained - Source - - Compiled - * - face_attr_resnet_v1_18 + - Compiled + * - face_attr_resnet_v1_18 - 81.19 - 81.09 - 670 diff --git a/docs/public_models/HAILO8L/HAILO8l_face_detection.rst b/docs/public_models/HAILO8L/HAILO8l_face_detection.rst index 16ad2f0e..c3d4b5b7 100644 --- a/docs/public_models/HAILO8L/HAILO8l_face_detection.rst +++ b/docs/public_models/HAILO8L/HAILO8l_face_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. 
|star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Face Detection`_ - + .. _Face Detection: @@ -41,8 +41,8 @@ WiderFace - OPS (G) - Pretrained - Source - - Compiled - * - lightface_slim |star| + - Compiled + * - lightface_slim |star| - 39.7 - 39.22 - 1249 @@ -52,8 +52,8 @@ WiderFace - 0.16 - `download `_ - `link `_ - - `download `_ - * - retinaface_mobilenet_v1 |star| + - `download `_ + * - retinaface_mobilenet_v1 |star| - 81.27 - 81.17 - 45 @@ -63,8 +63,8 @@ WiderFace - 25.14 - `download `_ - `link `_ - - `download `_ - * - scrfd_10g + - `download `_ + * - scrfd_10g - 82.13 - 82.03 - 93 @@ -74,8 +74,8 @@ WiderFace - 26.74 - `download `_ - `link `_ - - `download `_ - * - scrfd_2.5g + - `download `_ + * - scrfd_2.5g - 76.59 - 76.32 - 195 @@ -85,8 +85,8 @@ WiderFace - 6.88 - `download `_ - `link `_ - - `download `_ - * - scrfd_500m + - `download `_ + * - scrfd_500m - 68.98 - 68.88 - 206 diff --git a/docs/public_models/HAILO8L/HAILO8l_face_recognition.rst b/docs/public_models/HAILO8L/HAILO8l_face_recognition.rst index 1313f75f..cb14c23b 100644 --- a/docs/public_models/HAILO8L/HAILO8l_face_recognition.rst +++ b/docs/public_models/HAILO8L/HAILO8l_face_recognition.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Face Recognition`_ - + .. 
_Face Recognition: @@ -41,8 +41,8 @@ LFW - OPS (G) - Pretrained - Source - - Compiled - * - arcface_mobilefacenet + - Compiled + * - arcface_mobilefacenet - 99.43 - 99.41 - 328 @@ -52,8 +52,8 @@ LFW - 0.88 - `download `_ - `link `_ - - `download `_ - * - arcface_r50 + - `download `_ + * - arcface_r50 - 99.72 - 99.71 - 80 diff --git a/docs/public_models/HAILO8L/HAILO8l_facial_landmark_detection.rst b/docs/public_models/HAILO8L/HAILO8l_facial_landmark_detection.rst index a1b187b4..6cc7e12a 100644 --- a/docs/public_models/HAILO8L/HAILO8l_facial_landmark_detection.rst +++ b/docs/public_models/HAILO8L/HAILO8l_facial_landmark_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Facial Landmark Detection`_ - + .. _Facial Landmark Detection: @@ -41,8 +41,8 @@ AFLW2k3d - OPS (G) - Pretrained - Source - - Compiled - * - tddfa_mobilenet_v1 |star| + - Compiled + * - tddfa_mobilenet_v1 |star| - 3.68 - 4.05 - 5401 diff --git a/docs/public_models/HAILO8L/HAILO8l_hand_landmark_detection.rst b/docs/public_models/HAILO8L/HAILO8l_hand_landmark_detection.rst index e2aa3fe0..60885d12 100644 --- a/docs/public_models/HAILO8L/HAILO8l_hand_landmark_detection.rst +++ b/docs/public_models/HAILO8L/HAILO8l_hand_landmark_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
@@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Hand Landmark detection`_ - + .. _Hand Landmark detection: @@ -26,7 +26,7 @@ Hand Landmark detection Hand Landmark ^^^^^^^^^^^^^ - + .. list-table:: :header-rows: 1 @@ -38,8 +38,8 @@ Hand Landmark - OPS (G) - Pretrained - Source - - Compiled - * - hand_landmark_lite + - Compiled + * - hand_landmark_lite - 292 - 980 - 224x224x3 diff --git a/docs/public_models/HAILO8L/HAILO8l_image_denoising.rst b/docs/public_models/HAILO8L/HAILO8l_image_denoising.rst index 0cc48160..05665c8e 100644 --- a/docs/public_models/HAILO8L/HAILO8l_image_denoising.rst +++ b/docs/public_models/HAILO8L/HAILO8l_image_denoising.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Image Denoising`_ - + .. _Image Denoising: @@ -41,8 +41,8 @@ BSD68 - OPS (G) - Pretrained - Source - - Compiled - * - dncnn3 + - Compiled + * - dncnn3 - 31.46 - 31.26 - 29 @@ -71,8 +71,8 @@ CBSD68 - OPS (G) - Pretrained - Source - - Compiled - * - dncnn_color_blind + - Compiled + * - dncnn_color_blind - 33.87 - 32.97 - 29 diff --git a/docs/public_models/HAILO8L/HAILO8l_instance_segmentation.rst b/docs/public_models/HAILO8L/HAILO8l_instance_segmentation.rst index b77661fb..5095a396 100644 --- a/docs/public_models/HAILO8L/HAILO8l_instance_segmentation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_instance_segmentation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. 
|star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Instance Segmentation`_ - + .. _Instance Segmentation: @@ -41,8 +41,8 @@ COCO - OPS (G) - Pretrained - Source - - Compiled - * - yolact_regnetx_1.6gf + - Compiled + * - yolact_regnetx_1.6gf - 27.57 - 27.27 - 31 @@ -52,8 +52,8 @@ COCO - 125.34 - `download `_ - `link `_ - - `download `_ - * - yolact_regnetx_800mf + - `download `_ + * - yolact_regnetx_800mf - 25.61 - 25.5 - 33 @@ -63,8 +63,8 @@ COCO - 116.75 - `download `_ - `link `_ - - `download `_ - * - yolov5l_seg + - `download `_ + * - yolov5l_seg - 39.78 - 39.09 - 18 @@ -74,8 +74,8 @@ COCO - 147.88 - `download `_ - `link `_ - - `download `_ - * - yolov5m_seg + - `download `_ + * - yolov5m_seg - 37.05 - 36.32 - 40 @@ -85,8 +85,8 @@ COCO - 70.94 - `download `_ - `link `_ - - `download `_ - * - yolov5n_seg |star| + - `download `_ + * - yolov5n_seg |star| - 23.35 - 22.75 - 122 @@ -96,8 +96,8 @@ COCO - 7.1 - `download `_ - `link `_ - - `download `_ - * - yolov5s_seg + - `download `_ + * - yolov5s_seg - 31.57 - 30.8 - 77 @@ -107,8 +107,8 @@ COCO - 26.42 - `download `_ - `link `_ - - `download `_ - * - yolov8m_seg + - `download `_ + * - yolov8m_seg - 40.6 - 39.85 - 29 @@ -118,8 +118,8 @@ COCO - 110.2 - `download `_ - `link `_ - - `download `_ - * - yolov8n_seg + - `download `_ + * - yolov8n_seg - 30.32 - 29.68 - 119 @@ -129,8 +129,8 @@ COCO - 12.04 - `download `_ - `link `_ - - `download `_ - * - yolov8s_seg + - `download `_ + * - yolov8s_seg - 36.63 - 36.13 - 60 diff --git a/docs/public_models/HAILO8L/HAILO8l_low_light_enhancement.rst b/docs/public_models/HAILO8L/HAILO8l_low_light_enhancement.rst index 3a6352b0..6f10e8ec 100644 --- a/docs/public_models/HAILO8L/HAILO8l_low_light_enhancement.rst +++ 
b/docs/public_models/HAILO8L/HAILO8l_low_light_enhancement.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Low Light Enhancement`_ - + .. _Low Light Enhancement: @@ -41,8 +41,8 @@ LOL - OPS (G) - Pretrained - Source - - Compiled - * - zero_dce + - Compiled + * - zero_dce - 16.23 - 16.24 - 69 @@ -52,8 +52,8 @@ LOL - 38.2 - `download `_ - `link `_ - - `download `_ - * - zero_dce_pp + - `download `_ + * - zero_dce_pp - 15.95 - 15.82 - 34 diff --git a/docs/public_models/HAILO8L/HAILO8l_object_detection.rst b/docs/public_models/HAILO8L/HAILO8l_object_detection.rst index ef5828e0..514b3a97 100644 --- a/docs/public_models/HAILO8L/HAILO8l_object_detection.rst +++ b/docs/public_models/HAILO8L/HAILO8l_object_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Object Detection`_ - + .. 
_Object Detection: @@ -41,8 +41,8 @@ COCO - OPS (G) - Pretrained - Source - - Compiled - * - centernet_resnet_v1_18_postprocess + - Compiled + * - centernet_resnet_v1_18_postprocess - 26.3 - 23.31 - 85 @@ -52,8 +52,8 @@ COCO - 31.21 - `download `_ - `link `_ - - `download `_ - * - centernet_resnet_v1_50_postprocess + - `download `_ + * - centernet_resnet_v1_50_postprocess - 31.78 - 29.23 - 52 @@ -63,8 +63,8 @@ COCO - 56.92 - `download `_ - `link `_ - - `download `_ - * - damoyolo_tinynasL20_T + - `download `_ + * - damoyolo_tinynasL20_T - 42.8 - 42.3 - 92 @@ -74,8 +74,8 @@ COCO - 18.02 - `download `_ - `link `_ - - `download `_ - * - damoyolo_tinynasL25_S + - `download `_ + * - damoyolo_tinynasL25_S - 46.53 - 45.34 - 80 @@ -85,8 +85,8 @@ COCO - 37.64 - `download `_ - `link `_ - - `download `_ - * - damoyolo_tinynasL35_M + - `download `_ + * - damoyolo_tinynasL35_M - 49.7 - 47.7 - 36 @@ -96,8 +96,8 @@ COCO - 61.64 - `download `_ - `link `_ - - `download `_ - * - detr_resnet_v1_18_bn + - `download `_ + * - detr_resnet_v1_18_bn - 33.91 - 30.91 - 15 @@ -107,8 +107,8 @@ COCO - 61.87 - `download `_ - `link `_ - - `download `_ - * - efficientdet_lite0 + - `download `_ + * - efficientdet_lite0 - 27.32 - 26.49 - 71 @@ -118,8 +118,8 @@ COCO - 1.94 - `download `_ - `link `_ - - `download `_ - * - efficientdet_lite1 + - `download `_ + * - efficientdet_lite1 - 32.27 - 31.72 - 44 @@ -129,8 +129,8 @@ COCO - 4 - `download `_ - `link `_ - - `download `_ - * - efficientdet_lite2 + - `download `_ + * - efficientdet_lite2 - 35.95 - 34.67 - 25 @@ -140,8 +140,8 @@ COCO - 6.84 - `download `_ - `link `_ - - `download `_ - * - nanodet_repvgg |star| + - `download `_ + * - nanodet_repvgg |star| - 29.3 - 28.53 - 176 @@ -151,8 +151,8 @@ COCO - 11.28 - `download `_ - `link `_ - - `download `_ - * - nanodet_repvgg_a12 + - `download `_ + * - nanodet_repvgg_a12 - 33.73 - 32.13 - 108 @@ -162,8 +162,8 @@ COCO - 28.23 - `download `_ - `link `_ - - `download `_ - * - nanodet_repvgg_a1_640 + - 
`download `_ + * - nanodet_repvgg_a1_640 - 33.28 - 32.88 - 82 @@ -173,8 +173,8 @@ COCO - 42.8 - `download `_ - `link `_ - - `download `_ - * - ssd_mobilenet_v1 |rocket| |star| + - `download `_ + * - ssd_mobilenet_v1 |rocket| |star| - 23.19 - 22.29 - 147 @@ -184,8 +184,8 @@ COCO - 2.5 - `download `_ - `link `_ - - `download `_ - * - ssd_mobilenet_v2 + - `download `_ + * - ssd_mobilenet_v2 - 24.15 - 22.95 - 97 @@ -195,8 +195,8 @@ COCO - 1.52 - `download `_ - `link `_ - - `download `_ - * - tiny_yolov3 |rocket| + - `download `_ + * - tiny_yolov3 |rocket| - 14.66 - 14.41 - 623 @@ -206,8 +206,8 @@ COCO - 5.58 - `download `_ - `link `_ - - `download `_ - * - tiny_yolov4 + - `download `_ + * - tiny_yolov4 - 19.18 - 17.73 - 474 @@ -217,8 +217,8 @@ COCO - 6.92 - `download `_ - `link `_ - - `download `_ - * - yolov3 |star| + - `download `_ + * - yolov3 |star| - 38.42 - 38.37 - 15 @@ -228,8 +228,8 @@ COCO - 158.10 - `download `_ - `link `_ - - `download `_ - * - yolov3_416 + - `download `_ + * - yolov3_416 - 37.73 - 37.53 - 25 @@ -239,8 +239,8 @@ COCO - 65.94 - `download `_ - `link `_ - - `download `_ - * - yolov3_gluon |star| + - `download `_ + * - yolov3_gluon |star| - 37.28 - 35.64 - 14 @@ -250,8 +250,8 @@ COCO - 158.1 - `download `_ - `link `_ - - `download `_ - * - yolov3_gluon_416 |star| + - `download `_ + * - yolov3_gluon_416 |star| - 36.27 - 34.92 - 25 @@ -261,8 +261,8 @@ COCO - 65.94 - `download `_ - `link `_ - - `download `_ - * - yolov4_leaky |star| + - `download `_ + * - yolov4_leaky |star| - 42.37 - 41.08 - 24 @@ -272,8 +272,8 @@ COCO - 91.04 - `download `_ - `link `_ - - `download `_ - * - yolov5m + - `download `_ + * - yolov5m - 42.59 - 41.19 - 46 @@ -283,8 +283,8 @@ COCO - 52.17 - `download `_ - `link `_ - - `download `_ - * - yolov5m6_6.1 + - `download `_ + * - yolov5m6_6.1 - 50.67 - 48.97 - 14 @@ -294,8 +294,8 @@ COCO - 200.04 - `download `_ - `link `_ - - `download `_ - * - yolov5m_6.1 + - `download `_ + * - yolov5m_6.1 - 44.8 - 43.36 - 54 @@ -305,8 +305,8 
@@ COCO - 48.96 - `download `_ - `link `_ - - `download `_ - * - yolov5m_wo_spp + - `download `_ + * - yolov5m_wo_spp - 43.06 - 41.06 - 50.931 @@ -316,8 +316,8 @@ COCO - 52.88 - `download `_ - `link `_ - - `download `_ - * - yolov5s |star| + - `download `_ + * - yolov5s |star| - 35.33 - 33.98 - 93 @@ -327,8 +327,8 @@ COCO - 17.44 - `download `_ - `link `_ - - `download `_ - * - yolov5s_c3tr + - `download `_ + * - yolov5s_c3tr - 37.13 - 35.63 - 84 @@ -338,8 +338,8 @@ COCO - 17.02 - `download `_ - `link `_ - - `download `_ - * - yolov5xs_wo_spp + - `download `_ + * - yolov5xs_wo_spp - 33.18 - 32.2 - 131 @@ -349,8 +349,8 @@ COCO - 11.36 - `download `_ - `link `_ - - `download `_ - * - yolov5xs_wo_spp_nms_core + - `download `_ + * - yolov5xs_wo_spp_nms_core - 32.57 - 30.86 - 130 @@ -360,8 +360,8 @@ COCO - 11.36 - `download `_ - `link `_ - - `download `_ - * - yolov6n + - `download `_ + * - yolov6n - 34.28 - 32.28 - 162 @@ -371,8 +371,8 @@ COCO - 11.12 - `download `_ - `link `_ - - `download `_ - * - yolov6n_0.2.1 + - `download `_ + * - yolov6n_0.2.1 - 35.16 - 33.87 - 158 @@ -382,8 +382,8 @@ COCO - 11.06 - `download `_ - `link `_ - - `download `_ - * - yolov7 + - `download `_ + * - yolov7 - 50.59 - 47.89 - 24 @@ -393,8 +393,8 @@ COCO - 104.51 - `download `_ - `link `_ - - `download `_ - * - yolov7_tiny + - `download `_ + * - yolov7_tiny - 37.07 - 36.07 - 121 @@ -404,8 +404,8 @@ COCO - 13.74 - `download `_ - `link `_ - - `download `_ - * - yolov7e6 + - `download `_ + * - yolov7e6 - 55.37 - 53.47 - 4 @@ -415,8 +415,8 @@ COCO - 515.12 - `download `_ - `link `_ - - `download `_ - * - yolov8l + - `download `_ + * - yolov8l - 52.44 - 51.78 - 19 @@ -426,8 +426,8 @@ COCO - 165.3 - `download `_ - `link `_ - - `download `_ - * - yolov8m + - `download `_ + * - yolov8m - 49.91 - 49.11 - 38 @@ -437,8 +437,8 @@ COCO - 78.93 - `download `_ - `link `_ - - `download `_ - * - yolov8n + - `download `_ + * - yolov8n - 37.02 - 36.32 - 144 @@ -448,8 +448,8 @@ COCO - 8.74 - `download `_ - 
`link `_ - - `download `_ - * - yolov8s + - `download `_ + * - yolov8s - 44.58 - 43.98 - 87 @@ -459,8 +459,8 @@ COCO - 28.6 - `download `_ - `link `_ - - `download `_ - * - yolov8x + - `download `_ + * - yolov8x - 53.45 - 52.75 - 10 @@ -470,8 +470,8 @@ COCO - 258 - `download `_ - `link `_ - - `download `_ - * - yolov9c + - `download `_ + * - yolov9c - 52.8 - 50.7 - None @@ -481,8 +481,8 @@ COCO - 102.1 - `download `_ - `link `_ - - `download `_ - * - yolox_l_leaky |star| + - `download `_ + * - yolox_l_leaky |star| - 48.69 - 46.59 - 19 @@ -492,8 +492,8 @@ COCO - 155.3 - `download `_ - `link `_ - - `download `_ - * - yolox_s_leaky + - `download `_ + * - yolox_s_leaky - 38.12 - 37.27 - 79 @@ -503,8 +503,8 @@ COCO - 26.74 - `download `_ - `link `_ - - `download `_ - * - yolox_s_wide_leaky + - `download `_ + * - yolox_s_wide_leaky - 42.4 - 40.97 - 48 @@ -514,8 +514,8 @@ COCO - 59.46 - `download `_ - `link `_ - - `download `_ - * - yolox_tiny + - `download `_ + * - yolox_tiny - 32.64 - 31.39 - 152 @@ -544,8 +544,8 @@ VisDrone - OPS (G) - Pretrained - Source - - Compiled - * - ssd_mobilenet_v1_visdrone |star| + - Compiled + * - ssd_mobilenet_v1_visdrone |star| - 2.37 - 2.22 - 200 diff --git a/docs/public_models/HAILO8L/HAILO8l_person_attribute.rst b/docs/public_models/HAILO8L/HAILO8l_person_attribute.rst index 6d95e49b..4d890e97 100644 --- a/docs/public_models/HAILO8L/HAILO8l_person_attribute.rst +++ b/docs/public_models/HAILO8L/HAILO8l_person_attribute.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Person Attribute`_ - + .. 
_Person Attribute: @@ -41,8 +41,8 @@ PETA - OPS (G) - Pretrained - Source - - Compiled - * - person_attr_resnet_v1_18 + - Compiled + * - person_attr_resnet_v1_18 - 82.5 - 82.61 - 1062 diff --git a/docs/public_models/HAILO8L/HAILO8l_person_re_id.rst b/docs/public_models/HAILO8L/HAILO8l_person_re_id.rst index 4197365b..1ef85181 100644 --- a/docs/public_models/HAILO8L/HAILO8l_person_re_id.rst +++ b/docs/public_models/HAILO8L/HAILO8l_person_re_id.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Person Re-ID`_ - + .. _Person Re-ID: @@ -41,8 +41,8 @@ Market1501 - OPS (G) - Pretrained - Source - - Compiled - * - osnet_x1_0 + - Compiled + * - osnet_x1_0 - 94.43 - 93.63 - 107 @@ -52,8 +52,8 @@ Market1501 - 1.98 - `download `_ - `link `_ - - `download `_ - * - repvgg_a0_person_reid_512 |star| + - `download `_ + * - repvgg_a0_person_reid_512 |star| - 89.9 - 89.3 - 3526 diff --git a/docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst b/docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst index c0b6b65f..da4875f9 100644 --- a/docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
@@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Pose Estimation`_ - + .. _Pose Estimation: @@ -41,8 +41,8 @@ COCO - OPS (G) - Pretrained - Source - - Compiled - * - centerpose_regnetx_1.6gf_fpn |star| + - Compiled + * - centerpose_regnetx_1.6gf_fpn |star| - 53.54 - 53.53 - 42 @@ -52,8 +52,8 @@ COCO - 64.58 - `download `_ - `link `_ - - `download `_ - * - centerpose_regnetx_800mf + - `download `_ + * - centerpose_regnetx_800mf - 44.07 - 43.07 - 67 @@ -63,8 +63,8 @@ COCO - 86.12 - `download `_ - `link `_ - - `download `_ - * - centerpose_repvgg_a0 |star| + - `download `_ + * - centerpose_repvgg_a0 |star| - 39.17 - 37.17 - 96 @@ -74,8 +74,8 @@ COCO - 28.27 - `download `_ - `link `_ - - `download `_ - * - yolov8m_pose + - `download `_ + * - yolov8m_pose - 64.26 - 61.66 - 36 @@ -85,8 +85,8 @@ COCO - 81.02 - `download `_ - `link `_ - - `download `_ - * - yolov8s_pose + - `download `_ + * - yolov8s_pose - 59.2 - 55.6 - 82 diff --git a/docs/public_models/HAILO8L/HAILO8l_semantic_segmentation.rst b/docs/public_models/HAILO8L/HAILO8l_semantic_segmentation.rst index 867e5a9d..8e3711a1 100644 --- a/docs/public_models/HAILO8L/HAILO8l_semantic_segmentation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_semantic_segmentation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Semantic Segmentation`_ - + .. 
_Semantic Segmentation: @@ -41,8 +41,8 @@ Cityscapes - OPS (G) - Pretrained - Source - - Compiled - * - fcn8_resnet_v1_18 |star| + - Compiled + * - fcn8_resnet_v1_18 |star| - 69.41 - 69.21 - 15 @@ -52,8 +52,8 @@ Cityscapes - 142.82 - `download `_ - `link `_ - - `download `_ - * - segformer_b0_bn + - `download `_ + * - segformer_b0_bn - 69.81 - 68.01 - None @@ -63,8 +63,8 @@ Cityscapes - 35.76 - `download `_ - `link `_ - - `download `_ - * - stdc1 + - `download `_ + * - stdc1 - 74.57 - 73.92 - 13 @@ -93,8 +93,8 @@ Oxford-IIIT Pet - OPS (G) - Pretrained - Source - - Compiled - * - unet_mobilenet_v2 + - Compiled + * - unet_mobilenet_v2 - 77.32 - 77.02 - 100 @@ -123,8 +123,8 @@ Pascal VOC - OPS (G) - Pretrained - Source - - Compiled - * - deeplab_v3_mobilenet_v2 |rocket| + - Compiled + * - deeplab_v3_mobilenet_v2 |rocket| - 76.05 - 74.8 - 35 @@ -134,8 +134,8 @@ Pascal VOC - 17.65 - `download `_ - `link `_ - - `download `_ - * - deeplab_v3_mobilenet_v2_wo_dilation + - `download `_ + * - deeplab_v3_mobilenet_v2_wo_dilation - 71.46 - 71.26 - 59 diff --git a/docs/public_models/HAILO8L/HAILO8l_single_person_pose_estimation.rst b/docs/public_models/HAILO8L/HAILO8l_single_person_pose_estimation.rst index dd2da21f..ddf2d3ef 100644 --- a/docs/public_models/HAILO8L/HAILO8l_single_person_pose_estimation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_single_person_pose_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Single Person Pose Estimation`_ - + .. 
_Single Person Pose Estimation: @@ -41,8 +41,8 @@ COCO - OPS (G) - Pretrained - Source - - Compiled - * - mspn_regnetx_800mf |star| + - Compiled + * - mspn_regnetx_800mf |star| - 70.8 - 70.3 - 173 @@ -52,8 +52,8 @@ COCO - 2.94 - `download `_ - `link `_ - - `download `_ - * - vit_pose_small_bn + - `download `_ + * - vit_pose_small_bn - 72.01 - 70.81 - 52 diff --git a/docs/public_models/HAILO8L/HAILO8l_stereo_depth_estimation.rst b/docs/public_models/HAILO8L/HAILO8l_stereo_depth_estimation.rst index eff62cfc..2784be43 100644 --- a/docs/public_models/HAILO8L/HAILO8l_stereo_depth_estimation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_stereo_depth_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Stereo Depth Estimation`_ - + .. _Stereo Depth Estimation: @@ -41,8 +41,8 @@ N/A - OPS (G) - Pretrained - Source - - Compiled - * - stereonet + - Compiled + * - stereonet - 91.79 - 89.14 - None diff --git a/docs/public_models/HAILO8L/HAILO8l_super_resolution.rst b/docs/public_models/HAILO8L/HAILO8l_super_resolution.rst index 698ef27a..fe5c5563 100644 --- a/docs/public_models/HAILO8L/HAILO8l_super_resolution.rst +++ b/docs/public_models/HAILO8L/HAILO8l_super_resolution.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. 
@@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Super Resolution`_ - + .. _Super Resolution: @@ -41,8 +41,8 @@ BSD100 - OPS (G) - Pretrained - Source - - Compiled - * - espcn_x2 + - Compiled + * - espcn_x2 - 31.4 - 30.3 - 1164 @@ -52,8 +52,8 @@ BSD100 - 1.6 - `download `_ - `link `_ - - `download `_ - * - espcn_x3 + - `download `_ + * - espcn_x3 - 28.41 - 28.06 - 2217 @@ -63,8 +63,8 @@ BSD100 - 0.76 - `download `_ - `link `_ - - `download `_ - * - espcn_x4 + - `download `_ + * - espcn_x4 - 26.83 - 26.58 - 2189 diff --git a/docs/public_models/HAILO8L/HAILO8l_zero_shot_classification.rst b/docs/public_models/HAILO8L/HAILO8l_zero_shot_classification.rst index bcc6283d..2350c833 100644 --- a/docs/public_models/HAILO8L/HAILO8l_zero_shot_classification.rst +++ b/docs/public_models/HAILO8L/HAILO8l_zero_shot_classification.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: images/rocket.png +.. |rocket| image:: docs/images/rocket.png :width: 18 -.. |star| image:: images/star.png +.. |star| image:: docs/images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. @@ -17,7 +17,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Supported tasks: * `Zero-shot Classification`_ - + .. 
_Zero-shot Classification: @@ -41,8 +41,8 @@ CIFAR100 - OPS (G) - Pretrained - Source - - Compiled - * - clip_resnet_50 + - Compiled + * - clip_resnet_50 - 42.07 - 38.57 - 66 From f1761c51579216db3a4bba8e26c9437ef7282ebe Mon Sep 17 00:00:00 2001 From: HailoModelZoo <87389434+HailoModelZoo@users.noreply.github.com> Date: Sun, 7 Apr 2024 10:41:29 +0300 Subject: [PATCH 03/17] Update README.rst --- README.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.rst b/README.rst index cd2ec700..b32948a5 100644 --- a/README.rst +++ b/README.rst @@ -57,13 +57,13 @@ The models are divided to: * Public models - which were trained on publicly available datasets. - * For Hailo-8 - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ + * For Hailo-8 - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ - * For Hailo-8L - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ + * For Hailo-8L - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ - * For Hailo-15H - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ + * For Hailo-15H - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ - * For Hailo-15M - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ + * For Hailo-15M - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ From aaa28e2ee487637ec312d836be55688d49a2f0c8 Mon Sep 17 00:00:00 2001 From: HailoModelZoo <87389434+HailoModelZoo@users.noreply.github.com> Date: Sun, 7 Apr 2024 10:56:59 +0300 Subject: [PATCH 04/17] Update PUBLIC_MODELS.rst --- docs/PUBLIC_MODELS.rst | 142 ++++++++++++++++++++--------------------- 1 file changed, 71 insertions(+), 71 deletions(-) diff --git a/docs/PUBLIC_MODELS.rst b/docs/PUBLIC_MODELS.rst index 58d32f56..fbd8e8f6 100644 --- a/docs/PUBLIC_MODELS.rst +++ b/docs/PUBLIC_MODELS.rst @@ -10,97 +10,97 @@ Hailo provides different 
pre-trained models in ONNX / TF formats and pre-compile - Hailo-15H - Hailo-15M * - Classification - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Object Detection - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Semantic Segmentation - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Pose Estimation - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Single Person Pose Estimation - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Face Detection - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Instance Segmentation - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Depth Estimation - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Facial Landmark Detection - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Person Re-ID - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Super Resolution - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Face Recognition - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Person Attribute - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Face Attribute - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Zero-shot Classification - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ - NA - NA * - Stereo Depth Estimation - - `Link `_ + - `Link `_ - 
NA - NA - NA * - Low Light Enhancement - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Image Denoising - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ * - Hand Landmark detection - - `Link `_ - - `Link `_ - - `Link `_ - - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ + - `Link `_ From 0e31ab04b104d4dd6cf4971f01f3d4380d332c27 Mon Sep 17 00:00:00 2001 From: HailoModelZoo <87389434+HailoModelZoo@users.noreply.github.com> Date: Sun, 7 Apr 2024 11:01:39 +0300 Subject: [PATCH 05/17] Update README.rst --- README.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.rst b/README.rst index b32948a5..1e853f03 100644 --- a/README.rst +++ b/README.rst @@ -57,13 +57,13 @@ The models are divided to: * Public models - which were trained on publicly available datasets. - * For Hailo-8 - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ + * For Hailo-8 - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ - * For Hailo-8L - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ + * For Hailo-8L - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ - * For Hailo-15H - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ + * For Hailo-15H - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ - * For Hailo-15M - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ + * For Hailo-15M - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ From 2a7e742f4a2ef03a4de6bb9c733e6f340fd6e697 Mon Sep 17 00:00:00 2001 From: HailoModelZoo <87389434+HailoModelZoo@users.noreply.github.com> Date: Sun, 7 Apr 2024 11:28:06 +0300 Subject: [PATCH 06/17] Rename HAILO8l_classificaion.rst to HAILO8l_classification.rst --- .../{HAILO8l_classificaion.rst => 
HAILO8l_classification.rst} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename docs/public_models/HAILO8L/{HAILO8l_classificaion.rst => HAILO8l_classification.rst} (100%) diff --git a/docs/public_models/HAILO8L/HAILO8l_classificaion.rst b/docs/public_models/HAILO8L/HAILO8l_classification.rst similarity index 100% rename from docs/public_models/HAILO8L/HAILO8l_classificaion.rst rename to docs/public_models/HAILO8L/HAILO8l_classification.rst From c8a086a34f57e29bfc303e22b2106e4e6a4967d0 Mon Sep 17 00:00:00 2001 From: HailoModelZoo <87389434+HailoModelZoo@users.noreply.github.com> Date: Sun, 7 Apr 2024 11:29:54 +0300 Subject: [PATCH 07/17] Update README.rst --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 1e853f03..b0d0205f 100644 --- a/README.rst +++ b/README.rst @@ -59,7 +59,7 @@ The models are divided to: * For Hailo-8 - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ - * For Hailo-8L - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ + * For Hailo-8L - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ * For Hailo-15H - `Classification `_, `Object Detection `_, `Segmentation `_, `other tasks `_ From 2154f6d4afb78173093afac2995ece061cc196e6 Mon Sep 17 00:00:00 2001 From: HailoModelZoo <87389434+HailoModelZoo@users.noreply.github.com> Date: Sun, 7 Apr 2024 11:35:55 +0300 Subject: [PATCH 08/17] Update HAILO8_classification.rst --- docs/public_models/HAILO8/HAILO8_classification.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/public_models/HAILO8/HAILO8_classification.rst b/docs/public_models/HAILO8/HAILO8_classification.rst index d2082ae2..afdcb318 100644 --- a/docs/public_models/HAILO8/HAILO8_classification.rst +++ b/docs/public_models/HAILO8/HAILO8_classification.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. 
|rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. From 12286eaac1116baf3cec69386f46521d8fe4d42a Mon Sep 17 00:00:00 2001 From: HailoModelZoo Date: Sun, 7 Apr 2024 11:41:27 +0300 Subject: [PATCH 09/17] fix rocket and star --- docs/public_models/HAILO15H/HAILO15H_classification.rst | 4 ++-- docs/public_models/HAILO15H/HAILO15H_depth_estimation.rst | 4 ++-- docs/public_models/HAILO15H/HAILO15H_face_attribute.rst | 4 ++-- docs/public_models/HAILO15H/HAILO15H_face_detection.rst | 4 ++-- docs/public_models/HAILO15H/HAILO15H_face_recognition.rst | 4 ++-- .../HAILO15H/HAILO15H_facial_landmark_detection.rst | 4 ++-- .../HAILO15H/HAILO15H_hand_landmark_detection.rst | 4 ++-- docs/public_models/HAILO15H/HAILO15H_image_denoising.rst | 4 ++-- .../public_models/HAILO15H/HAILO15H_instance_segmentation.rst | 4 ++-- .../public_models/HAILO15H/HAILO15H_low_light_enhancement.rst | 4 ++-- docs/public_models/HAILO15H/HAILO15H_object_detection.rst | 4 ++-- docs/public_models/HAILO15H/HAILO15H_person_attribute.rst | 4 ++-- docs/public_models/HAILO15H/HAILO15H_person_re_id.rst | 4 ++-- docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst | 4 ++-- .../public_models/HAILO15H/HAILO15H_semantic_segmentation.rst | 4 ++-- .../HAILO15H/HAILO15H_single_person_pose_estimation.rst | 4 ++-- .../HAILO15H/HAILO15H_stereo_depth_estimation.rst | 4 ++-- docs/public_models/HAILO15H/HAILO15H_super_resolution.rst | 4 ++-- .../HAILO15H/HAILO15H_zero_shot_classification.rst | 4 ++-- docs/public_models/HAILO15M/HAILO15M_classification.rst | 4 ++-- docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst | 4 ++-- docs/public_models/HAILO15M/HAILO15M_face_attribute.rst | 4 ++-- docs/public_models/HAILO15M/HAILO15M_face_detection.rst | 4 ++-- docs/public_models/HAILO15M/HAILO15M_face_recognition.rst | 4 ++-- 
.../HAILO15M/HAILO15M_facial_landmark_detection.rst | 4 ++-- .../HAILO15M/HAILO15M_hand_landmark_detection.rst | 4 ++-- docs/public_models/HAILO15M/HAILO15M_image_denoising.rst | 4 ++-- .../public_models/HAILO15M/HAILO15M_instance_segmentation.rst | 4 ++-- .../public_models/HAILO15M/HAILO15M_low_light_enhancement.rst | 4 ++-- docs/public_models/HAILO15M/HAILO15M_object_detection.rst | 4 ++-- docs/public_models/HAILO15M/HAILO15M_person_attribute.rst | 4 ++-- docs/public_models/HAILO15M/HAILO15M_person_re_id.rst | 4 ++-- docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst | 4 ++-- .../public_models/HAILO15M/HAILO15M_semantic_segmentation.rst | 4 ++-- .../HAILO15M/HAILO15M_single_person_pose_estimation.rst | 4 ++-- .../HAILO15M/HAILO15M_stereo_depth_estimation.rst | 4 ++-- docs/public_models/HAILO15M/HAILO15M_super_resolution.rst | 4 ++-- .../HAILO15M/HAILO15M_zero_shot_classification.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_classification.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_depth_estimation.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_face_attribute.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_face_detection.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_face_recognition.rst | 4 ++-- .../public_models/HAILO8/HAILO8_facial_landmark_detection.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_image_denoising.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_instance_segmentation.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_object_detection.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_person_attribute.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_person_re_id.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_pose_estimation.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst | 4 ++-- .../HAILO8/HAILO8_single_person_pose_estimation.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst | 
4 ++-- docs/public_models/HAILO8/HAILO8_super_resolution.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_zero_shot_classification.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_classificaion.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_face_attribute.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_face_detection.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_face_recognition.rst | 4 ++-- .../HAILO8L/HAILO8l_facial_landmark_detection.rst | 4 ++-- .../public_models/HAILO8L/HAILO8l_hand_landmark_detection.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_image_denoising.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_instance_segmentation.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_low_light_enhancement.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_object_detection.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_person_attribute.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_person_re_id.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_semantic_segmentation.rst | 4 ++-- .../HAILO8L/HAILO8l_single_person_pose_estimation.rst | 4 ++-- .../public_models/HAILO8L/HAILO8l_stereo_depth_estimation.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_super_resolution.rst | 4 ++-- .../HAILO8L/HAILO8l_zero_shot_classification.rst | 4 ++-- 76 files changed, 152 insertions(+), 152 deletions(-) diff --git a/docs/public_models/HAILO15H/HAILO15H_classification.rst b/docs/public_models/HAILO15H/HAILO15H_classification.rst index 71f7406c..7daf2a2e 100644 --- a/docs/public_models/HAILO15H/HAILO15H_classification.rst +++ b/docs/public_models/HAILO15H/HAILO15H_classification.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. 
|star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15H/HAILO15H_depth_estimation.rst b/docs/public_models/HAILO15H/HAILO15H_depth_estimation.rst index 544d33d9..259196e9 100644 --- a/docs/public_models/HAILO15H/HAILO15H_depth_estimation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_depth_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15H/HAILO15H_face_attribute.rst b/docs/public_models/HAILO15H/HAILO15H_face_attribute.rst index ec7b8429..301f3f09 100644 --- a/docs/public_models/HAILO15H/HAILO15H_face_attribute.rst +++ b/docs/public_models/HAILO15H/HAILO15H_face_attribute.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15H/HAILO15H_face_detection.rst b/docs/public_models/HAILO15H/HAILO15H_face_detection.rst index 13b564b5..eb886f49 100644 --- a/docs/public_models/HAILO15H/HAILO15H_face_detection.rst +++ b/docs/public_models/HAILO15H/HAILO15H_face_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. 
|star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15H/HAILO15H_face_recognition.rst b/docs/public_models/HAILO15H/HAILO15H_face_recognition.rst index 638f17de..5ae1e482 100644 --- a/docs/public_models/HAILO15H/HAILO15H_face_recognition.rst +++ b/docs/public_models/HAILO15H/HAILO15H_face_recognition.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15H/HAILO15H_facial_landmark_detection.rst b/docs/public_models/HAILO15H/HAILO15H_facial_landmark_detection.rst index 55af00b7..c9ec9011 100644 --- a/docs/public_models/HAILO15H/HAILO15H_facial_landmark_detection.rst +++ b/docs/public_models/HAILO15H/HAILO15H_facial_landmark_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst b/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst index 7e259035..eef4bfed 100644 --- a/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst +++ b/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. 
|star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15H/HAILO15H_image_denoising.rst b/docs/public_models/HAILO15H/HAILO15H_image_denoising.rst index 1f674b49..062aef06 100644 --- a/docs/public_models/HAILO15H/HAILO15H_image_denoising.rst +++ b/docs/public_models/HAILO15H/HAILO15H_image_denoising.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst b/docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst index b94e162c..33d0c1c8 100644 --- a/docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst b/docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst index a0fd4716..058fafd6 100644 --- a/docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst +++ b/docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. 
|star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15H/HAILO15H_object_detection.rst b/docs/public_models/HAILO15H/HAILO15H_object_detection.rst index e0d7d3b8..a3723cde 100644 --- a/docs/public_models/HAILO15H/HAILO15H_object_detection.rst +++ b/docs/public_models/HAILO15H/HAILO15H_object_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15H/HAILO15H_person_attribute.rst b/docs/public_models/HAILO15H/HAILO15H_person_attribute.rst index ae355da8..fbdc040b 100644 --- a/docs/public_models/HAILO15H/HAILO15H_person_attribute.rst +++ b/docs/public_models/HAILO15H/HAILO15H_person_attribute.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15H/HAILO15H_person_re_id.rst b/docs/public_models/HAILO15H/HAILO15H_person_re_id.rst index cf6129ae..c4bda3ef 100644 --- a/docs/public_models/HAILO15H/HAILO15H_person_re_id.rst +++ b/docs/public_models/HAILO15H/HAILO15H_person_re_id.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. 
|star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst b/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst index f80ba637..5714e566 100644 --- a/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst b/docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst index 4c9b93d9..14b75862 100644 --- a/docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst b/docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst index b0e79ba4..b995f191 100644 --- a/docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. 
|star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst b/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst index 22731c28..753c46fb 100644 --- a/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15H/HAILO15H_super_resolution.rst b/docs/public_models/HAILO15H/HAILO15H_super_resolution.rst index d84eaa28..dd7a096e 100644 --- a/docs/public_models/HAILO15H/HAILO15H_super_resolution.rst +++ b/docs/public_models/HAILO15H/HAILO15H_super_resolution.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst b/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst index f9e5c537..54886a20 100644 --- a/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst +++ b/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. 
|star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15M/HAILO15M_classification.rst b/docs/public_models/HAILO15M/HAILO15M_classification.rst index 2226bb9d..36305a44 100644 --- a/docs/public_models/HAILO15M/HAILO15M_classification.rst +++ b/docs/public_models/HAILO15M/HAILO15M_classification.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst b/docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst index 8e809d89..8eab643d 100644 --- a/docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15M/HAILO15M_face_attribute.rst b/docs/public_models/HAILO15M/HAILO15M_face_attribute.rst index cb53da27..48f95850 100644 --- a/docs/public_models/HAILO15M/HAILO15M_face_attribute.rst +++ b/docs/public_models/HAILO15M/HAILO15M_face_attribute.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. 
|star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15M/HAILO15M_face_detection.rst b/docs/public_models/HAILO15M/HAILO15M_face_detection.rst index a9beceb5..db03510d 100644 --- a/docs/public_models/HAILO15M/HAILO15M_face_detection.rst +++ b/docs/public_models/HAILO15M/HAILO15M_face_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15M/HAILO15M_face_recognition.rst b/docs/public_models/HAILO15M/HAILO15M_face_recognition.rst index 1cb25701..5e7cc228 100644 --- a/docs/public_models/HAILO15M/HAILO15M_face_recognition.rst +++ b/docs/public_models/HAILO15M/HAILO15M_face_recognition.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15M/HAILO15M_facial_landmark_detection.rst b/docs/public_models/HAILO15M/HAILO15M_facial_landmark_detection.rst index 0cee9595..6480dfd5 100644 --- a/docs/public_models/HAILO15M/HAILO15M_facial_landmark_detection.rst +++ b/docs/public_models/HAILO15M/HAILO15M_facial_landmark_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. 
|star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst b/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst index f1709dae..b592f2b9 100644 --- a/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst +++ b/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15M/HAILO15M_image_denoising.rst b/docs/public_models/HAILO15M/HAILO15M_image_denoising.rst index 4e27717d..fe537272 100644 --- a/docs/public_models/HAILO15M/HAILO15M_image_denoising.rst +++ b/docs/public_models/HAILO15M/HAILO15M_image_denoising.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst b/docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst index baafa56b..a8c7439c 100644 --- a/docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. 
|star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst b/docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst index a0fb9ec4..6c117b70 100644 --- a/docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst +++ b/docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15M/HAILO15M_object_detection.rst b/docs/public_models/HAILO15M/HAILO15M_object_detection.rst index 54fa9cd0..7c64242e 100644 --- a/docs/public_models/HAILO15M/HAILO15M_object_detection.rst +++ b/docs/public_models/HAILO15M/HAILO15M_object_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15M/HAILO15M_person_attribute.rst b/docs/public_models/HAILO15M/HAILO15M_person_attribute.rst index 38ecf28c..997437d7 100644 --- a/docs/public_models/HAILO15M/HAILO15M_person_attribute.rst +++ b/docs/public_models/HAILO15M/HAILO15M_person_attribute.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. 
|star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst b/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst index 0ac6e844..856ce8d3 100644 --- a/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst +++ b/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst b/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst index 31b336c7..6f835b35 100644 --- a/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst b/docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst index cf821320..97a67634 100644 --- a/docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. 
|star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst b/docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst index 909257db..c6f55457 100644 --- a/docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst b/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst index d715b47d..e00999bc 100644 --- a/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15M/HAILO15M_super_resolution.rst b/docs/public_models/HAILO15M/HAILO15M_super_resolution.rst index 6c11406f..cb240f87 100644 --- a/docs/public_models/HAILO15M/HAILO15M_super_resolution.rst +++ b/docs/public_models/HAILO15M/HAILO15M_super_resolution.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. 
|star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst b/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst index 02cf3ec8..d9a75898 100644 --- a/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst +++ b/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8/HAILO8_classification.rst b/docs/public_models/HAILO8/HAILO8_classification.rst index d2082ae2..afdcb318 100644 --- a/docs/public_models/HAILO8/HAILO8_classification.rst +++ b/docs/public_models/HAILO8/HAILO8_classification.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8/HAILO8_depth_estimation.rst b/docs/public_models/HAILO8/HAILO8_depth_estimation.rst index 189787bb..6482ac96 100644 --- a/docs/public_models/HAILO8/HAILO8_depth_estimation.rst +++ b/docs/public_models/HAILO8/HAILO8_depth_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. 
|star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8/HAILO8_face_attribute.rst b/docs/public_models/HAILO8/HAILO8_face_attribute.rst index fa935dba..3b6e2c87 100644 --- a/docs/public_models/HAILO8/HAILO8_face_attribute.rst +++ b/docs/public_models/HAILO8/HAILO8_face_attribute.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8/HAILO8_face_detection.rst b/docs/public_models/HAILO8/HAILO8_face_detection.rst index c1fd8c0f..8fcd7de3 100644 --- a/docs/public_models/HAILO8/HAILO8_face_detection.rst +++ b/docs/public_models/HAILO8/HAILO8_face_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8/HAILO8_face_recognition.rst b/docs/public_models/HAILO8/HAILO8_face_recognition.rst index c2e097be..eaa842e0 100644 --- a/docs/public_models/HAILO8/HAILO8_face_recognition.rst +++ b/docs/public_models/HAILO8/HAILO8_face_recognition.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. 
|star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8/HAILO8_facial_landmark_detection.rst b/docs/public_models/HAILO8/HAILO8_facial_landmark_detection.rst index 3bed33b0..84de8044 100644 --- a/docs/public_models/HAILO8/HAILO8_facial_landmark_detection.rst +++ b/docs/public_models/HAILO8/HAILO8_facial_landmark_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst b/docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst index dfd91d0a..6b892c37 100644 --- a/docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst +++ b/docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8/HAILO8_image_denoising.rst b/docs/public_models/HAILO8/HAILO8_image_denoising.rst index 225caec1..beffcbef 100644 --- a/docs/public_models/HAILO8/HAILO8_image_denoising.rst +++ b/docs/public_models/HAILO8/HAILO8_image_denoising.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. 
|star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8/HAILO8_instance_segmentation.rst b/docs/public_models/HAILO8/HAILO8_instance_segmentation.rst index eaf9ae64..858c36e4 100644 --- a/docs/public_models/HAILO8/HAILO8_instance_segmentation.rst +++ b/docs/public_models/HAILO8/HAILO8_instance_segmentation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst b/docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst index a4ff41ed..196cbe1f 100644 --- a/docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst +++ b/docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8/HAILO8_object_detection.rst b/docs/public_models/HAILO8/HAILO8_object_detection.rst index 6e693ac1..321a01b9 100644 --- a/docs/public_models/HAILO8/HAILO8_object_detection.rst +++ b/docs/public_models/HAILO8/HAILO8_object_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. 
|star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8/HAILO8_person_attribute.rst b/docs/public_models/HAILO8/HAILO8_person_attribute.rst index 890eb665..2d2e06a2 100644 --- a/docs/public_models/HAILO8/HAILO8_person_attribute.rst +++ b/docs/public_models/HAILO8/HAILO8_person_attribute.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8/HAILO8_person_re_id.rst b/docs/public_models/HAILO8/HAILO8_person_re_id.rst index 8f1e6b0b..6ccd61fc 100644 --- a/docs/public_models/HAILO8/HAILO8_person_re_id.rst +++ b/docs/public_models/HAILO8/HAILO8_person_re_id.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8/HAILO8_pose_estimation.rst b/docs/public_models/HAILO8/HAILO8_pose_estimation.rst index dde6509d..0aad77f4 100644 --- a/docs/public_models/HAILO8/HAILO8_pose_estimation.rst +++ b/docs/public_models/HAILO8/HAILO8_pose_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. 
|star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst b/docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst index 9d75a379..354d962f 100644 --- a/docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst +++ b/docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8/HAILO8_single_person_pose_estimation.rst b/docs/public_models/HAILO8/HAILO8_single_person_pose_estimation.rst index fa1a34aa..95e6f964 100644 --- a/docs/public_models/HAILO8/HAILO8_single_person_pose_estimation.rst +++ b/docs/public_models/HAILO8/HAILO8_single_person_pose_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst b/docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst index 5404dd73..6abe7de5 100644 --- a/docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst +++ b/docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. 
|star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8/HAILO8_super_resolution.rst b/docs/public_models/HAILO8/HAILO8_super_resolution.rst index 3e46c7f1..b7cdbdf8 100644 --- a/docs/public_models/HAILO8/HAILO8_super_resolution.rst +++ b/docs/public_models/HAILO8/HAILO8_super_resolution.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8/HAILO8_zero_shot_classification.rst b/docs/public_models/HAILO8/HAILO8_zero_shot_classification.rst index f749d545..81285500 100644 --- a/docs/public_models/HAILO8/HAILO8_zero_shot_classification.rst +++ b/docs/public_models/HAILO8/HAILO8_zero_shot_classification.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8L/HAILO8l_classificaion.rst b/docs/public_models/HAILO8L/HAILO8l_classificaion.rst index 066359e4..b330d4c7 100644 --- a/docs/public_models/HAILO8L/HAILO8l_classificaion.rst +++ b/docs/public_models/HAILO8L/HAILO8l_classificaion.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. 
|star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst b/docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst index 99d0ac93..8d2849a6 100644 --- a/docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8L/HAILO8l_face_attribute.rst b/docs/public_models/HAILO8L/HAILO8l_face_attribute.rst index e1069ccb..4aeef03b 100644 --- a/docs/public_models/HAILO8L/HAILO8l_face_attribute.rst +++ b/docs/public_models/HAILO8L/HAILO8l_face_attribute.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8L/HAILO8l_face_detection.rst b/docs/public_models/HAILO8L/HAILO8l_face_detection.rst index c3d4b5b7..1f0c3bbd 100644 --- a/docs/public_models/HAILO8L/HAILO8l_face_detection.rst +++ b/docs/public_models/HAILO8L/HAILO8l_face_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. 
|star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8L/HAILO8l_face_recognition.rst b/docs/public_models/HAILO8L/HAILO8l_face_recognition.rst index cb14c23b..749fa5b8 100644 --- a/docs/public_models/HAILO8L/HAILO8l_face_recognition.rst +++ b/docs/public_models/HAILO8L/HAILO8l_face_recognition.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8L/HAILO8l_facial_landmark_detection.rst b/docs/public_models/HAILO8L/HAILO8l_facial_landmark_detection.rst index 6cc7e12a..0b118c8c 100644 --- a/docs/public_models/HAILO8L/HAILO8l_facial_landmark_detection.rst +++ b/docs/public_models/HAILO8L/HAILO8l_facial_landmark_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8L/HAILO8l_hand_landmark_detection.rst b/docs/public_models/HAILO8L/HAILO8l_hand_landmark_detection.rst index 60885d12..1023aa19 100644 --- a/docs/public_models/HAILO8L/HAILO8l_hand_landmark_detection.rst +++ b/docs/public_models/HAILO8L/HAILO8l_hand_landmark_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. 
|star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8L/HAILO8l_image_denoising.rst b/docs/public_models/HAILO8L/HAILO8l_image_denoising.rst index 05665c8e..fd46c1d8 100644 --- a/docs/public_models/HAILO8L/HAILO8l_image_denoising.rst +++ b/docs/public_models/HAILO8L/HAILO8l_image_denoising.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8L/HAILO8l_instance_segmentation.rst b/docs/public_models/HAILO8L/HAILO8l_instance_segmentation.rst index 5095a396..96ac3ecd 100644 --- a/docs/public_models/HAILO8L/HAILO8l_instance_segmentation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_instance_segmentation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8L/HAILO8l_low_light_enhancement.rst b/docs/public_models/HAILO8L/HAILO8l_low_light_enhancement.rst index 6f10e8ec..65dd732b 100644 --- a/docs/public_models/HAILO8L/HAILO8l_low_light_enhancement.rst +++ b/docs/public_models/HAILO8L/HAILO8l_low_light_enhancement.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. 
|star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8L/HAILO8l_object_detection.rst b/docs/public_models/HAILO8L/HAILO8l_object_detection.rst index 514b3a97..c89e92b6 100644 --- a/docs/public_models/HAILO8L/HAILO8l_object_detection.rst +++ b/docs/public_models/HAILO8L/HAILO8l_object_detection.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8L/HAILO8l_person_attribute.rst b/docs/public_models/HAILO8L/HAILO8l_person_attribute.rst index 4d890e97..2b463199 100644 --- a/docs/public_models/HAILO8L/HAILO8l_person_attribute.rst +++ b/docs/public_models/HAILO8L/HAILO8l_person_attribute.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8L/HAILO8l_person_re_id.rst b/docs/public_models/HAILO8L/HAILO8l_person_re_id.rst index 1ef85181..2c0f3374 100644 --- a/docs/public_models/HAILO8L/HAILO8l_person_re_id.rst +++ b/docs/public_models/HAILO8L/HAILO8l_person_re_id.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. 
|star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst b/docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst index da4875f9..d03d4d07 100644 --- a/docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8L/HAILO8l_semantic_segmentation.rst b/docs/public_models/HAILO8L/HAILO8l_semantic_segmentation.rst index 8e3711a1..5fb28496 100644 --- a/docs/public_models/HAILO8L/HAILO8l_semantic_segmentation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_semantic_segmentation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8L/HAILO8l_single_person_pose_estimation.rst b/docs/public_models/HAILO8L/HAILO8l_single_person_pose_estimation.rst index ddf2d3ef..e05b849b 100644 --- a/docs/public_models/HAILO8L/HAILO8l_single_person_pose_estimation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_single_person_pose_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. 
|star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8L/HAILO8l_stereo_depth_estimation.rst b/docs/public_models/HAILO8L/HAILO8l_stereo_depth_estimation.rst index 2784be43..28581831 100644 --- a/docs/public_models/HAILO8L/HAILO8l_stereo_depth_estimation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_stereo_depth_estimation.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8L/HAILO8l_super_resolution.rst b/docs/public_models/HAILO8L/HAILO8l_super_resolution.rst index fe5c5563..1c14410a 100644 --- a/docs/public_models/HAILO8L/HAILO8l_super_resolution.rst +++ b/docs/public_models/HAILO8L/HAILO8l_super_resolution.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. |star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. diff --git a/docs/public_models/HAILO8L/HAILO8l_zero_shot_classification.rst b/docs/public_models/HAILO8L/HAILO8l_zero_shot_classification.rst index 2350c833..245eddd8 100644 --- a/docs/public_models/HAILO8L/HAILO8l_zero_shot_classification.rst +++ b/docs/public_models/HAILO8L/HAILO8l_zero_shot_classification.rst @@ -2,10 +2,10 @@ Public Pre-Trained Models ========================= -.. |rocket| image:: docs/images/rocket.png +.. |rocket| image:: ../../images/rocket.png :width: 18 -.. |star| image:: docs/images/star.png +.. 
|star| image:: ../../images/star.png :width: 18 Here, we give the full list of publicly pre-trained models supported by the Hailo Model Zoo. From 18b5286174b95a094f0128ecb67f488824c54ee4 Mon Sep 17 00:00:00 2001 From: HailoModelZoo <87389434+HailoModelZoo@users.noreply.github.com> Date: Sun, 7 Apr 2024 12:38:51 +0300 Subject: [PATCH 10/17] Update README.rst --- README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index b0d0205f..7863073d 100644 --- a/README.rst +++ b/README.rst @@ -22,14 +22,14 @@ Hailo Model Zoo :height: 20 -.. |compiler| image:: https://img.shields.io/badge/Hailo%20Dataflow%20Compiler-3.26.0-brightgreen.svg +.. |compiler| image:: https://img.shields.io/badge/Hailo%20Dataflow%20Compiler-3.27.0-brightgreen.svg :target: https://hailo.ai/company-overview/contact-us/ :alt: Hailo Dataflow Compiler :width: 180 :height: 20 -.. |runtime| image:: https://img.shields.io/badge/HailoRT%20(optional)-4.16.0-brightgreen.svg +.. |runtime| image:: https://img.shields.io/badge/HailoRT%20(optional)-4.17.0-brightgreen.svg :target: https://hailo.ai/company-overview/contact-us/ :alt: HailoRT :width: 170 From a022c227dd2a0d55aa8df979f93c76e4511cd901 Mon Sep 17 00:00:00 2001 From: HailoModelZoo <87389434+HailoModelZoo@users.noreply.github.com> Date: Sun, 7 Apr 2024 14:47:48 +0300 Subject: [PATCH 11/17] Update PUBLIC_MODELS.rst --- docs/PUBLIC_MODELS.rst | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/docs/PUBLIC_MODELS.rst b/docs/PUBLIC_MODELS.rst index fbd8e8f6..ba7cde32 100644 --- a/docs/PUBLIC_MODELS.rst +++ b/docs/PUBLIC_MODELS.rst @@ -11,77 +11,77 @@ Hailo provides different pre-trained models in ONNX / TF formats and pre-compile - Hailo-15M * - Classification - `Link `_ - - `Link `_ + - `Link `_ - `Link `_ - `Link `_ * - Object Detection - `Link `_ - - `Link `_ + - `Link `_ - `Link `_ - `Link `_ * - Semantic Segmentation - `Link `_ - - `Link `_ + - `Link `_ 
- `Link `_ - `Link `_ * - Pose Estimation - `Link `_ - - `Link `_ + - `Link `_ - `Link `_ - `Link `_ * - Single Person Pose Estimation - `Link `_ - - `Link `_ + - `Link `_ - `Link `_ - `Link `_ * - Face Detection - `Link `_ - - `Link `_ + - `Link `_ - `Link `_ - `Link `_ * - Instance Segmentation - `Link `_ - - `Link `_ + - `Link `_ - `Link `_ - `Link `_ * - Depth Estimation - `Link `_ - - `Link `_ + - `Link `_ - `Link `_ - `Link `_ * - Facial Landmark Detection - `Link `_ - - `Link `_ + - `Link `_ - `Link `_ - `Link `_ * - Person Re-ID - `Link `_ - - `Link `_ + - `Link `_ - `Link `_ - `Link `_ * - Super Resolution - `Link `_ - - `Link `_ + - `Link `_ - `Link `_ - `Link `_ * - Face Recognition - `Link `_ - - `Link `_ + - `Link `_ - `Link `_ - `Link `_ * - Person Attribute - `Link `_ - - `Link `_ + - `Link `_ - `Link `_ - `Link `_ * - Face Attribute - `Link `_ - - `Link `_ + - `Link `_ - `Link `_ - `Link `_ * - Zero-shot Classification - `Link `_ - - `Link `_ + - `Link `_ - NA - NA * - Stereo Depth Estimation @@ -91,16 +91,16 @@ Hailo provides different pre-trained models in ONNX / TF formats and pre-compile - NA * - Low Light Enhancement - `Link `_ - - `Link `_ + - `Link `_ - `Link `_ - `Link `_ * - Image Denoising - `Link `_ - - `Link `_ + - `Link `_ - `Link `_ - `Link `_ * - Hand Landmark detection - `Link `_ - - `Link `_ + - `Link `_ - `Link `_ - `Link `_ From 27aa3127dcc1c3aec4114e3b53c79bb9375dc9e3 Mon Sep 17 00:00:00 2001 From: HailoModelZoo Date: Sun, 7 Apr 2024 15:06:18 +0300 Subject: [PATCH 12/17] fix nv12 link --- .../HAILO15H/HAILO15H_classification.rst | 60 ++++++------- .../HAILO15H/HAILO15H_depth_estimation.rst | 4 +- .../HAILO15H/HAILO15H_face_detection.rst | 8 +- .../HAILO15H/HAILO15H_face_recognition.rst | 2 +- .../HAILO15H_hand_landmark_detection.rst | 2 +- .../HAILO15H/HAILO15H_image_denoising.rst | 4 +- .../HAILO15H_instance_segmentation.rst | 18 ++-- .../HAILO15H_low_light_enhancement.rst | 4 +- .../HAILO15H/HAILO15H_object_detection.rst | 88 
+++++++++---------- .../HAILO15H/HAILO15H_person_re_id.rst | 4 +- .../HAILO15H/HAILO15H_pose_estimation.rst | 10 +-- .../HAILO15H_semantic_segmentation.rst | 12 +-- ...HAILO15H_single_person_pose_estimation.rst | 2 +- .../HAILO15H_stereo_depth_estimation.rst | 2 +- .../HAILO15H/HAILO15H_super_resolution.rst | 6 +- .../HAILO15H_zero_shot_classification.rst | 2 +- .../HAILO15M/HAILO15M_classification.rst | 60 ++++++------- .../HAILO15M/HAILO15M_depth_estimation.rst | 4 +- .../HAILO15M/HAILO15M_face_detection.rst | 8 +- .../HAILO15M/HAILO15M_face_recognition.rst | 2 +- .../HAILO15M_hand_landmark_detection.rst | 2 +- .../HAILO15M/HAILO15M_image_denoising.rst | 4 +- .../HAILO15M_instance_segmentation.rst | 18 ++-- .../HAILO15M_low_light_enhancement.rst | 4 +- .../HAILO15M/HAILO15M_object_detection.rst | 88 +++++++++---------- .../HAILO15M/HAILO15M_person_re_id.rst | 4 +- .../HAILO15M/HAILO15M_pose_estimation.rst | 10 +-- .../HAILO15M_semantic_segmentation.rst | 12 +-- ...HAILO15M_single_person_pose_estimation.rst | 2 +- .../HAILO15M_stereo_depth_estimation.rst | 2 +- .../HAILO15M/HAILO15M_super_resolution.rst | 6 +- .../HAILO15M_zero_shot_classification.rst | 2 +- 32 files changed, 228 insertions(+), 228 deletions(-) diff --git a/docs/public_models/HAILO15H/HAILO15H_classification.rst b/docs/public_models/HAILO15H/HAILO15H_classification.rst index 7daf2a2e..e85e60cd 100644 --- a/docs/public_models/HAILO15H/HAILO15H_classification.rst +++ b/docs/public_models/HAILO15H/HAILO15H_classification.rst @@ -54,7 +54,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - efficientnet_lite0 - 74.99 - 73.81 @@ -66,7 +66,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - efficientnet_lite1 - 76.68 - 76.21 @@ -78,7 +78,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - efficientnet_lite2 - 77.45 - 76.74 @@ -90,7 +90,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ 
+ - None * - efficientnet_lite3 - 79.29 - 78.42 @@ -102,7 +102,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - efficientnet_lite4 - 80.79 - 79.99 @@ -114,7 +114,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - efficientnet_m - 78.91 - 78.63 @@ -126,7 +126,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - efficientnet_s - 77.64 - 77.32 @@ -138,7 +138,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - hardnet39ds - 73.43 - 72.92 @@ -150,7 +150,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - hardnet68 - 75.47 - 75.04 @@ -162,7 +162,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - inception_v1 - 69.74 - 69.54 @@ -174,7 +174,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - mobilenet_v1 - 70.97 - 70.26 @@ -186,7 +186,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - mobilenet_v2_1.0 |rocket| - 71.78 - 71.0 @@ -198,7 +198,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - mobilenet_v2_1.4 - 74.18 - 73.18 @@ -210,7 +210,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - mobilenet_v3 - 72.21 - 71.73 @@ -222,7 +222,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - mobilenet_v3_large_minimalistic - 72.11 - 70.61 @@ -234,7 +234,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - regnetx_1.6gf - 77.05 - 76.75 @@ -246,7 +246,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - regnetx_800mf - 75.16 - 74.84 @@ -258,7 +258,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - repvgg_a1 - 74.4 - 72.4 @@ -270,7 +270,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - 
repvgg_a2 - 76.52 - 74.52 @@ -282,7 +282,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - resmlp12_relu - 75.26 - 74.32 @@ -294,7 +294,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - resnet_v1_18 - 71.26 - 71.06 @@ -306,7 +306,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - resnet_v1_34 - 72.7 - 72.14 @@ -318,7 +318,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - resnet_v1_50 |rocket| |star| - 75.12 - 74.47 @@ -330,7 +330,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - resnext26_32x4d - 76.18 - 75.78 @@ -342,7 +342,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - resnext50_32x4d - 79.31 - 78.21 @@ -354,7 +354,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - squeezenet_v1.1 - 59.85 - 59.4 @@ -366,7 +366,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - vit_base_bn |rocket| - 79.98 - 78.58 @@ -378,7 +378,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - vit_small_bn - 78.12 - 77.02 @@ -390,7 +390,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - vit_tiny_bn - 68.95 - 67.15 @@ -402,4 +402,4 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15H/HAILO15H_depth_estimation.rst b/docs/public_models/HAILO15H/HAILO15H_depth_estimation.rst index 259196e9..a61ab81f 100644 --- a/docs/public_models/HAILO15H/HAILO15H_depth_estimation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_depth_estimation.rst @@ -54,7 +54,7 @@ NYU - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - scdepthv3 - 0.48 - 0.51 @@ -66,4 +66,4 @@ NYU - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git 
a/docs/public_models/HAILO15H/HAILO15H_face_detection.rst b/docs/public_models/HAILO15H/HAILO15H_face_detection.rst index eb886f49..df5079e0 100644 --- a/docs/public_models/HAILO15H/HAILO15H_face_detection.rst +++ b/docs/public_models/HAILO15H/HAILO15H_face_detection.rst @@ -66,7 +66,7 @@ WiderFace - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - scrfd_10g - 82.13 - 82.03 @@ -78,7 +78,7 @@ WiderFace - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - scrfd_2.5g - 76.59 - 76.32 @@ -90,7 +90,7 @@ WiderFace - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - scrfd_500m - 68.98 - 68.88 @@ -102,4 +102,4 @@ WiderFace - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15H/HAILO15H_face_recognition.rst b/docs/public_models/HAILO15H/HAILO15H_face_recognition.rst index 5ae1e482..16434427 100644 --- a/docs/public_models/HAILO15H/HAILO15H_face_recognition.rst +++ b/docs/public_models/HAILO15H/HAILO15H_face_recognition.rst @@ -66,4 +66,4 @@ LFW - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst b/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst index eef4bfed..a21aa1de 100644 --- a/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst +++ b/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst @@ -49,4 +49,4 @@ Hand Landmark - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15H/HAILO15H_image_denoising.rst b/docs/public_models/HAILO15H/HAILO15H_image_denoising.rst index 062aef06..5621cc5b 100644 --- a/docs/public_models/HAILO15H/HAILO15H_image_denoising.rst +++ b/docs/public_models/HAILO15H/HAILO15H_image_denoising.rst @@ -54,7 +54,7 @@ BSD68 - `download `_ - `link `_ - `download `_ - - `download `_ + - None CBSD68 ^^^^^^ @@ -86,4 +86,4 @@ CBSD68 - `download `_ - `link `_ - `download 
`_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst b/docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst index 33d0c1c8..c2f0f074 100644 --- a/docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst @@ -54,7 +54,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolact_regnetx_800mf - 25.61 - 25.5 @@ -66,7 +66,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov5l_seg - 39.78 - 39.09 @@ -78,7 +78,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov5m_seg - 37.05 - 36.32 @@ -90,7 +90,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov5n_seg |star| - 23.35 - 22.75 @@ -102,7 +102,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov5s_seg - 31.57 - 30.8 @@ -114,7 +114,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov8m_seg - 40.6 - 39.85 @@ -126,7 +126,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov8n_seg - 30.32 - 29.68 @@ -138,7 +138,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov8s_seg - 36.63 - 36.13 @@ -150,4 +150,4 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst b/docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst index 058fafd6..cda6a43e 100644 --- a/docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst +++ b/docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst @@ -54,7 +54,7 @@ LOL - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - zero_dce_pp - 15.95 - 15.82 @@ -66,4 +66,4 @@ LOL - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git 
a/docs/public_models/HAILO15H/HAILO15H_object_detection.rst b/docs/public_models/HAILO15H/HAILO15H_object_detection.rst index a3723cde..169a3f92 100644 --- a/docs/public_models/HAILO15H/HAILO15H_object_detection.rst +++ b/docs/public_models/HAILO15H/HAILO15H_object_detection.rst @@ -54,7 +54,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - centernet_resnet_v1_50_postprocess - 31.78 - 29.23 @@ -66,7 +66,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - damoyolo_tinynasL20_T - 42.8 - 42.3 @@ -78,7 +78,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - damoyolo_tinynasL25_S - 46.53 - 45.34 @@ -90,7 +90,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - damoyolo_tinynasL35_M - 49.7 - 47.7 @@ -102,7 +102,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - detr_resnet_v1_18_bn - 33.91 - 30.91 @@ -114,7 +114,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - efficientdet_lite0 - 27.32 - 26.49 @@ -126,7 +126,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - efficientdet_lite1 - 32.27 - 31.72 @@ -138,7 +138,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - efficientdet_lite2 - 35.95 - 34.67 @@ -150,7 +150,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - nanodet_repvgg |star| - 29.3 - 28.53 @@ -162,7 +162,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - nanodet_repvgg_a12 - 33.73 - 32.13 @@ -174,7 +174,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - nanodet_repvgg_a1_640 - 33.28 - 32.88 @@ -186,7 +186,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - ssd_mobilenet_v1 |rocket| |star| - 23.19 - 22.29 @@ -198,7 +198,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - ssd_mobilenet_v2 
- 24.15 - 22.95 @@ -210,7 +210,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - tiny_yolov3 - 14.66 - 14.41 @@ -222,7 +222,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - tiny_yolov4 - 19.18 - 17.73 @@ -234,7 +234,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov3 |star| - 38.42 - 38.37 @@ -246,7 +246,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov3_416 - 37.73 - 37.53 @@ -258,7 +258,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov3_gluon |star| - 37.28 - 35.64 @@ -270,7 +270,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov3_gluon_416 |star| - 36.27 - 34.92 @@ -282,7 +282,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov4_leaky |star| - 42.37 - 41.08 @@ -294,7 +294,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov5m - 42.59 - 41.19 @@ -306,7 +306,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov5m6_6.1 - 50.67 - 48.97 @@ -318,7 +318,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov5m_6.1 - 44.8 - 43.36 @@ -330,7 +330,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov5m_wo_spp |rocket| - 43.06 - 41.06 @@ -354,7 +354,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov5s_c3tr - 37.13 - 35.63 @@ -366,7 +366,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov5xs_wo_spp - 33.18 - 32.2 @@ -378,7 +378,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov5xs_wo_spp_nms_core - 32.57 - 30.86 @@ -390,7 +390,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov6n - 34.28 - 32.28 @@ -402,7 +402,7 @@ COCO - `download `_ - `link `_ - `download `_ - - 
`download `_ + - None * - yolov6n_0.2.1 - 35.16 - 33.87 @@ -414,7 +414,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov7 - 50.59 - 47.89 @@ -426,7 +426,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov7_tiny - 37.07 - 36.07 @@ -438,7 +438,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov7e6 - 55.37 - 53.47 @@ -450,7 +450,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov8l - 52.44 - 51.78 @@ -462,7 +462,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov8m - 49.91 - 49.11 @@ -474,7 +474,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov8n - 37.02 - 36.32 @@ -486,7 +486,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov8s - 44.58 - 43.98 @@ -498,7 +498,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov8x - 53.45 - 52.75 @@ -510,7 +510,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov9c - 52.8 - 50.7 @@ -522,7 +522,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolox_l_leaky |star| - 48.69 - 46.59 @@ -534,7 +534,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolox_s_leaky - 38.12 - 37.27 @@ -546,7 +546,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolox_s_wide_leaky - 42.4 - 40.97 @@ -558,7 +558,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolox_tiny - 32.64 - 31.39 @@ -570,7 +570,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None VisDrone ^^^^^^^^ @@ -602,4 +602,4 @@ VisDrone - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15H/HAILO15H_person_re_id.rst b/docs/public_models/HAILO15H/HAILO15H_person_re_id.rst index 
c4bda3ef..ab9a3f69 100644 --- a/docs/public_models/HAILO15H/HAILO15H_person_re_id.rst +++ b/docs/public_models/HAILO15H/HAILO15H_person_re_id.rst @@ -54,7 +54,7 @@ Market1501 - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - repvgg_a0_person_reid_512 |star| - 89.9 - 89.3 @@ -66,4 +66,4 @@ Market1501 - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst b/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst index 5714e566..24c39345 100644 --- a/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst @@ -54,7 +54,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - centerpose_regnetx_800mf - 44.07 - 43.07 @@ -66,7 +66,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - centerpose_repvgg_a0 |star| - 39.17 - 37.17 @@ -78,7 +78,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov8m_pose - 64.26 - 61.66 @@ -90,7 +90,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov8s_pose - 59.2 - 55.6 @@ -102,4 +102,4 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst b/docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst index 14b75862..dc9c48f7 100644 --- a/docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst @@ -54,7 +54,7 @@ Cityscapes - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - segformer_b0_bn - 69.81 - 68.01 @@ -66,7 +66,7 @@ Cityscapes - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - stdc1 |rocket| - 74.57 - 73.92 @@ -78,7 +78,7 @@ Cityscapes - `download `_ - `link `_ - `download `_ - - `download `_ + - None Oxford-IIIT Pet ^^^^^^^^^^^^^^^ @@ -110,7 
+110,7 @@ Oxford-IIIT Pet - `download `_ - `link `_ - `download `_ - - `download `_ + - None Pascal VOC ^^^^^^^^^^ @@ -142,7 +142,7 @@ Pascal VOC - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - deeplab_v3_mobilenet_v2_wo_dilation - 71.46 - 71.26 @@ -154,4 +154,4 @@ Pascal VOC - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst b/docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst index b995f191..6e8bbc1a 100644 --- a/docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst @@ -66,4 +66,4 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst b/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst index 753c46fb..f0cde859 100644 --- a/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst @@ -54,4 +54,4 @@ N/A - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15H/HAILO15H_super_resolution.rst b/docs/public_models/HAILO15H/HAILO15H_super_resolution.rst index dd7a096e..26fc0774 100644 --- a/docs/public_models/HAILO15H/HAILO15H_super_resolution.rst +++ b/docs/public_models/HAILO15H/HAILO15H_super_resolution.rst @@ -54,7 +54,7 @@ BSD100 - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - espcn_x3 - 28.41 - 28.06 @@ -66,7 +66,7 @@ BSD100 - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - espcn_x4 - 26.83 - 26.58 @@ -78,4 +78,4 @@ BSD100 - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst b/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst index 
54886a20..b23f96ea 100644 --- a/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst +++ b/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst @@ -54,4 +54,4 @@ CIFAR100 - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15M/HAILO15M_classification.rst b/docs/public_models/HAILO15M/HAILO15M_classification.rst index 36305a44..87bf073d 100644 --- a/docs/public_models/HAILO15M/HAILO15M_classification.rst +++ b/docs/public_models/HAILO15M/HAILO15M_classification.rst @@ -54,7 +54,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - efficientnet_lite0 - 74.99 - 73.81 @@ -66,7 +66,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - efficientnet_lite1 - 76.68 - 76.21 @@ -78,7 +78,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - efficientnet_lite2 - 77.45 - 76.74 @@ -90,7 +90,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - efficientnet_lite3 - 79.29 - 78.42 @@ -102,7 +102,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - efficientnet_lite4 - 80.79 - 79.99 @@ -114,7 +114,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - efficientnet_m - 78.91 - 78.63 @@ -126,7 +126,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - efficientnet_s - 77.64 - 77.32 @@ -138,7 +138,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - hardnet39ds - 73.43 - 72.92 @@ -150,7 +150,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - hardnet68 - 75.47 - 75.04 @@ -162,7 +162,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - inception_v1 - 69.74 - 69.54 @@ -174,7 +174,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - mobilenet_v1 - 70.97 - 70.26 @@ -186,7 
+186,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - mobilenet_v2_1.0 |rocket| - 71.78 - 71.0 @@ -198,7 +198,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - mobilenet_v2_1.4 - 74.18 - 73.18 @@ -210,7 +210,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - mobilenet_v3 - 72.21 - 71.73 @@ -222,7 +222,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - mobilenet_v3_large_minimalistic - 72.11 - 70.61 @@ -234,7 +234,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - regnetx_1.6gf - 77.05 - 76.75 @@ -246,7 +246,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - regnetx_800mf - 75.16 - 74.84 @@ -258,7 +258,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - repvgg_a1 - 74.4 - 72.4 @@ -270,7 +270,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - repvgg_a2 - 76.52 - 74.52 @@ -282,7 +282,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - resmlp12_relu - 75.26 - 74.32 @@ -294,7 +294,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - resnet_v1_18 - 71.26 - 71.06 @@ -306,7 +306,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - resnet_v1_34 - 72.7 - 72.14 @@ -318,7 +318,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - resnet_v1_50 |rocket| |star| - 75.12 - 74.47 @@ -330,7 +330,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - resnext26_32x4d - 76.18 - 75.78 @@ -342,7 +342,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - resnext50_32x4d - 79.31 - 78.21 @@ -354,7 +354,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - squeezenet_v1.1 - 59.85 - 59.4 @@ -366,7 +366,7 @@ 
ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - vit_base_bn |rocket| - 79.98 - 78.58 @@ -378,7 +378,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - vit_small_bn - 78.12 - 77.02 @@ -390,7 +390,7 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - vit_tiny_bn - 68.95 - 67.15 @@ -402,4 +402,4 @@ ImageNet - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst b/docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst index 8eab643d..dcb8565a 100644 --- a/docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst @@ -54,7 +54,7 @@ NYU - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - scdepthv3 - 0.48 - 0.51 @@ -66,4 +66,4 @@ NYU - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15M/HAILO15M_face_detection.rst b/docs/public_models/HAILO15M/HAILO15M_face_detection.rst index db03510d..e12564e4 100644 --- a/docs/public_models/HAILO15M/HAILO15M_face_detection.rst +++ b/docs/public_models/HAILO15M/HAILO15M_face_detection.rst @@ -66,7 +66,7 @@ WiderFace - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - scrfd_10g - 82.13 - 82.03 @@ -78,7 +78,7 @@ WiderFace - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - scrfd_2.5g - 76.59 - 76.32 @@ -90,7 +90,7 @@ WiderFace - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - scrfd_500m - 68.98 - 68.88 @@ -102,4 +102,4 @@ WiderFace - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15M/HAILO15M_face_recognition.rst b/docs/public_models/HAILO15M/HAILO15M_face_recognition.rst index 5e7cc228..64f67714 100644 --- a/docs/public_models/HAILO15M/HAILO15M_face_recognition.rst +++ 
b/docs/public_models/HAILO15M/HAILO15M_face_recognition.rst @@ -66,4 +66,4 @@ LFW - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst b/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst index b592f2b9..2b682ddb 100644 --- a/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst +++ b/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst @@ -49,4 +49,4 @@ Hand Landmark - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15M/HAILO15M_image_denoising.rst b/docs/public_models/HAILO15M/HAILO15M_image_denoising.rst index fe537272..39f0ab90 100644 --- a/docs/public_models/HAILO15M/HAILO15M_image_denoising.rst +++ b/docs/public_models/HAILO15M/HAILO15M_image_denoising.rst @@ -54,7 +54,7 @@ BSD68 - `download `_ - `link `_ - `download `_ - - `download `_ + - None CBSD68 ^^^^^^ @@ -86,4 +86,4 @@ CBSD68 - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst b/docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst index a8c7439c..9e64f946 100644 --- a/docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst @@ -54,7 +54,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolact_regnetx_800mf - 25.61 - 25.5 @@ -66,7 +66,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov5l_seg - 39.78 - 39.09 @@ -78,7 +78,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov5m_seg - 37.05 - 36.32 @@ -90,7 +90,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov5n_seg |star| - 23.35 - 22.75 @@ -102,7 +102,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov5s_seg - 31.57 - 30.8 @@ -114,7 
+114,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov8m_seg - 40.6 - 39.85 @@ -126,7 +126,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov8n_seg - 30.32 - 29.68 @@ -138,7 +138,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov8s_seg - 36.63 - 36.13 @@ -150,4 +150,4 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst b/docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst index 6c117b70..899d4821 100644 --- a/docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst +++ b/docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst @@ -54,7 +54,7 @@ LOL - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - zero_dce_pp - 15.95 - 15.82 @@ -66,4 +66,4 @@ LOL - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15M/HAILO15M_object_detection.rst b/docs/public_models/HAILO15M/HAILO15M_object_detection.rst index 7c64242e..a6e632f0 100644 --- a/docs/public_models/HAILO15M/HAILO15M_object_detection.rst +++ b/docs/public_models/HAILO15M/HAILO15M_object_detection.rst @@ -54,7 +54,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - centernet_resnet_v1_50_postprocess - 31.78 - 29.23 @@ -66,7 +66,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - damoyolo_tinynasL20_T - 42.8 - 42.3 @@ -78,7 +78,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - damoyolo_tinynasL25_S - 46.53 - 45.34 @@ -90,7 +90,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - damoyolo_tinynasL35_M - 49.7 - 47.7 @@ -102,7 +102,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - detr_resnet_v1_18_bn - 33.91 - 30.91 @@ -114,7 +114,7 @@ COCO - `download `_ - `link `_ - `download 
`_ - - `download `_ + - None * - efficientdet_lite0 - 27.32 - 26.49 @@ -126,7 +126,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - efficientdet_lite1 - 32.27 - 31.72 @@ -138,7 +138,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - efficientdet_lite2 - 35.95 - 34.67 @@ -150,7 +150,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - nanodet_repvgg |star| - 29.3 - 28.53 @@ -162,7 +162,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - nanodet_repvgg_a12 - 33.73 - 32.13 @@ -174,7 +174,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - nanodet_repvgg_a1_640 - 33.28 - 32.88 @@ -186,7 +186,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - ssd_mobilenet_v1 |rocket| |star| - 23.19 - 22.29 @@ -198,7 +198,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - ssd_mobilenet_v2 - 24.15 - 22.95 @@ -210,7 +210,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - tiny_yolov3 - 14.66 - 14.41 @@ -222,7 +222,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - tiny_yolov4 - 19.18 - 17.73 @@ -234,7 +234,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov3 |star| - 38.42 - 38.37 @@ -246,7 +246,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov3_416 - 37.73 - 37.53 @@ -258,7 +258,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov3_gluon |star| - 37.28 - 35.64 @@ -270,7 +270,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov3_gluon_416 |star| - 36.27 - 34.92 @@ -282,7 +282,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov4_leaky |star| - 42.37 - 41.08 @@ -294,7 +294,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - 
yolov5m - 42.59 - 41.19 @@ -306,7 +306,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov5m6_6.1 - 50.67 - 48.97 @@ -318,7 +318,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov5m_6.1 - 44.8 - 43.36 @@ -330,7 +330,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov5m_wo_spp |rocket| - 43.06 - 41.06 @@ -354,7 +354,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov5s_c3tr - 37.13 - 35.63 @@ -366,7 +366,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov5xs_wo_spp - 33.18 - 32.2 @@ -378,7 +378,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov5xs_wo_spp_nms_core - 32.57 - 30.86 @@ -390,7 +390,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov6n - 34.28 - 32.28 @@ -402,7 +402,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov6n_0.2.1 - 35.16 - 33.87 @@ -414,7 +414,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov7 - 50.59 - 47.89 @@ -426,7 +426,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov7_tiny - 37.07 - 36.07 @@ -438,7 +438,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov7e6 - 55.37 - 53.47 @@ -450,7 +450,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov8l - 52.44 - 51.78 @@ -462,7 +462,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov8m - 49.91 - 49.11 @@ -474,7 +474,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov8n - 37.02 - 36.32 @@ -486,7 +486,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov8s - 44.58 - 43.98 @@ -498,7 +498,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov8x - 53.45 
- 52.75 @@ -510,7 +510,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov9c - 52.8 - 50.7 @@ -522,7 +522,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolox_l_leaky |star| - 48.69 - 46.59 @@ -534,7 +534,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolox_s_leaky - 38.12 - 37.27 @@ -546,7 +546,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolox_s_wide_leaky - 42.4 - 40.97 @@ -558,7 +558,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolox_tiny - 32.64 - 31.39 @@ -570,7 +570,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None VisDrone ^^^^^^^^ @@ -602,4 +602,4 @@ VisDrone - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst b/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst index 856ce8d3..e694d3fc 100644 --- a/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst +++ b/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst @@ -54,7 +54,7 @@ Market1501 - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - repvgg_a0_person_reid_512 |star| - 89.9 - 89.3 @@ -66,4 +66,4 @@ Market1501 - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst b/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst index 6f835b35..3c181dc9 100644 --- a/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst @@ -54,7 +54,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - centerpose_regnetx_800mf - 44.07 - 43.07 @@ -66,7 +66,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - centerpose_repvgg_a0 |star| - 39.17 - 37.17 @@ -78,7 +78,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download 
`_ + - None * - yolov8m_pose - 64.26 - 61.66 @@ -90,7 +90,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov8s_pose - 59.2 - 55.6 @@ -102,4 +102,4 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst b/docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst index 97a67634..038fc9b4 100644 --- a/docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst @@ -54,7 +54,7 @@ Cityscapes - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - segformer_b0_bn - 69.81 - 68.01 @@ -66,7 +66,7 @@ Cityscapes - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - stdc1 |rocket| - 74.57 - 73.92 @@ -78,7 +78,7 @@ Cityscapes - `download `_ - `link `_ - `download `_ - - `download `_ + - None Oxford-IIIT Pet ^^^^^^^^^^^^^^^ @@ -110,7 +110,7 @@ Oxford-IIIT Pet - `download `_ - `link `_ - `download `_ - - `download `_ + - None Pascal VOC ^^^^^^^^^^ @@ -142,7 +142,7 @@ Pascal VOC - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - deeplab_v3_mobilenet_v2_wo_dilation - 71.46 - 71.26 @@ -154,4 +154,4 @@ Pascal VOC - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst b/docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst index c6f55457..13650e24 100644 --- a/docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst @@ -66,4 +66,4 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst b/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst index e00999bc..9cd9ce8c 100644 --- 
a/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst @@ -54,4 +54,4 @@ N/A - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15M/HAILO15M_super_resolution.rst b/docs/public_models/HAILO15M/HAILO15M_super_resolution.rst index cb240f87..1edabb30 100644 --- a/docs/public_models/HAILO15M/HAILO15M_super_resolution.rst +++ b/docs/public_models/HAILO15M/HAILO15M_super_resolution.rst @@ -54,7 +54,7 @@ BSD100 - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - espcn_x3 - 28.41 - 28.06 @@ -66,7 +66,7 @@ BSD100 - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - espcn_x4 - 26.83 - 26.58 @@ -78,4 +78,4 @@ BSD100 - `download `_ - `link `_ - `download `_ - - `download `_ + - None diff --git a/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst b/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst index d9a75898..b68fe45f 100644 --- a/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst +++ b/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst @@ -54,4 +54,4 @@ CIFAR100 - `download `_ - `link `_ - `download `_ - - `download `_ + - None From 585648ce74856cf84cb04277a5c1065c9544902b Mon Sep 17 00:00:00 2001 From: HailoModelZoo Date: Sun, 7 Apr 2024 16:39:22 +0300 Subject: [PATCH 13/17] fix title --- docs/public_models/HAILO15H/HAILO15H_classification.rst | 6 ++---- docs/public_models/HAILO15H/HAILO15H_depth_estimation.rst | 7 ++----- docs/public_models/HAILO15H/HAILO15H_face_attribute.rst | 7 ++----- docs/public_models/HAILO15H/HAILO15H_face_detection.rst | 7 ++----- docs/public_models/HAILO15H/HAILO15H_face_recognition.rst | 7 ++----- .../HAILO15H/HAILO15H_facial_landmark_detection.rst | 7 ++----- .../HAILO15H/HAILO15H_hand_landmark_detection.rst | 7 ++----- docs/public_models/HAILO15H/HAILO15H_image_denoising.rst | 7 ++----- 
.../HAILO15H/HAILO15H_instance_segmentation.rst | 7 ++----- .../HAILO15H/HAILO15H_low_light_enhancement.rst | 7 ++----- docs/public_models/HAILO15H/HAILO15H_object_detection.rst | 7 ++----- docs/public_models/HAILO15H/HAILO15H_person_attribute.rst | 7 ++----- docs/public_models/HAILO15H/HAILO15H_person_re_id.rst | 7 ++----- docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst | 7 ++----- .../HAILO15H/HAILO15H_semantic_segmentation.rst | 7 ++----- .../HAILO15H/HAILO15H_single_person_pose_estimation.rst | 7 ++----- .../HAILO15H/HAILO15H_stereo_depth_estimation.rst | 7 ++----- docs/public_models/HAILO15H/HAILO15H_super_resolution.rst | 7 ++----- .../HAILO15H/HAILO15H_zero_shot_classification.rst | 7 ++----- docs/public_models/HAILO15M/HAILO15M_classification.rst | 7 ++----- docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst | 7 ++----- docs/public_models/HAILO15M/HAILO15M_face_attribute.rst | 7 ++----- docs/public_models/HAILO15M/HAILO15M_face_detection.rst | 7 ++----- docs/public_models/HAILO15M/HAILO15M_face_recognition.rst | 7 ++----- .../HAILO15M/HAILO15M_facial_landmark_detection.rst | 7 ++----- .../HAILO15M/HAILO15M_hand_landmark_detection.rst | 7 ++----- docs/public_models/HAILO15M/HAILO15M_image_denoising.rst | 7 ++----- .../HAILO15M/HAILO15M_instance_segmentation.rst | 7 ++----- .../HAILO15M/HAILO15M_low_light_enhancement.rst | 7 ++----- docs/public_models/HAILO15M/HAILO15M_object_detection.rst | 7 ++----- docs/public_models/HAILO15M/HAILO15M_person_attribute.rst | 6 ++---- docs/public_models/HAILO15M/HAILO15M_person_re_id.rst | 7 ++----- docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst | 7 ++----- .../HAILO15M/HAILO15M_semantic_segmentation.rst | 7 ++----- .../HAILO15M/HAILO15M_single_person_pose_estimation.rst | 7 ++----- .../HAILO15M/HAILO15M_stereo_depth_estimation.rst | 7 ++----- docs/public_models/HAILO15M/HAILO15M_super_resolution.rst | 7 ++----- .../HAILO15M/HAILO15M_zero_shot_classification.rst | 7 ++----- 
docs/public_models/HAILO8/HAILO8_classification.rst | 7 ++----- docs/public_models/HAILO8/HAILO8_depth_estimation.rst | 7 ++----- docs/public_models/HAILO8/HAILO8_face_attribute.rst | 7 ++----- docs/public_models/HAILO8/HAILO8_face_detection.rst | 6 ++---- docs/public_models/HAILO8/HAILO8_face_recognition.rst | 7 ++----- .../HAILO8/HAILO8_facial_landmark_detection.rst | 7 ++----- .../HAILO8/HAILO8_hand_landmark_detection.rst | 6 ++---- docs/public_models/HAILO8/HAILO8_image_denoising.rst | 7 ++----- docs/public_models/HAILO8/HAILO8_instance_segmentation.rst | 7 ++----- docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst | 7 ++----- docs/public_models/HAILO8/HAILO8_object_detection.rst | 7 ++----- docs/public_models/HAILO8/HAILO8_person_attribute.rst | 7 ++----- docs/public_models/HAILO8/HAILO8_person_re_id.rst | 7 ++----- docs/public_models/HAILO8/HAILO8_pose_estimation.rst | 7 ++----- docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst | 7 ++----- .../HAILO8/HAILO8_single_person_pose_estimation.rst | 7 ++----- .../HAILO8/HAILO8_stereo_depth_estimation.rst | 7 ++----- docs/public_models/HAILO8/HAILO8_super_resolution.rst | 7 ++----- .../HAILO8/HAILO8_zero_shot_classification.rst | 7 ++----- docs/public_models/HAILO8L/HAILO8l_classification.rst | 7 ++----- docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst | 7 ++----- docs/public_models/HAILO8L/HAILO8l_face_attribute.rst | 7 ++----- docs/public_models/HAILO8L/HAILO8l_face_detection.rst | 7 ++----- docs/public_models/HAILO8L/HAILO8l_face_recognition.rst | 7 ++----- .../HAILO8L/HAILO8l_facial_landmark_detection.rst | 7 ++----- .../HAILO8L/HAILO8l_hand_landmark_detection.rst | 7 ++----- docs/public_models/HAILO8L/HAILO8l_image_denoising.rst | 7 ++----- .../HAILO8L/HAILO8l_instance_segmentation.rst | 7 ++----- .../HAILO8L/HAILO8l_low_light_enhancement.rst | 7 ++----- docs/public_models/HAILO8L/HAILO8l_object_detection.rst | 7 ++----- docs/public_models/HAILO8L/HAILO8l_person_attribute.rst | 7 ++----- 
docs/public_models/HAILO8L/HAILO8l_person_re_id.rst | 7 ++----- docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst | 7 ++----- .../HAILO8L/HAILO8l_semantic_segmentation.rst | 7 ++----- .../HAILO8L/HAILO8l_single_person_pose_estimation.rst | 7 ++----- .../HAILO8L/HAILO8l_stereo_depth_estimation.rst | 7 ++----- docs/public_models/HAILO8L/HAILO8l_super_resolution.rst | 7 ++----- .../HAILO8L/HAILO8l_zero_shot_classification.rst | 7 ++----- 76 files changed, 152 insertions(+), 376 deletions(-) diff --git a/docs/public_models/HAILO15H/HAILO15H_classification.rst b/docs/public_models/HAILO15H/HAILO15H_classification.rst index e85e60cd..7295b4fd 100644 --- a/docs/public_models/HAILO15H/HAILO15H_classification.rst +++ b/docs/public_models/HAILO15H/HAILO15H_classification.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Classification Hailo15H +=================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - * `Classification`_ .. _Classification: diff --git a/docs/public_models/HAILO15H/HAILO15H_depth_estimation.rst b/docs/public_models/HAILO15H/HAILO15H_depth_estimation.rst index a61ab81f..6cc4a273 100644 --- a/docs/public_models/HAILO15H/HAILO15H_depth_estimation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_depth_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Depth Estimation Hailo15H +===================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Depth Estimation`_ .. _Depth Estimation: diff --git a/docs/public_models/HAILO15H/HAILO15H_face_attribute.rst b/docs/public_models/HAILO15H/HAILO15H_face_attribute.rst index 301f3f09..94f050a0 100644 --- a/docs/public_models/HAILO15H/HAILO15H_face_attribute.rst +++ b/docs/public_models/HAILO15H/HAILO15H_face_attribute.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Face Attribute Hailo15H +=================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Face Attribute`_ .. _Face Attribute: diff --git a/docs/public_models/HAILO15H/HAILO15H_face_detection.rst b/docs/public_models/HAILO15H/HAILO15H_face_detection.rst index df5079e0..fe3d9389 100644 --- a/docs/public_models/HAILO15H/HAILO15H_face_detection.rst +++ b/docs/public_models/HAILO15H/HAILO15H_face_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Face Detection Hailo15H +=================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Face Detection`_ .. _Face Detection: diff --git a/docs/public_models/HAILO15H/HAILO15H_face_recognition.rst b/docs/public_models/HAILO15H/HAILO15H_face_recognition.rst index 16434427..2c561346 100644 --- a/docs/public_models/HAILO15H/HAILO15H_face_recognition.rst +++ b/docs/public_models/HAILO15H/HAILO15H_face_recognition.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Face Recognition Hailo15H +===================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Face Recognition`_ .. _Face Recognition: diff --git a/docs/public_models/HAILO15H/HAILO15H_facial_landmark_detection.rst b/docs/public_models/HAILO15H/HAILO15H_facial_landmark_detection.rst index c9ec9011..fbef587a 100644 --- a/docs/public_models/HAILO15H/HAILO15H_facial_landmark_detection.rst +++ b/docs/public_models/HAILO15H/HAILO15H_facial_landmark_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Facial Landmark Detection Hailo15H +============================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Facial Landmark Detection`_ .. _Facial Landmark Detection: diff --git a/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst b/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst index a21aa1de..07c2b70a 100644 --- a/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst +++ b/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Hand Landmark detection HAILO15H +============================================================ .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Hand Landmark detection`_ .. _Hand Landmark detection: diff --git a/docs/public_models/HAILO15H/HAILO15H_image_denoising.rst b/docs/public_models/HAILO15H/HAILO15H_image_denoising.rst index 5621cc5b..399897d3 100644 --- a/docs/public_models/HAILO15H/HAILO15H_image_denoising.rst +++ b/docs/public_models/HAILO15H/HAILO15H_image_denoising.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Image Denoising HAILO15H +==================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Image Denoising`_ .. _Image Denoising: diff --git a/docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst b/docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst index c2f0f074..252962f3 100644 --- a/docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Instance Segmentation HAILO15H +========================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Instance Segmentation`_ .. _Instance Segmentation: diff --git a/docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst b/docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst index cda6a43e..aaa205d4 100644 --- a/docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst +++ b/docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Low Light Enhancement HAILO15H +========================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Low Light Enhancement`_ .. _Low Light Enhancement: diff --git a/docs/public_models/HAILO15H/HAILO15H_object_detection.rst b/docs/public_models/HAILO15H/HAILO15H_object_detection.rst index 169a3f92..320a4715 100644 --- a/docs/public_models/HAILO15H/HAILO15H_object_detection.rst +++ b/docs/public_models/HAILO15H/HAILO15H_object_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Object Detection HAILO15H +===================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Object Detection`_ .. _Object Detection: diff --git a/docs/public_models/HAILO15H/HAILO15H_person_attribute.rst b/docs/public_models/HAILO15H/HAILO15H_person_attribute.rst index fbdc040b..10854075 100644 --- a/docs/public_models/HAILO15H/HAILO15H_person_attribute.rst +++ b/docs/public_models/HAILO15H/HAILO15H_person_attribute.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Person Attribute HAILO15H +===================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Person Attribute`_ .. _Person Attribute: diff --git a/docs/public_models/HAILO15H/HAILO15H_person_re_id.rst b/docs/public_models/HAILO15H/HAILO15H_person_re_id.rst index ab9a3f69..1ca322d6 100644 --- a/docs/public_models/HAILO15H/HAILO15H_person_re_id.rst +++ b/docs/public_models/HAILO15H/HAILO15H_person_re_id.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Person Re-ID HAILO15H +================================================= .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Person Re-ID`_ .. _Person Re-ID: diff --git a/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst b/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst index 24c39345..0f1d2bf6 100644 --- a/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Pose Estimation HAILO15H +==================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Pose Estimation`_ .. _Pose Estimation: diff --git a/docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst b/docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst index dc9c48f7..9029489b 100644 --- a/docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Semantic Segmentation HAILO15H +========================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Semantic Segmentation`_ .. _Semantic Segmentation: diff --git a/docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst b/docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst index 6e8bbc1a..d9937655 100644 --- a/docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Single Person Pose Estimation HAILO15H +================================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Single Person Pose Estimation`_ .. _Single Person Pose Estimation: diff --git a/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst b/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst index f0cde859..b7ad64c6 100644 --- a/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Stereo Depth Estimation HAILO15H +============================================================ .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Stereo Depth Estimation`_ .. _Stereo Depth Estimation: diff --git a/docs/public_models/HAILO15H/HAILO15H_super_resolution.rst b/docs/public_models/HAILO15H/HAILO15H_super_resolution.rst index 26fc0774..425f5a9c 100644 --- a/docs/public_models/HAILO15H/HAILO15H_super_resolution.rst +++ b/docs/public_models/HAILO15H/HAILO15H_super_resolution.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Super Resolution HAILO15H +===================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Super Resolution`_ .. _Super Resolution: diff --git a/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst b/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst index b23f96ea..c13b5f64 100644 --- a/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst +++ b/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Zero-shot Classification HAILO15H +============================================================= .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Zero-shot Classification`_ .. _Zero-shot Classification: diff --git a/docs/public_models/HAILO15M/HAILO15M_classification.rst b/docs/public_models/HAILO15M/HAILO15M_classification.rst index 87bf073d..d0ae1156 100644 --- a/docs/public_models/HAILO15M/HAILO15M_classification.rst +++ b/docs/public_models/HAILO15M/HAILO15M_classification.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Classification HAILO15M +=================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Classification`_ .. _Classification: diff --git a/docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst b/docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst index dcb8565a..3d0801f5 100644 --- a/docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Depth Estimation HAILO15M +===================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Depth Estimation`_ .. _Depth Estimation: diff --git a/docs/public_models/HAILO15M/HAILO15M_face_attribute.rst b/docs/public_models/HAILO15M/HAILO15M_face_attribute.rst index 48f95850..b83dc282 100644 --- a/docs/public_models/HAILO15M/HAILO15M_face_attribute.rst +++ b/docs/public_models/HAILO15M/HAILO15M_face_attribute.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Face Attribute HAILO15M +=================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Face Attribute`_ .. _Face Attribute: diff --git a/docs/public_models/HAILO15M/HAILO15M_face_detection.rst b/docs/public_models/HAILO15M/HAILO15M_face_detection.rst index e12564e4..9f372c4c 100644 --- a/docs/public_models/HAILO15M/HAILO15M_face_detection.rst +++ b/docs/public_models/HAILO15M/HAILO15M_face_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Face Detection HAILO15M +=================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Face Detection`_ .. _Face Detection: diff --git a/docs/public_models/HAILO15M/HAILO15M_face_recognition.rst b/docs/public_models/HAILO15M/HAILO15M_face_recognition.rst index 64f67714..ee98061a 100644 --- a/docs/public_models/HAILO15M/HAILO15M_face_recognition.rst +++ b/docs/public_models/HAILO15M/HAILO15M_face_recognition.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Face Recognition HAILO15M +===================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Face Recognition`_ .. _Face Recognition: diff --git a/docs/public_models/HAILO15M/HAILO15M_facial_landmark_detection.rst b/docs/public_models/HAILO15M/HAILO15M_facial_landmark_detection.rst index 6480dfd5..6832999c 100644 --- a/docs/public_models/HAILO15M/HAILO15M_facial_landmark_detection.rst +++ b/docs/public_models/HAILO15M/HAILO15M_facial_landmark_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Facial Landmark Detection HAILO15M +============================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Facial Landmark Detection`_ .. _Facial Landmark Detection: diff --git a/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst b/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst index 2b682ddb..926e685c 100644 --- a/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst +++ b/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Hand Landmark detection HAILO15M +============================================================ .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Hand Landmark detection`_ .. _Hand Landmark detection: diff --git a/docs/public_models/HAILO15M/HAILO15M_image_denoising.rst b/docs/public_models/HAILO15M/HAILO15M_image_denoising.rst index 39f0ab90..f5aa3fcb 100644 --- a/docs/public_models/HAILO15M/HAILO15M_image_denoising.rst +++ b/docs/public_models/HAILO15M/HAILO15M_image_denoising.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Image Denoising HAILO15M +==================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Image Denoising`_ .. _Image Denoising: diff --git a/docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst b/docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst index 9e64f946..56b0950c 100644 --- a/docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Instance Segmentation HAILO15M +========================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Instance Segmentation`_ .. _Instance Segmentation: diff --git a/docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst b/docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst index 899d4821..d208be5f 100644 --- a/docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst +++ b/docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Low Light Enhancement HAILO15M +========================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Low Light Enhancement`_ .. _Low Light Enhancement: diff --git a/docs/public_models/HAILO15M/HAILO15M_object_detection.rst b/docs/public_models/HAILO15M/HAILO15M_object_detection.rst index a6e632f0..49f82a63 100644 --- a/docs/public_models/HAILO15M/HAILO15M_object_detection.rst +++ b/docs/public_models/HAILO15M/HAILO15M_object_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Object Detection HAILO15M +===================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Object Detection`_ .. _Object Detection: diff --git a/docs/public_models/HAILO15M/HAILO15M_person_attribute.rst b/docs/public_models/HAILO15M/HAILO15M_person_attribute.rst index 997437d7..32e4667f 100644 --- a/docs/public_models/HAILO15M/HAILO15M_person_attribute.rst +++ b/docs/public_models/HAILO15M/HAILO15M_person_attribute.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Person Attribute HAILO15M +===================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - * `Person Attribute`_ .. _Person Attribute: diff --git a/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst b/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst index e694d3fc..c5197613 100644 --- a/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst +++ b/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Person Re-ID HAILO15M +================================================= .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Person Re-ID`_ .. _Person Re-ID: diff --git a/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst b/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst index 3c181dc9..a53b619e 100644 --- a/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Pose Estimation HAILO15M +==================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Pose Estimation`_ .. _Pose Estimation: diff --git a/docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst b/docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst index 038fc9b4..0606d492 100644 --- a/docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Semantic Segmentation HAILO15M +========================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Semantic Segmentation`_ .. _Semantic Segmentation: diff --git a/docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst b/docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst index 13650e24..cbed9102 100644 --- a/docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Single Person Pose Estimation HAILO15M +================================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Single Person Pose Estimation`_ .. _Single Person Pose Estimation: diff --git a/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst b/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst index 9cd9ce8c..241f0d24 100644 --- a/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Stereo Depth Estimation HAILO15M +============================================================ .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Stereo Depth Estimation`_ .. _Stereo Depth Estimation: diff --git a/docs/public_models/HAILO15M/HAILO15M_super_resolution.rst b/docs/public_models/HAILO15M/HAILO15M_super_resolution.rst index 1edabb30..6610b963 100644 --- a/docs/public_models/HAILO15M/HAILO15M_super_resolution.rst +++ b/docs/public_models/HAILO15M/HAILO15M_super_resolution.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Super Resolution HAILO15M +===================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Super Resolution`_ .. _Super Resolution: diff --git a/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst b/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst index b68fe45f..2bd30014 100644 --- a/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst +++ b/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Zero-shot Classification HAILO15M +============================================================= .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Zero-shot Classification`_ .. _Zero-shot Classification: diff --git a/docs/public_models/HAILO8/HAILO8_classification.rst b/docs/public_models/HAILO8/HAILO8_classification.rst index afdcb318..f5c93cd0 100644 --- a/docs/public_models/HAILO8/HAILO8_classification.rst +++ b/docs/public_models/HAILO8/HAILO8_classification.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Classification HAILO8 +================================================= .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Classification`_ .. _Classification: diff --git a/docs/public_models/HAILO8/HAILO8_depth_estimation.rst b/docs/public_models/HAILO8/HAILO8_depth_estimation.rst index 6482ac96..3dc9a4d4 100644 --- a/docs/public_models/HAILO8/HAILO8_depth_estimation.rst +++ b/docs/public_models/HAILO8/HAILO8_depth_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Depth Estimation HAILO8 +=================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Depth Estimation`_ .. _Depth Estimation: diff --git a/docs/public_models/HAILO8/HAILO8_face_attribute.rst b/docs/public_models/HAILO8/HAILO8_face_attribute.rst index 3b6e2c87..2039187c 100644 --- a/docs/public_models/HAILO8/HAILO8_face_attribute.rst +++ b/docs/public_models/HAILO8/HAILO8_face_attribute.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Face Attribute HAILO8 +================================================= .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Face Attribute`_ .. _Face Attribute: diff --git a/docs/public_models/HAILO8/HAILO8_face_detection.rst b/docs/public_models/HAILO8/HAILO8_face_detection.rst index 8fcd7de3..b7e644aa 100644 --- a/docs/public_models/HAILO8/HAILO8_face_detection.rst +++ b/docs/public_models/HAILO8/HAILO8_face_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Face Detection HAILO8 +================================================= .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - * `Face Detection`_ .. _Face Detection: diff --git a/docs/public_models/HAILO8/HAILO8_face_recognition.rst b/docs/public_models/HAILO8/HAILO8_face_recognition.rst index eaa842e0..1d2b8358 100644 --- a/docs/public_models/HAILO8/HAILO8_face_recognition.rst +++ b/docs/public_models/HAILO8/HAILO8_face_recognition.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Face Recognition HAILO8 +=================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Face Recognition`_ .. _Face Recognition: diff --git a/docs/public_models/HAILO8/HAILO8_facial_landmark_detection.rst b/docs/public_models/HAILO8/HAILO8_facial_landmark_detection.rst index 84de8044..83969505 100644 --- a/docs/public_models/HAILO8/HAILO8_facial_landmark_detection.rst +++ b/docs/public_models/HAILO8/HAILO8_facial_landmark_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Facial Landmark Detection HAILO8 +============================================================ .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Facial Landmark Detection`_ .. _Facial Landmark Detection: diff --git a/docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst b/docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst index 6b892c37..6de96011 100644 --- a/docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst +++ b/docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Hand Landmark detection HAILO8 +========================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,7 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - * `Hand Landmark detection`_ .. _Hand Landmark detection: diff --git a/docs/public_models/HAILO8/HAILO8_image_denoising.rst b/docs/public_models/HAILO8/HAILO8_image_denoising.rst index beffcbef..a9e466b1 100644 --- a/docs/public_models/HAILO8/HAILO8_image_denoising.rst +++ b/docs/public_models/HAILO8/HAILO8_image_denoising.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Image Denoising HAILO8 +================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Image Denoising`_ .. _Image Denoising: diff --git a/docs/public_models/HAILO8/HAILO8_instance_segmentation.rst b/docs/public_models/HAILO8/HAILO8_instance_segmentation.rst index 858c36e4..ad803249 100644 --- a/docs/public_models/HAILO8/HAILO8_instance_segmentation.rst +++ b/docs/public_models/HAILO8/HAILO8_instance_segmentation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Instance Segmentation HAILO8 +======================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Instance Segmentation`_ .. _Instance Segmentation: diff --git a/docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst b/docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst index 196cbe1f..2ce4bb19 100644 --- a/docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst +++ b/docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Low Light Enhancement HAILO8 +======================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Low Light Enhancement`_ .. _Low Light Enhancement: diff --git a/docs/public_models/HAILO8/HAILO8_object_detection.rst b/docs/public_models/HAILO8/HAILO8_object_detection.rst index 321a01b9..bec55f0f 100644 --- a/docs/public_models/HAILO8/HAILO8_object_detection.rst +++ b/docs/public_models/HAILO8/HAILO8_object_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Object Detection HAILO8 +=================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Object Detection`_ .. _Object Detection: diff --git a/docs/public_models/HAILO8/HAILO8_person_attribute.rst b/docs/public_models/HAILO8/HAILO8_person_attribute.rst index 2d2e06a2..95fb884e 100644 --- a/docs/public_models/HAILO8/HAILO8_person_attribute.rst +++ b/docs/public_models/HAILO8/HAILO8_person_attribute.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Person Attribute HAILO8 +=================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Person Attribute`_ .. _Person Attribute: diff --git a/docs/public_models/HAILO8/HAILO8_person_re_id.rst b/docs/public_models/HAILO8/HAILO8_person_re_id.rst index 6ccd61fc..d6537c0a 100644 --- a/docs/public_models/HAILO8/HAILO8_person_re_id.rst +++ b/docs/public_models/HAILO8/HAILO8_person_re_id.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Person Re-ID HAILO8 +=============================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Person Re-ID`_ .. _Person Re-ID: diff --git a/docs/public_models/HAILO8/HAILO8_pose_estimation.rst b/docs/public_models/HAILO8/HAILO8_pose_estimation.rst index 0aad77f4..44899417 100644 --- a/docs/public_models/HAILO8/HAILO8_pose_estimation.rst +++ b/docs/public_models/HAILO8/HAILO8_pose_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Pose Estimation HAILO8 +================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Pose Estimation`_ .. _Pose Estimation: diff --git a/docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst b/docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst index 354d962f..917f0045 100644 --- a/docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst +++ b/docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Semantic Segmentation HAILO8 +======================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Semantic Segmentation`_ .. _Semantic Segmentation: diff --git a/docs/public_models/HAILO8/HAILO8_single_person_pose_estimation.rst b/docs/public_models/HAILO8/HAILO8_single_person_pose_estimation.rst index 95e6f964..e35fca45 100644 --- a/docs/public_models/HAILO8/HAILO8_single_person_pose_estimation.rst +++ b/docs/public_models/HAILO8/HAILO8_single_person_pose_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Single Person Pose Estimation HAILO8 +================================================================ .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Single Person Pose Estimation`_ .. _Single Person Pose Estimation: diff --git a/docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst b/docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst index 6abe7de5..82934488 100644 --- a/docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst +++ b/docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Stereo Depth Estimation HAILO8 +========================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Stereo Depth Estimation`_ .. _Stereo Depth Estimation: diff --git a/docs/public_models/HAILO8/HAILO8_super_resolution.rst b/docs/public_models/HAILO8/HAILO8_super_resolution.rst index b7cdbdf8..8de3c0ea 100644 --- a/docs/public_models/HAILO8/HAILO8_super_resolution.rst +++ b/docs/public_models/HAILO8/HAILO8_super_resolution.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Super Resolution HAILO8 +=================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Super Resolution`_ .. _Super Resolution: diff --git a/docs/public_models/HAILO8/HAILO8_zero_shot_classification.rst b/docs/public_models/HAILO8/HAILO8_zero_shot_classification.rst index 81285500..93858ad6 100644 --- a/docs/public_models/HAILO8/HAILO8_zero_shot_classification.rst +++ b/docs/public_models/HAILO8/HAILO8_zero_shot_classification.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Zero-shot Classification HAILO8 +=========================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Zero-shot Classification`_ .. _Zero-shot Classification: diff --git a/docs/public_models/HAILO8L/HAILO8l_classification.rst b/docs/public_models/HAILO8L/HAILO8l_classification.rst index b330d4c7..bcf6eb8a 100644 --- a/docs/public_models/HAILO8L/HAILO8l_classification.rst +++ b/docs/public_models/HAILO8L/HAILO8l_classification.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Classification HAILO8L +================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Classification`_ .. _Classification: diff --git a/docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst b/docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst index 8d2849a6..f9c53b12 100644 --- a/docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Depth Estimation HAILO8L +==================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Depth Estimation`_ .. _Depth Estimation: diff --git a/docs/public_models/HAILO8L/HAILO8l_face_attribute.rst b/docs/public_models/HAILO8L/HAILO8l_face_attribute.rst index 4aeef03b..650f9467 100644 --- a/docs/public_models/HAILO8L/HAILO8l_face_attribute.rst +++ b/docs/public_models/HAILO8L/HAILO8l_face_attribute.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Face Attribute HAILO8L +================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Face Attribute`_ .. _Face Attribute: diff --git a/docs/public_models/HAILO8L/HAILO8l_face_detection.rst b/docs/public_models/HAILO8L/HAILO8l_face_detection.rst index 1f0c3bbd..4d9c6aac 100644 --- a/docs/public_models/HAILO8L/HAILO8l_face_detection.rst +++ b/docs/public_models/HAILO8L/HAILO8l_face_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Face Detection HAILO8L +================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Face Detection`_ .. _Face Detection: diff --git a/docs/public_models/HAILO8L/HAILO8l_face_recognition.rst b/docs/public_models/HAILO8L/HAILO8l_face_recognition.rst index 749fa5b8..06599fc5 100644 --- a/docs/public_models/HAILO8L/HAILO8l_face_recognition.rst +++ b/docs/public_models/HAILO8L/HAILO8l_face_recognition.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Face Recognition HAILO8L +==================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Face Recognition`_ .. _Face Recognition: diff --git a/docs/public_models/HAILO8L/HAILO8l_facial_landmark_detection.rst b/docs/public_models/HAILO8L/HAILO8l_facial_landmark_detection.rst index 0b118c8c..d377d943 100644 --- a/docs/public_models/HAILO8L/HAILO8l_facial_landmark_detection.rst +++ b/docs/public_models/HAILO8L/HAILO8l_facial_landmark_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Facial Landmark Detection HAILO8L +============================================================= .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Facial Landmark Detection`_ .. _Facial Landmark Detection: diff --git a/docs/public_models/HAILO8L/HAILO8l_hand_landmark_detection.rst b/docs/public_models/HAILO8L/HAILO8l_hand_landmark_detection.rst index 1023aa19..7e6fe570 100644 --- a/docs/public_models/HAILO8L/HAILO8l_hand_landmark_detection.rst +++ b/docs/public_models/HAILO8L/HAILO8l_hand_landmark_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Hand Landmark detection HAILO8L +=========================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Hand Landmark detection`_ .. _Hand Landmark detection: diff --git a/docs/public_models/HAILO8L/HAILO8l_image_denoising.rst b/docs/public_models/HAILO8L/HAILO8l_image_denoising.rst index fd46c1d8..95f46319 100644 --- a/docs/public_models/HAILO8L/HAILO8l_image_denoising.rst +++ b/docs/public_models/HAILO8L/HAILO8l_image_denoising.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Image Denoising HAILO8L +=================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Image Denoising`_ .. _Image Denoising: diff --git a/docs/public_models/HAILO8L/HAILO8l_instance_segmentation.rst b/docs/public_models/HAILO8L/HAILO8l_instance_segmentation.rst index 96ac3ecd..b572df8d 100644 --- a/docs/public_models/HAILO8L/HAILO8l_instance_segmentation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_instance_segmentation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Instance Segmentation HAILO8L +========================================================= .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Instance Segmentation`_ .. _Instance Segmentation: diff --git a/docs/public_models/HAILO8L/HAILO8l_low_light_enhancement.rst b/docs/public_models/HAILO8L/HAILO8l_low_light_enhancement.rst index 65dd732b..26f1961b 100644 --- a/docs/public_models/HAILO8L/HAILO8l_low_light_enhancement.rst +++ b/docs/public_models/HAILO8L/HAILO8l_low_light_enhancement.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Low Light Enhancement HAILO8L +========================================================= .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Low Light Enhancement`_ .. _Low Light Enhancement: diff --git a/docs/public_models/HAILO8L/HAILO8l_object_detection.rst b/docs/public_models/HAILO8L/HAILO8l_object_detection.rst index c89e92b6..6745546e 100644 --- a/docs/public_models/HAILO8L/HAILO8l_object_detection.rst +++ b/docs/public_models/HAILO8L/HAILO8l_object_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Object Detection HAILO8L +==================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Object Detection`_ .. _Object Detection: diff --git a/docs/public_models/HAILO8L/HAILO8l_person_attribute.rst b/docs/public_models/HAILO8L/HAILO8l_person_attribute.rst index 2b463199..8edf7f99 100644 --- a/docs/public_models/HAILO8L/HAILO8l_person_attribute.rst +++ b/docs/public_models/HAILO8L/HAILO8l_person_attribute.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Person Attribute HAILO8L +==================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Person Attribute`_ .. _Person Attribute: diff --git a/docs/public_models/HAILO8L/HAILO8l_person_re_id.rst b/docs/public_models/HAILO8L/HAILO8l_person_re_id.rst index 2c0f3374..5bde8d4c 100644 --- a/docs/public_models/HAILO8L/HAILO8l_person_re_id.rst +++ b/docs/public_models/HAILO8L/HAILO8l_person_re_id.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Person Re-ID HAILO8L +================================================ .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Person Re-ID`_ .. _Person Re-ID: diff --git a/docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst b/docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst index d03d4d07..2fb93fad 100644 --- a/docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Pose Estimation HAILO8L +=================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Pose Estimation`_ .. _Pose Estimation: diff --git a/docs/public_models/HAILO8L/HAILO8l_semantic_segmentation.rst b/docs/public_models/HAILO8L/HAILO8l_semantic_segmentation.rst index 5fb28496..cdcf7330 100644 --- a/docs/public_models/HAILO8L/HAILO8l_semantic_segmentation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_semantic_segmentation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Semantic Segmentation HAILO8L +========================================================= .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Semantic Segmentation`_ .. _Semantic Segmentation: diff --git a/docs/public_models/HAILO8L/HAILO8l_single_person_pose_estimation.rst b/docs/public_models/HAILO8L/HAILO8l_single_person_pose_estimation.rst index e05b849b..6a1f4c6a 100644 --- a/docs/public_models/HAILO8L/HAILO8l_single_person_pose_estimation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_single_person_pose_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Single Person Pose Estimation HAILO8L +================================================================= .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Single Person Pose Estimation`_ .. _Single Person Pose Estimation: diff --git a/docs/public_models/HAILO8L/HAILO8l_stereo_depth_estimation.rst b/docs/public_models/HAILO8L/HAILO8l_stereo_depth_estimation.rst index 28581831..061547f2 100644 --- a/docs/public_models/HAILO8L/HAILO8l_stereo_depth_estimation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_stereo_depth_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Stereo Depth Estimation HAILO8L +=========================================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Stereo Depth Estimation`_ .. _Stereo Depth Estimation: diff --git a/docs/public_models/HAILO8L/HAILO8l_super_resolution.rst b/docs/public_models/HAILO8L/HAILO8l_super_resolution.rst index 1c14410a..2f7250e3 100644 --- a/docs/public_models/HAILO8L/HAILO8l_super_resolution.rst +++ b/docs/public_models/HAILO8L/HAILO8l_super_resolution.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Super Resolution HAILO8L +==================================================== .. |rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Super Resolution`_ .. _Super Resolution: diff --git a/docs/public_models/HAILO8L/HAILO8l_zero_shot_classification.rst b/docs/public_models/HAILO8L/HAILO8l_zero_shot_classification.rst index 245eddd8..1db3b519 100644 --- a/docs/public_models/HAILO8L/HAILO8l_zero_shot_classification.rst +++ b/docs/public_models/HAILO8L/HAILO8l_zero_shot_classification.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models -========================= +Public Pre-Trained Models - Zero-shot Classification HAILO8L +============================================================ .. 
|rocket| image:: ../../images/rocket.png :width: 18 @@ -14,9 +14,6 @@ Here, we give the full list of publicly pre-trained models supported by the Hail * Networks available in `TAPPAS `_ are marked with |star| * Benchmark, TAPPAS and Recommended networks run in performance mode * All models were compiled using Hailo Dataflow Compiler v3.27.0 -* Supported tasks: - - * `Zero-shot Classification`_ .. _Zero-shot Classification: From d2bc8b985a8c3e0042d8d46a9921dd8150ffd49b Mon Sep 17 00:00:00 2001 From: HailoModelZoo Date: Sun, 7 Apr 2024 16:46:23 +0300 Subject: [PATCH 14/17] fix title capital --- .../public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst | 2 +- docs/public_models/HAILO15H/HAILO15H_image_denoising.rst | 2 +- docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst | 2 +- docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst | 2 +- docs/public_models/HAILO15H/HAILO15H_object_detection.rst | 2 +- docs/public_models/HAILO15H/HAILO15H_person_attribute.rst | 2 +- docs/public_models/HAILO15H/HAILO15H_person_re_id.rst | 2 +- docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst | 2 +- docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst | 2 +- .../HAILO15H/HAILO15H_single_person_pose_estimation.rst | 2 +- .../public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst | 2 +- docs/public_models/HAILO15H/HAILO15H_super_resolution.rst | 2 +- .../HAILO15H/HAILO15H_zero_shot_classification.rst | 2 +- docs/public_models/HAILO15M/HAILO15M_classification.rst | 2 +- docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst | 2 +- docs/public_models/HAILO15M/HAILO15M_face_attribute.rst | 2 +- docs/public_models/HAILO15M/HAILO15M_face_detection.rst | 2 +- docs/public_models/HAILO15M/HAILO15M_face_recognition.rst | 2 +- .../HAILO15M/HAILO15M_facial_landmark_detection.rst | 2 +- .../public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst | 2 +- docs/public_models/HAILO15M/HAILO15M_image_denoising.rst | 2 +- 
docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst | 2 +- docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst | 2 +- docs/public_models/HAILO15M/HAILO15M_object_detection.rst | 2 +- docs/public_models/HAILO15M/HAILO15M_person_attribute.rst | 2 +- docs/public_models/HAILO15M/HAILO15M_person_re_id.rst | 2 +- docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst | 2 +- docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst | 2 +- .../HAILO15M/HAILO15M_single_person_pose_estimation.rst | 2 +- .../public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst | 2 +- docs/public_models/HAILO15M/HAILO15M_super_resolution.rst | 2 +- .../HAILO15M/HAILO15M_zero_shot_classification.rst | 2 +- docs/public_models/HAILO8/HAILO8_classification.rst | 2 +- docs/public_models/HAILO8/HAILO8_depth_estimation.rst | 2 +- docs/public_models/HAILO8/HAILO8_face_attribute.rst | 2 +- docs/public_models/HAILO8/HAILO8_face_detection.rst | 2 +- docs/public_models/HAILO8/HAILO8_face_recognition.rst | 2 +- docs/public_models/HAILO8/HAILO8_facial_landmark_detection.rst | 2 +- docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst | 2 +- docs/public_models/HAILO8/HAILO8_image_denoising.rst | 2 +- docs/public_models/HAILO8/HAILO8_instance_segmentation.rst | 2 +- docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst | 2 +- docs/public_models/HAILO8/HAILO8_object_detection.rst | 2 +- docs/public_models/HAILO8/HAILO8_person_attribute.rst | 2 +- docs/public_models/HAILO8/HAILO8_person_re_id.rst | 2 +- docs/public_models/HAILO8/HAILO8_pose_estimation.rst | 2 +- docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst | 2 +- .../HAILO8/HAILO8_single_person_pose_estimation.rst | 2 +- docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst | 2 +- docs/public_models/HAILO8/HAILO8_super_resolution.rst | 2 +- docs/public_models/HAILO8/HAILO8_zero_shot_classification.rst | 2 +- docs/public_models/HAILO8L/HAILO8l_classification.rst | 2 +- 
docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst | 2 +- docs/public_models/HAILO8L/HAILO8l_face_attribute.rst | 2 +- docs/public_models/HAILO8L/HAILO8l_face_detection.rst | 2 +- docs/public_models/HAILO8L/HAILO8l_face_recognition.rst | 2 +- .../public_models/HAILO8L/HAILO8l_facial_landmark_detection.rst | 2 +- docs/public_models/HAILO8L/HAILO8l_hand_landmark_detection.rst | 2 +- docs/public_models/HAILO8L/HAILO8l_image_denoising.rst | 2 +- docs/public_models/HAILO8L/HAILO8l_instance_segmentation.rst | 2 +- docs/public_models/HAILO8L/HAILO8l_low_light_enhancement.rst | 2 +- docs/public_models/HAILO8L/HAILO8l_object_detection.rst | 2 +- docs/public_models/HAILO8L/HAILO8l_person_attribute.rst | 2 +- docs/public_models/HAILO8L/HAILO8l_person_re_id.rst | 2 +- docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst | 2 +- docs/public_models/HAILO8L/HAILO8l_semantic_segmentation.rst | 2 +- .../HAILO8L/HAILO8l_single_person_pose_estimation.rst | 2 +- docs/public_models/HAILO8L/HAILO8l_stereo_depth_estimation.rst | 2 +- docs/public_models/HAILO8L/HAILO8l_super_resolution.rst | 2 +- docs/public_models/HAILO8L/HAILO8l_zero_shot_classification.rst | 2 +- 70 files changed, 70 insertions(+), 70 deletions(-) diff --git a/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst b/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst index 07c2b70a..5d369c16 100644 --- a/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst +++ b/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Hand Landmark detection HAILO15H +Public Pre-Trained Models - Hand Landmark detection Hailo15H ============================================================ .. 
|rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15H/HAILO15H_image_denoising.rst b/docs/public_models/HAILO15H/HAILO15H_image_denoising.rst index 399897d3..0a83106a 100644 --- a/docs/public_models/HAILO15H/HAILO15H_image_denoising.rst +++ b/docs/public_models/HAILO15H/HAILO15H_image_denoising.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Image Denoising HAILO15H +Public Pre-Trained Models - Image Denoising Hailo15H ==================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst b/docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst index 252962f3..a7f859d1 100644 --- a/docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Instance Segmentation HAILO15H +Public Pre-Trained Models - Instance Segmentation Hailo15H ========================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst b/docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst index aaa205d4..c263d06a 100644 --- a/docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst +++ b/docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Low Light Enhancement HAILO15H +Public Pre-Trained Models - Low Light Enhancement Hailo15H ========================================================== .. 
|rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15H/HAILO15H_object_detection.rst b/docs/public_models/HAILO15H/HAILO15H_object_detection.rst index 320a4715..be610d12 100644 --- a/docs/public_models/HAILO15H/HAILO15H_object_detection.rst +++ b/docs/public_models/HAILO15H/HAILO15H_object_detection.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Object Detection HAILO15H +Public Pre-Trained Models - Object Detection Hailo15H ===================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15H/HAILO15H_person_attribute.rst b/docs/public_models/HAILO15H/HAILO15H_person_attribute.rst index 10854075..bd0edf1f 100644 --- a/docs/public_models/HAILO15H/HAILO15H_person_attribute.rst +++ b/docs/public_models/HAILO15H/HAILO15H_person_attribute.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Person Attribute HAILO15H +Public Pre-Trained Models - Person Attribute Hailo15H ===================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15H/HAILO15H_person_re_id.rst b/docs/public_models/HAILO15H/HAILO15H_person_re_id.rst index 1ca322d6..74491743 100644 --- a/docs/public_models/HAILO15H/HAILO15H_person_re_id.rst +++ b/docs/public_models/HAILO15H/HAILO15H_person_re_id.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Person Re-ID HAILO15H +Public Pre-Trained Models - Person Re-ID Hailo15H ================================================= .. 
|rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst b/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst index 0f1d2bf6..a6bde5dc 100644 --- a/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Pose Estimation HAILO15H +Public Pre-Trained Models - Pose Estimation Hailo15H ==================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst b/docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst index 9029489b..7e8eb475 100644 --- a/docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Semantic Segmentation HAILO15H +Public Pre-Trained Models - Semantic Segmentation Hailo15H ========================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst b/docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst index d9937655..9c1add84 100644 --- a/docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Single Person Pose Estimation HAILO15H +Public Pre-Trained Models - Single Person Pose Estimation Hailo15H ================================================================== .. 
|rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst b/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst index b7ad64c6..647995b9 100644 --- a/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Stereo Depth Estimation HAILO15H +Public Pre-Trained Models - Stereo Depth Estimation Hailo15H ============================================================ .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15H/HAILO15H_super_resolution.rst b/docs/public_models/HAILO15H/HAILO15H_super_resolution.rst index 425f5a9c..c631c944 100644 --- a/docs/public_models/HAILO15H/HAILO15H_super_resolution.rst +++ b/docs/public_models/HAILO15H/HAILO15H_super_resolution.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Super Resolution HAILO15H +Public Pre-Trained Models - Super Resolution Hailo15H ===================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst b/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst index c13b5f64..4032ba0d 100644 --- a/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst +++ b/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Zero-shot Classification HAILO15H +Public Pre-Trained Models - Zero-shot Classification Hailo15H ============================================================= .. 
|rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15M/HAILO15M_classification.rst b/docs/public_models/HAILO15M/HAILO15M_classification.rst index d0ae1156..38063bf6 100644 --- a/docs/public_models/HAILO15M/HAILO15M_classification.rst +++ b/docs/public_models/HAILO15M/HAILO15M_classification.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Classification HAILO15M +Public Pre-Trained Models - Classification Hailo15M =================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst b/docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst index 3d0801f5..ceaad506 100644 --- a/docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Depth Estimation HAILO15M +Public Pre-Trained Models - Depth Estimation Hailo15M ===================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15M/HAILO15M_face_attribute.rst b/docs/public_models/HAILO15M/HAILO15M_face_attribute.rst index b83dc282..53c002cb 100644 --- a/docs/public_models/HAILO15M/HAILO15M_face_attribute.rst +++ b/docs/public_models/HAILO15M/HAILO15M_face_attribute.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Face Attribute HAILO15M +Public Pre-Trained Models - Face Attribute Hailo15M =================================================== .. 
|rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15M/HAILO15M_face_detection.rst b/docs/public_models/HAILO15M/HAILO15M_face_detection.rst index 9f372c4c..e75121ef 100644 --- a/docs/public_models/HAILO15M/HAILO15M_face_detection.rst +++ b/docs/public_models/HAILO15M/HAILO15M_face_detection.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Face Detection HAILO15M +Public Pre-Trained Models - Face Detection Hailo15M =================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15M/HAILO15M_face_recognition.rst b/docs/public_models/HAILO15M/HAILO15M_face_recognition.rst index ee98061a..5222e811 100644 --- a/docs/public_models/HAILO15M/HAILO15M_face_recognition.rst +++ b/docs/public_models/HAILO15M/HAILO15M_face_recognition.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Face Recognition HAILO15M +Public Pre-Trained Models - Face Recognition Hailo15M ===================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15M/HAILO15M_facial_landmark_detection.rst b/docs/public_models/HAILO15M/HAILO15M_facial_landmark_detection.rst index 6832999c..40ab98a8 100644 --- a/docs/public_models/HAILO15M/HAILO15M_facial_landmark_detection.rst +++ b/docs/public_models/HAILO15M/HAILO15M_facial_landmark_detection.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Facial Landmark Detection HAILO15M +Public Pre-Trained Models - Facial Landmark Detection Hailo15M ============================================================== .. 
|rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst b/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst index 926e685c..9dd1e7c2 100644 --- a/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst +++ b/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Hand Landmark detection HAILO15M +Public Pre-Trained Models - Hand Landmark detection Hailo15M ============================================================ .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15M/HAILO15M_image_denoising.rst b/docs/public_models/HAILO15M/HAILO15M_image_denoising.rst index f5aa3fcb..630f45a1 100644 --- a/docs/public_models/HAILO15M/HAILO15M_image_denoising.rst +++ b/docs/public_models/HAILO15M/HAILO15M_image_denoising.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Image Denoising HAILO15M +Public Pre-Trained Models - Image Denoising Hailo15M ==================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst b/docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst index 56b0950c..1092e7ae 100644 --- a/docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Instance Segmentation HAILO15M +Public Pre-Trained Models - Instance Segmentation Hailo15M ========================================================== .. 
|rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst b/docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst index d208be5f..72bc8490 100644 --- a/docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst +++ b/docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Low Light Enhancement HAILO15M +Public Pre-Trained Models - Low Light Enhancement Hailo15M ========================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15M/HAILO15M_object_detection.rst b/docs/public_models/HAILO15M/HAILO15M_object_detection.rst index 49f82a63..412a1f98 100644 --- a/docs/public_models/HAILO15M/HAILO15M_object_detection.rst +++ b/docs/public_models/HAILO15M/HAILO15M_object_detection.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Object Detection HAILO15M +Public Pre-Trained Models - Object Detection Hailo15M ===================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15M/HAILO15M_person_attribute.rst b/docs/public_models/HAILO15M/HAILO15M_person_attribute.rst index 32e4667f..19c408ad 100644 --- a/docs/public_models/HAILO15M/HAILO15M_person_attribute.rst +++ b/docs/public_models/HAILO15M/HAILO15M_person_attribute.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Person Attribute HAILO15M +Public Pre-Trained Models - Person Attribute Hailo15M ===================================================== .. 
|rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst b/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst index c5197613..8ed1221c 100644 --- a/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst +++ b/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Person Re-ID HAILO15M +Public Pre-Trained Models - Person Re-ID Hailo15M ================================================= .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst b/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst index a53b619e..0d816e75 100644 --- a/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Pose Estimation HAILO15M +Public Pre-Trained Models - Pose Estimation Hailo15M ==================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst b/docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst index 0606d492..daf31436 100644 --- a/docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Semantic Segmentation HAILO15M +Public Pre-Trained Models - Semantic Segmentation Hailo15M ========================================================== .. 
|rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst b/docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst index cbed9102..d684d295 100644 --- a/docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Single Person Pose Estimation HAILO15M +Public Pre-Trained Models - Single Person Pose Estimation Hailo15M ================================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst b/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst index 241f0d24..7b6d8051 100644 --- a/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Stereo Depth Estimation HAILO15M +Public Pre-Trained Models - Stereo Depth Estimation Hailo15M ============================================================ .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15M/HAILO15M_super_resolution.rst b/docs/public_models/HAILO15M/HAILO15M_super_resolution.rst index 6610b963..4999a7eb 100644 --- a/docs/public_models/HAILO15M/HAILO15M_super_resolution.rst +++ b/docs/public_models/HAILO15M/HAILO15M_super_resolution.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Super Resolution HAILO15M +Public Pre-Trained Models - Super Resolution Hailo15M ===================================================== .. 
|rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst b/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst index 2bd30014..dbc14fe7 100644 --- a/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst +++ b/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Zero-shot Classification HAILO15M +Public Pre-Trained Models - Zero-shot Classification Hailo15M ============================================================= .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8/HAILO8_classification.rst b/docs/public_models/HAILO8/HAILO8_classification.rst index f5c93cd0..45c8e462 100644 --- a/docs/public_models/HAILO8/HAILO8_classification.rst +++ b/docs/public_models/HAILO8/HAILO8_classification.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Classification HAILO8 +Public Pre-Trained Models - Classification Hailo8 ================================================= .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8/HAILO8_depth_estimation.rst b/docs/public_models/HAILO8/HAILO8_depth_estimation.rst index 3dc9a4d4..cfd3c9bf 100644 --- a/docs/public_models/HAILO8/HAILO8_depth_estimation.rst +++ b/docs/public_models/HAILO8/HAILO8_depth_estimation.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Depth Estimation HAILO8 +Public Pre-Trained Models - Depth Estimation Hailo8 =================================================== .. 
|rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8/HAILO8_face_attribute.rst b/docs/public_models/HAILO8/HAILO8_face_attribute.rst index 2039187c..0d0faa26 100644 --- a/docs/public_models/HAILO8/HAILO8_face_attribute.rst +++ b/docs/public_models/HAILO8/HAILO8_face_attribute.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Face Attribute HAILO8 +Public Pre-Trained Models - Face Attribute Hailo8 ================================================= .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8/HAILO8_face_detection.rst b/docs/public_models/HAILO8/HAILO8_face_detection.rst index b7e644aa..4a83b89f 100644 --- a/docs/public_models/HAILO8/HAILO8_face_detection.rst +++ b/docs/public_models/HAILO8/HAILO8_face_detection.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Face Detection HAILO8 +Public Pre-Trained Models - Face Detection Hailo8 ================================================= .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8/HAILO8_face_recognition.rst b/docs/public_models/HAILO8/HAILO8_face_recognition.rst index 1d2b8358..863c2120 100644 --- a/docs/public_models/HAILO8/HAILO8_face_recognition.rst +++ b/docs/public_models/HAILO8/HAILO8_face_recognition.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Face Recognition HAILO8 +Public Pre-Trained Models - Face Recognition Hailo8 =================================================== .. 
|rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8/HAILO8_facial_landmark_detection.rst b/docs/public_models/HAILO8/HAILO8_facial_landmark_detection.rst index 83969505..770f19e5 100644 --- a/docs/public_models/HAILO8/HAILO8_facial_landmark_detection.rst +++ b/docs/public_models/HAILO8/HAILO8_facial_landmark_detection.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Facial Landmark Detection HAILO8 +Public Pre-Trained Models - Facial Landmark Detection Hailo8 ============================================================ .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst b/docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst index 6de96011..2103980a 100644 --- a/docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst +++ b/docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Hand Landmark detection HAILO8 +Public Pre-Trained Models - Hand Landmark detection Hailo8 ========================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8/HAILO8_image_denoising.rst b/docs/public_models/HAILO8/HAILO8_image_denoising.rst index a9e466b1..bc9cf561 100644 --- a/docs/public_models/HAILO8/HAILO8_image_denoising.rst +++ b/docs/public_models/HAILO8/HAILO8_image_denoising.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Image Denoising HAILO8 +Public Pre-Trained Models - Image Denoising Hailo8 ================================================== .. 
|rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8/HAILO8_instance_segmentation.rst b/docs/public_models/HAILO8/HAILO8_instance_segmentation.rst index ad803249..32f141cb 100644 --- a/docs/public_models/HAILO8/HAILO8_instance_segmentation.rst +++ b/docs/public_models/HAILO8/HAILO8_instance_segmentation.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Instance Segmentation HAILO8 +Public Pre-Trained Models - Instance Segmentation Hailo8 ======================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst b/docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst index 2ce4bb19..a54dd2bc 100644 --- a/docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst +++ b/docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Low Light Enhancement HAILO8 +Public Pre-Trained Models - Low Light Enhancement Hailo8 ======================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8/HAILO8_object_detection.rst b/docs/public_models/HAILO8/HAILO8_object_detection.rst index bec55f0f..9b21e866 100644 --- a/docs/public_models/HAILO8/HAILO8_object_detection.rst +++ b/docs/public_models/HAILO8/HAILO8_object_detection.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Object Detection HAILO8 +Public Pre-Trained Models - Object Detection Hailo8 =================================================== .. 
|rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8/HAILO8_person_attribute.rst b/docs/public_models/HAILO8/HAILO8_person_attribute.rst index 95fb884e..ec0accb5 100644 --- a/docs/public_models/HAILO8/HAILO8_person_attribute.rst +++ b/docs/public_models/HAILO8/HAILO8_person_attribute.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Person Attribute HAILO8 +Public Pre-Trained Models - Person Attribute Hailo8 =================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8/HAILO8_person_re_id.rst b/docs/public_models/HAILO8/HAILO8_person_re_id.rst index d6537c0a..014ff88c 100644 --- a/docs/public_models/HAILO8/HAILO8_person_re_id.rst +++ b/docs/public_models/HAILO8/HAILO8_person_re_id.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Person Re-ID HAILO8 +Public Pre-Trained Models - Person Re-ID Hailo8 =============================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8/HAILO8_pose_estimation.rst b/docs/public_models/HAILO8/HAILO8_pose_estimation.rst index 44899417..172a0f9b 100644 --- a/docs/public_models/HAILO8/HAILO8_pose_estimation.rst +++ b/docs/public_models/HAILO8/HAILO8_pose_estimation.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Pose Estimation HAILO8 +Public Pre-Trained Models - Pose Estimation Hailo8 ================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst b/docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst index 917f0045..c3b77868 100644 --- a/docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst +++ b/docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Semantic Segmentation HAILO8 +Public Pre-Trained Models - Semantic Segmentation Hailo8 ======================================================== .. 
|rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8/HAILO8_single_person_pose_estimation.rst b/docs/public_models/HAILO8/HAILO8_single_person_pose_estimation.rst index e35fca45..b3d5efa5 100644 --- a/docs/public_models/HAILO8/HAILO8_single_person_pose_estimation.rst +++ b/docs/public_models/HAILO8/HAILO8_single_person_pose_estimation.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Single Person Pose Estimation HAILO8 +Public Pre-Trained Models - Single Person Pose Estimation Hailo8 ================================================================ .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst b/docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst index 82934488..f7b7a914 100644 --- a/docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst +++ b/docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Stereo Depth Estimation HAILO8 +Public Pre-Trained Models - Stereo Depth Estimation Hailo8 ========================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8/HAILO8_super_resolution.rst b/docs/public_models/HAILO8/HAILO8_super_resolution.rst index 8de3c0ea..7816ad9b 100644 --- a/docs/public_models/HAILO8/HAILO8_super_resolution.rst +++ b/docs/public_models/HAILO8/HAILO8_super_resolution.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Super Resolution HAILO8 +Public Pre-Trained Models - Super Resolution Hailo8 =================================================== .. 
|rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8/HAILO8_zero_shot_classification.rst b/docs/public_models/HAILO8/HAILO8_zero_shot_classification.rst index 93858ad6..ca6c04fe 100644 --- a/docs/public_models/HAILO8/HAILO8_zero_shot_classification.rst +++ b/docs/public_models/HAILO8/HAILO8_zero_shot_classification.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Zero-shot Classification HAILO8 +Public Pre-Trained Models - Zero-shot Classification Hailo8 =========================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8L/HAILO8l_classification.rst b/docs/public_models/HAILO8L/HAILO8l_classification.rst index bcf6eb8a..0d3a120d 100644 --- a/docs/public_models/HAILO8L/HAILO8l_classification.rst +++ b/docs/public_models/HAILO8L/HAILO8l_classification.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Classification HAILO8L +Public Pre-Trained Models - Classification Hailo8L ================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst b/docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst index f9c53b12..8982043f 100644 --- a/docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Depth Estimation HAILO8L +Public Pre-Trained Models - Depth Estimation Hailo8L ==================================================== .. 
|rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8L/HAILO8l_face_attribute.rst b/docs/public_models/HAILO8L/HAILO8l_face_attribute.rst index 650f9467..e69b3519 100644 --- a/docs/public_models/HAILO8L/HAILO8l_face_attribute.rst +++ b/docs/public_models/HAILO8L/HAILO8l_face_attribute.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Face Attribute HAILO8L +Public Pre-Trained Models - Face Attribute Hailo8L ================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8L/HAILO8l_face_detection.rst b/docs/public_models/HAILO8L/HAILO8l_face_detection.rst index 4d9c6aac..1208ca5d 100644 --- a/docs/public_models/HAILO8L/HAILO8l_face_detection.rst +++ b/docs/public_models/HAILO8L/HAILO8l_face_detection.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Face Detection HAILO8L +Public Pre-Trained Models - Face Detection Hailo8L ================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8L/HAILO8l_face_recognition.rst b/docs/public_models/HAILO8L/HAILO8l_face_recognition.rst index 06599fc5..0d328fe4 100644 --- a/docs/public_models/HAILO8L/HAILO8l_face_recognition.rst +++ b/docs/public_models/HAILO8L/HAILO8l_face_recognition.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Face Recognition HAILO8L +Public Pre-Trained Models - Face Recognition Hailo8L ==================================================== .. 
|rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8L/HAILO8l_facial_landmark_detection.rst b/docs/public_models/HAILO8L/HAILO8l_facial_landmark_detection.rst index d377d943..236438f3 100644 --- a/docs/public_models/HAILO8L/HAILO8l_facial_landmark_detection.rst +++ b/docs/public_models/HAILO8L/HAILO8l_facial_landmark_detection.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Facial Landmark Detection HAILO8L +Public Pre-Trained Models - Facial Landmark Detection Hailo8L ============================================================= .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8L/HAILO8l_hand_landmark_detection.rst b/docs/public_models/HAILO8L/HAILO8l_hand_landmark_detection.rst index 7e6fe570..30c9ecd0 100644 --- a/docs/public_models/HAILO8L/HAILO8l_hand_landmark_detection.rst +++ b/docs/public_models/HAILO8L/HAILO8l_hand_landmark_detection.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Hand Landmark detection HAILO8L +Public Pre-Trained Models - Hand Landmark detection Hailo8L =========================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8L/HAILO8l_image_denoising.rst b/docs/public_models/HAILO8L/HAILO8l_image_denoising.rst index 95f46319..8e6abf00 100644 --- a/docs/public_models/HAILO8L/HAILO8l_image_denoising.rst +++ b/docs/public_models/HAILO8L/HAILO8l_image_denoising.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Image Denoising HAILO8L +Public Pre-Trained Models - Image Denoising Hailo8L =================================================== .. 
|rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8L/HAILO8l_instance_segmentation.rst b/docs/public_models/HAILO8L/HAILO8l_instance_segmentation.rst index b572df8d..543447f7 100644 --- a/docs/public_models/HAILO8L/HAILO8l_instance_segmentation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_instance_segmentation.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Instance Segmentation HAILO8L +Public Pre-Trained Models - Instance Segmentation Hailo8L ========================================================= .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8L/HAILO8l_low_light_enhancement.rst b/docs/public_models/HAILO8L/HAILO8l_low_light_enhancement.rst index 26f1961b..1e2ccd44 100644 --- a/docs/public_models/HAILO8L/HAILO8l_low_light_enhancement.rst +++ b/docs/public_models/HAILO8L/HAILO8l_low_light_enhancement.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Low Light Enhancement HAILO8L +Public Pre-Trained Models - Low Light Enhancement Hailo8L ========================================================= .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8L/HAILO8l_object_detection.rst b/docs/public_models/HAILO8L/HAILO8l_object_detection.rst index 6745546e..c64592d8 100644 --- a/docs/public_models/HAILO8L/HAILO8l_object_detection.rst +++ b/docs/public_models/HAILO8L/HAILO8l_object_detection.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Object Detection HAILO8L +Public Pre-Trained Models - Object Detection Hailo8L ==================================================== .. 
|rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8L/HAILO8l_person_attribute.rst b/docs/public_models/HAILO8L/HAILO8l_person_attribute.rst index 8edf7f99..73bbbd31 100644 --- a/docs/public_models/HAILO8L/HAILO8l_person_attribute.rst +++ b/docs/public_models/HAILO8L/HAILO8l_person_attribute.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Person Attribute HAILO8L +Public Pre-Trained Models - Person Attribute Hailo8L ==================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8L/HAILO8l_person_re_id.rst b/docs/public_models/HAILO8L/HAILO8l_person_re_id.rst index 5bde8d4c..9be02e83 100644 --- a/docs/public_models/HAILO8L/HAILO8l_person_re_id.rst +++ b/docs/public_models/HAILO8L/HAILO8l_person_re_id.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Person Re-ID HAILO8L +Public Pre-Trained Models - Person Re-ID Hailo8L ================================================ .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst b/docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst index 2fb93fad..3ab127d4 100644 --- a/docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Pose Estimation HAILO8L +Public Pre-Trained Models - Pose Estimation Hailo8L =================================================== .. 
|rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8L/HAILO8l_semantic_segmentation.rst b/docs/public_models/HAILO8L/HAILO8l_semantic_segmentation.rst index cdcf7330..6c56192f 100644 --- a/docs/public_models/HAILO8L/HAILO8l_semantic_segmentation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_semantic_segmentation.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Semantic Segmentation HAILO8L +Public Pre-Trained Models - Semantic Segmentation Hailo8L ========================================================= .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8L/HAILO8l_single_person_pose_estimation.rst b/docs/public_models/HAILO8L/HAILO8l_single_person_pose_estimation.rst index 6a1f4c6a..a584e327 100644 --- a/docs/public_models/HAILO8L/HAILO8l_single_person_pose_estimation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_single_person_pose_estimation.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Single Person Pose Estimation HAILO8L +Public Pre-Trained Models - Single Person Pose Estimation Hailo8L ================================================================= .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8L/HAILO8l_stereo_depth_estimation.rst b/docs/public_models/HAILO8L/HAILO8l_stereo_depth_estimation.rst index 061547f2..0ac94f1c 100644 --- a/docs/public_models/HAILO8L/HAILO8l_stereo_depth_estimation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_stereo_depth_estimation.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Stereo Depth Estimation HAILO8L +Public Pre-Trained Models - Stereo Depth Estimation Hailo8L =========================================================== .. 
|rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8L/HAILO8l_super_resolution.rst b/docs/public_models/HAILO8L/HAILO8l_super_resolution.rst index 2f7250e3..b4845581 100644 --- a/docs/public_models/HAILO8L/HAILO8l_super_resolution.rst +++ b/docs/public_models/HAILO8L/HAILO8l_super_resolution.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Super Resolution HAILO8L +Public Pre-Trained Models - Super Resolution Hailo8L ==================================================== .. |rocket| image:: ../../images/rocket.png diff --git a/docs/public_models/HAILO8L/HAILO8l_zero_shot_classification.rst b/docs/public_models/HAILO8L/HAILO8l_zero_shot_classification.rst index 1db3b519..2ca2b089 100644 --- a/docs/public_models/HAILO8L/HAILO8l_zero_shot_classification.rst +++ b/docs/public_models/HAILO8L/HAILO8l_zero_shot_classification.rst @@ -1,5 +1,5 @@ -Public Pre-Trained Models - Zero-shot Classification HAILO8L +Public Pre-Trained Models - Zero-shot Classification Hailo8L ============================================================ .. 
|rocket| image:: ../../images/rocket.png From 78516b49dccaf8bcabe15518b23b0c313de7a276 Mon Sep 17 00:00:00 2001 From: HailoModelZoo <87389434+HailoModelZoo@users.noreply.github.com> Date: Mon, 8 Apr 2024 09:55:12 +0300 Subject: [PATCH 15/17] Update HAILO8_semantic_segmentation.rst --- docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst b/docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst index c3b77868..f8006b57 100644 --- a/docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst +++ b/docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst @@ -64,8 +64,8 @@ Cityscapes * - stdc1 |rocket| - 74.57 - 73.92 - - 52 - - 52 + - 54 + - 54 - 1024x1920x3 - 8.27 - 126.47 From 5ebffc3d19b390c001cd4902ee7cf3cf2839ed5b Mon Sep 17 00:00:00 2001 From: HailoModelZoo Date: Mon, 8 Apr 2024 15:34:38 +0300 Subject: [PATCH 16/17] fix --- docs/public_models/HAILO15H/HAILO15H_classification.rst | 4 ++-- docs/public_models/HAILO15H/HAILO15H_depth_estimation.rst | 4 ++-- docs/public_models/HAILO15H/HAILO15H_face_attribute.rst | 4 ++-- docs/public_models/HAILO15H/HAILO15H_face_detection.rst | 4 ++-- docs/public_models/HAILO15H/HAILO15H_face_recognition.rst | 4 ++-- .../HAILO15H/HAILO15H_facial_landmark_detection.rst | 4 ++-- .../HAILO15H/HAILO15H_hand_landmark_detection.rst | 4 ++-- docs/public_models/HAILO15H/HAILO15H_image_denoising.rst | 4 ++-- .../HAILO15H/HAILO15H_instance_segmentation.rst | 4 ++-- .../HAILO15H/HAILO15H_low_light_enhancement.rst | 4 ++-- docs/public_models/HAILO15H/HAILO15H_object_detection.rst | 4 ++-- docs/public_models/HAILO15H/HAILO15H_person_attribute.rst | 4 ++-- docs/public_models/HAILO15H/HAILO15H_person_re_id.rst | 4 ++-- docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst | 4 ++-- .../HAILO15H/HAILO15H_semantic_segmentation.rst | 4 ++-- .../HAILO15H/HAILO15H_single_person_pose_estimation.rst | 4 ++-- 
.../HAILO15H/HAILO15H_stereo_depth_estimation.rst | 4 ++-- docs/public_models/HAILO15H/HAILO15H_super_resolution.rst | 4 ++-- .../HAILO15H/HAILO15H_zero_shot_classification.rst | 4 ++-- docs/public_models/HAILO15M/HAILO15M_classification.rst | 4 ++-- docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst | 4 ++-- docs/public_models/HAILO15M/HAILO15M_face_attribute.rst | 4 ++-- docs/public_models/HAILO15M/HAILO15M_face_detection.rst | 4 ++-- docs/public_models/HAILO15M/HAILO15M_face_recognition.rst | 4 ++-- .../HAILO15M/HAILO15M_facial_landmark_detection.rst | 4 ++-- .../HAILO15M/HAILO15M_hand_landmark_detection.rst | 4 ++-- docs/public_models/HAILO15M/HAILO15M_image_denoising.rst | 4 ++-- .../HAILO15M/HAILO15M_instance_segmentation.rst | 4 ++-- .../HAILO15M/HAILO15M_low_light_enhancement.rst | 4 ++-- docs/public_models/HAILO15M/HAILO15M_object_detection.rst | 4 ++-- docs/public_models/HAILO15M/HAILO15M_person_attribute.rst | 4 ++-- docs/public_models/HAILO15M/HAILO15M_person_re_id.rst | 4 ++-- docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst | 4 ++-- .../HAILO15M/HAILO15M_semantic_segmentation.rst | 4 ++-- .../HAILO15M/HAILO15M_single_person_pose_estimation.rst | 4 ++-- .../HAILO15M/HAILO15M_stereo_depth_estimation.rst | 4 ++-- docs/public_models/HAILO15M/HAILO15M_super_resolution.rst | 4 ++-- .../HAILO15M/HAILO15M_zero_shot_classification.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_classification.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_depth_estimation.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_face_attribute.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_face_detection.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_face_recognition.rst | 4 ++-- .../HAILO8/HAILO8_facial_landmark_detection.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_image_denoising.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_instance_segmentation.rst | 4 ++-- 
docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_object_detection.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_person_attribute.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_person_re_id.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_pose_estimation.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst | 4 ++-- .../HAILO8/HAILO8_single_person_pose_estimation.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst | 4 ++-- docs/public_models/HAILO8/HAILO8_super_resolution.rst | 4 ++-- .../public_models/HAILO8/HAILO8_zero_shot_classification.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_classification.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_face_attribute.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_face_detection.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_face_recognition.rst | 4 ++-- .../HAILO8L/HAILO8l_facial_landmark_detection.rst | 4 ++-- .../HAILO8L/HAILO8l_hand_landmark_detection.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_image_denoising.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_instance_segmentation.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_low_light_enhancement.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_object_detection.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_person_attribute.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_person_re_id.rst | 5 ++--- docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_semantic_segmentation.rst | 4 ++-- .../HAILO8L/HAILO8l_single_person_pose_estimation.rst | 4 ++-- .../HAILO8L/HAILO8l_stereo_depth_estimation.rst | 4 ++-- docs/public_models/HAILO8L/HAILO8l_super_resolution.rst | 4 ++-- .../HAILO8L/HAILO8l_zero_shot_classification.rst | 4 ++-- 76 files changed, 152 insertions(+), 153 deletions(-) diff --git a/docs/public_models/HAILO15H/HAILO15H_classification.rst 
b/docs/public_models/HAILO15H/HAILO15H_classification.rst index 7295b4fd..400a9431 100644 --- a/docs/public_models/HAILO15H/HAILO15H_classification.rst +++ b/docs/public_models/HAILO15H/HAILO15H_classification.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Classification Hailo15H -=================================================== +Hailo15H Classification +======================= .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15H/HAILO15H_depth_estimation.rst b/docs/public_models/HAILO15H/HAILO15H_depth_estimation.rst index 6cc4a273..f33c9563 100644 --- a/docs/public_models/HAILO15H/HAILO15H_depth_estimation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_depth_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Depth Estimation Hailo15H -===================================================== +Hailo15H Depth Estimation +========================= .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15H/HAILO15H_face_attribute.rst b/docs/public_models/HAILO15H/HAILO15H_face_attribute.rst index 94f050a0..e5790bdb 100644 --- a/docs/public_models/HAILO15H/HAILO15H_face_attribute.rst +++ b/docs/public_models/HAILO15H/HAILO15H_face_attribute.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Face Attribute Hailo15H -=================================================== +Hailo15H Face Attribute +======================= .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15H/HAILO15H_face_detection.rst b/docs/public_models/HAILO15H/HAILO15H_face_detection.rst index fe3d9389..67e5e126 100644 --- a/docs/public_models/HAILO15H/HAILO15H_face_detection.rst +++ b/docs/public_models/HAILO15H/HAILO15H_face_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Face Detection Hailo15H -=================================================== +Hailo15H Face Detection +======================= .. 
|rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15H/HAILO15H_face_recognition.rst b/docs/public_models/HAILO15H/HAILO15H_face_recognition.rst index 2c561346..e5d2a940 100644 --- a/docs/public_models/HAILO15H/HAILO15H_face_recognition.rst +++ b/docs/public_models/HAILO15H/HAILO15H_face_recognition.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Face Recognition Hailo15H -===================================================== +Hailo15H Face Recognition +========================= .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15H/HAILO15H_facial_landmark_detection.rst b/docs/public_models/HAILO15H/HAILO15H_facial_landmark_detection.rst index fbef587a..8b1048c1 100644 --- a/docs/public_models/HAILO15H/HAILO15H_facial_landmark_detection.rst +++ b/docs/public_models/HAILO15H/HAILO15H_facial_landmark_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Facial Landmark Detection Hailo15H -============================================================== +Hailo15H Facial Landmark Detection +================================== .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst b/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst index 5d369c16..2f7d0919 100644 --- a/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst +++ b/docs/public_models/HAILO15H/HAILO15H_hand_landmark_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Hand Landmark detection Hailo15H -============================================================ +Hailo15H Hand Landmark detection +================================ .. 
|rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15H/HAILO15H_image_denoising.rst b/docs/public_models/HAILO15H/HAILO15H_image_denoising.rst index 0a83106a..32600201 100644 --- a/docs/public_models/HAILO15H/HAILO15H_image_denoising.rst +++ b/docs/public_models/HAILO15H/HAILO15H_image_denoising.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Image Denoising Hailo15H -==================================================== +Hailo15H Image Denoising +======================== .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst b/docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst index a7f859d1..e16332f1 100644 --- a/docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_instance_segmentation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Instance Segmentation Hailo15H -========================================================== +Hailo15H Instance Segmentation +============================== .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst b/docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst index c263d06a..cb0fbfdc 100644 --- a/docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst +++ b/docs/public_models/HAILO15H/HAILO15H_low_light_enhancement.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Low Light Enhancement Hailo15H -========================================================== +Hailo15H Low Light Enhancement +============================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15H/HAILO15H_object_detection.rst b/docs/public_models/HAILO15H/HAILO15H_object_detection.rst index be610d12..59c7cc1a 100644 --- a/docs/public_models/HAILO15H/HAILO15H_object_detection.rst +++ b/docs/public_models/HAILO15H/HAILO15H_object_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Object Detection Hailo15H -===================================================== +Hailo15H Object Detection +========================= .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15H/HAILO15H_person_attribute.rst b/docs/public_models/HAILO15H/HAILO15H_person_attribute.rst index bd0edf1f..67893b64 100644 --- a/docs/public_models/HAILO15H/HAILO15H_person_attribute.rst +++ b/docs/public_models/HAILO15H/HAILO15H_person_attribute.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Person Attribute Hailo15H -===================================================== +Hailo15H Person Attribute +========================= .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15H/HAILO15H_person_re_id.rst b/docs/public_models/HAILO15H/HAILO15H_person_re_id.rst index 74491743..94d7b695 100644 --- a/docs/public_models/HAILO15H/HAILO15H_person_re_id.rst +++ b/docs/public_models/HAILO15H/HAILO15H_person_re_id.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Person Re-ID Hailo15H -================================================= +Hailo15H Person Re-ID +===================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst b/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst index a6bde5dc..88105500 100644 --- a/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_pose_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Pose Estimation Hailo15H -==================================================== +Hailo15H Pose Estimation +======================== .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst b/docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst index 7e8eb475..902b7716 100644 --- a/docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_semantic_segmentation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Semantic Segmentation Hailo15H -========================================================== +Hailo15H Semantic Segmentation +============================== .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst b/docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst index 9c1add84..c010d3b1 100644 --- a/docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_single_person_pose_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Single Person Pose Estimation Hailo15H -================================================================== +Hailo15H Single Person Pose Estimation +====================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst b/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst index 647995b9..9da3a313 100644 --- a/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst +++ b/docs/public_models/HAILO15H/HAILO15H_stereo_depth_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Stereo Depth Estimation Hailo15H -============================================================ +Hailo15H Stereo Depth Estimation +================================ .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15H/HAILO15H_super_resolution.rst b/docs/public_models/HAILO15H/HAILO15H_super_resolution.rst index c631c944..481ab132 100644 --- a/docs/public_models/HAILO15H/HAILO15H_super_resolution.rst +++ b/docs/public_models/HAILO15H/HAILO15H_super_resolution.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Super Resolution Hailo15H -===================================================== +Hailo15H Super Resolution +========================= .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst b/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst index 4032ba0d..ebf5a050 100644 --- a/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst +++ b/docs/public_models/HAILO15H/HAILO15H_zero_shot_classification.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Zero-shot Classification Hailo15H -============================================================= +Hailo15H Zero-shot Classification +================================= .. 
|rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15M/HAILO15M_classification.rst b/docs/public_models/HAILO15M/HAILO15M_classification.rst index 38063bf6..d65ca8d8 100644 --- a/docs/public_models/HAILO15M/HAILO15M_classification.rst +++ b/docs/public_models/HAILO15M/HAILO15M_classification.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Classification Hailo15M -=================================================== +Hailo15M Classification +======================= .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst b/docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst index ceaad506..d869459c 100644 --- a/docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_depth_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Depth Estimation Hailo15M -===================================================== +Hailo15M Depth Estimation +========================= .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15M/HAILO15M_face_attribute.rst b/docs/public_models/HAILO15M/HAILO15M_face_attribute.rst index 53c002cb..d37f4b5c 100644 --- a/docs/public_models/HAILO15M/HAILO15M_face_attribute.rst +++ b/docs/public_models/HAILO15M/HAILO15M_face_attribute.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Face Attribute Hailo15M -=================================================== +Hailo15M Face Attribute +======================= .. 
|rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15M/HAILO15M_face_detection.rst b/docs/public_models/HAILO15M/HAILO15M_face_detection.rst index e75121ef..8be62cb8 100644 --- a/docs/public_models/HAILO15M/HAILO15M_face_detection.rst +++ b/docs/public_models/HAILO15M/HAILO15M_face_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Face Detection Hailo15M -=================================================== +Hailo15M Face Detection +======================= .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15M/HAILO15M_face_recognition.rst b/docs/public_models/HAILO15M/HAILO15M_face_recognition.rst index 5222e811..45b4d029 100644 --- a/docs/public_models/HAILO15M/HAILO15M_face_recognition.rst +++ b/docs/public_models/HAILO15M/HAILO15M_face_recognition.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Face Recognition Hailo15M -===================================================== +Hailo15M Face Recognition +========================= .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15M/HAILO15M_facial_landmark_detection.rst b/docs/public_models/HAILO15M/HAILO15M_facial_landmark_detection.rst index 40ab98a8..bc7c91d6 100644 --- a/docs/public_models/HAILO15M/HAILO15M_facial_landmark_detection.rst +++ b/docs/public_models/HAILO15M/HAILO15M_facial_landmark_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Facial Landmark Detection Hailo15M -============================================================== +Hailo15M Facial Landmark Detection +================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst b/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst index 9dd1e7c2..9116227a 100644 --- a/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst +++ b/docs/public_models/HAILO15M/HAILO15M_hand_landmark_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Hand Landmark detection Hailo15M -============================================================ +Hailo15M Hand Landmark detection +================================ .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15M/HAILO15M_image_denoising.rst b/docs/public_models/HAILO15M/HAILO15M_image_denoising.rst index 630f45a1..32d1e992 100644 --- a/docs/public_models/HAILO15M/HAILO15M_image_denoising.rst +++ b/docs/public_models/HAILO15M/HAILO15M_image_denoising.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Image Denoising Hailo15M -==================================================== +Hailo15M Image Denoising +======================== .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst b/docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst index 1092e7ae..7365adea 100644 --- a/docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_instance_segmentation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Instance Segmentation Hailo15M -========================================================== +Hailo15M Instance Segmentation +============================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst b/docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst index 72bc8490..fa7fb770 100644 --- a/docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst +++ b/docs/public_models/HAILO15M/HAILO15M_low_light_enhancement.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Low Light Enhancement Hailo15M -========================================================== +Hailo15M Low Light Enhancement +============================== .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15M/HAILO15M_object_detection.rst b/docs/public_models/HAILO15M/HAILO15M_object_detection.rst index 412a1f98..dda1d8b2 100644 --- a/docs/public_models/HAILO15M/HAILO15M_object_detection.rst +++ b/docs/public_models/HAILO15M/HAILO15M_object_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Object Detection Hailo15M -===================================================== +Hailo15M Object Detection +========================= .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15M/HAILO15M_person_attribute.rst b/docs/public_models/HAILO15M/HAILO15M_person_attribute.rst index 19c408ad..8d20c53e 100644 --- a/docs/public_models/HAILO15M/HAILO15M_person_attribute.rst +++ b/docs/public_models/HAILO15M/HAILO15M_person_attribute.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Person Attribute Hailo15M -===================================================== +Hailo15M Person Attribute +========================= .. 
|rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst b/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst index 8ed1221c..9d8c5371 100644 --- a/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst +++ b/docs/public_models/HAILO15M/HAILO15M_person_re_id.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Person Re-ID Hailo15M -================================================= +Hailo15M Person Re-ID +===================== .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst b/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst index 0d816e75..1c948dca 100644 --- a/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_pose_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Pose Estimation Hailo15M -==================================================== +Hailo15M Pose Estimation +======================== .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst b/docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst index daf31436..55b60a81 100644 --- a/docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_semantic_segmentation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Semantic Segmentation Hailo15M -========================================================== +Hailo15M Semantic Segmentation +============================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst b/docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst index d684d295..0d4ec95c 100644 --- a/docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_single_person_pose_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Single Person Pose Estimation Hailo15M -================================================================== +Hailo15M Single Person Pose Estimation +====================================== .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst b/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst index 7b6d8051..2de46b6c 100644 --- a/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst +++ b/docs/public_models/HAILO15M/HAILO15M_stereo_depth_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Stereo Depth Estimation Hailo15M -============================================================ +Hailo15M Stereo Depth Estimation +================================ .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15M/HAILO15M_super_resolution.rst b/docs/public_models/HAILO15M/HAILO15M_super_resolution.rst index 4999a7eb..6f34935a 100644 --- a/docs/public_models/HAILO15M/HAILO15M_super_resolution.rst +++ b/docs/public_models/HAILO15M/HAILO15M_super_resolution.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Super Resolution Hailo15M -===================================================== +Hailo15M Super Resolution +========================= .. 
|rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst b/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst index dbc14fe7..01e89a6b 100644 --- a/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst +++ b/docs/public_models/HAILO15M/HAILO15M_zero_shot_classification.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Zero-shot Classification Hailo15M -============================================================= +Hailo15M Zero-shot Classification +================================= .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8/HAILO8_classification.rst b/docs/public_models/HAILO8/HAILO8_classification.rst index 45c8e462..1f5cf91c 100644 --- a/docs/public_models/HAILO8/HAILO8_classification.rst +++ b/docs/public_models/HAILO8/HAILO8_classification.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Classification Hailo8 -================================================= +Hailo8 Classification +===================== .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8/HAILO8_depth_estimation.rst b/docs/public_models/HAILO8/HAILO8_depth_estimation.rst index cfd3c9bf..df0d7711 100644 --- a/docs/public_models/HAILO8/HAILO8_depth_estimation.rst +++ b/docs/public_models/HAILO8/HAILO8_depth_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Depth Estimation Hailo8 -=================================================== +Hailo8 Depth Estimation +======================= .. 
|rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8/HAILO8_face_attribute.rst b/docs/public_models/HAILO8/HAILO8_face_attribute.rst index 0d0faa26..cc8254d4 100644 --- a/docs/public_models/HAILO8/HAILO8_face_attribute.rst +++ b/docs/public_models/HAILO8/HAILO8_face_attribute.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Face Attribute Hailo8 -================================================= +Hailo8 Face Attribute +===================== .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8/HAILO8_face_detection.rst b/docs/public_models/HAILO8/HAILO8_face_detection.rst index 4a83b89f..801f9a20 100644 --- a/docs/public_models/HAILO8/HAILO8_face_detection.rst +++ b/docs/public_models/HAILO8/HAILO8_face_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Face Detection Hailo8 -================================================= +Hailo8 Face Detection +===================== .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8/HAILO8_face_recognition.rst b/docs/public_models/HAILO8/HAILO8_face_recognition.rst index 863c2120..88f47954 100644 --- a/docs/public_models/HAILO8/HAILO8_face_recognition.rst +++ b/docs/public_models/HAILO8/HAILO8_face_recognition.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Face Recognition Hailo8 -=================================================== +Hailo8 Face Recognition +======================= .. 
|rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8/HAILO8_facial_landmark_detection.rst b/docs/public_models/HAILO8/HAILO8_facial_landmark_detection.rst index 770f19e5..47faf227 100644 --- a/docs/public_models/HAILO8/HAILO8_facial_landmark_detection.rst +++ b/docs/public_models/HAILO8/HAILO8_facial_landmark_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Facial Landmark Detection Hailo8 -============================================================ +Hailo8 Facial Landmark Detection +================================ .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst b/docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst index 2103980a..2444f7d8 100644 --- a/docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst +++ b/docs/public_models/HAILO8/HAILO8_hand_landmark_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Hand Landmark detection Hailo8 -========================================================== +Hailo8 Hand Landmark detection +============================== .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8/HAILO8_image_denoising.rst b/docs/public_models/HAILO8/HAILO8_image_denoising.rst index bc9cf561..71ae3b3b 100644 --- a/docs/public_models/HAILO8/HAILO8_image_denoising.rst +++ b/docs/public_models/HAILO8/HAILO8_image_denoising.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Image Denoising Hailo8 -================================================== +Hailo8 Image Denoising +====================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8/HAILO8_instance_segmentation.rst b/docs/public_models/HAILO8/HAILO8_instance_segmentation.rst index 32f141cb..a3b8e927 100644 --- a/docs/public_models/HAILO8/HAILO8_instance_segmentation.rst +++ b/docs/public_models/HAILO8/HAILO8_instance_segmentation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Instance Segmentation Hailo8 -======================================================== +Hailo8 Instance Segmentation +============================ .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst b/docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst index a54dd2bc..885e4aea 100644 --- a/docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst +++ b/docs/public_models/HAILO8/HAILO8_low_light_enhancement.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Low Light Enhancement Hailo8 -======================================================== +Hailo8 Low Light Enhancement +============================ .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8/HAILO8_object_detection.rst b/docs/public_models/HAILO8/HAILO8_object_detection.rst index 9b21e866..38215eeb 100644 --- a/docs/public_models/HAILO8/HAILO8_object_detection.rst +++ b/docs/public_models/HAILO8/HAILO8_object_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Object Detection Hailo8 -=================================================== +Hailo8 Object Detection +======================= .. 
|rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8/HAILO8_person_attribute.rst b/docs/public_models/HAILO8/HAILO8_person_attribute.rst index ec0accb5..045d299b 100644 --- a/docs/public_models/HAILO8/HAILO8_person_attribute.rst +++ b/docs/public_models/HAILO8/HAILO8_person_attribute.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Person Attribute Hailo8 -=================================================== +Hailo8 Person Attribute +======================= .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8/HAILO8_person_re_id.rst b/docs/public_models/HAILO8/HAILO8_person_re_id.rst index 014ff88c..acaf25f6 100644 --- a/docs/public_models/HAILO8/HAILO8_person_re_id.rst +++ b/docs/public_models/HAILO8/HAILO8_person_re_id.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Person Re-ID Hailo8 -=============================================== +Hailo8 Person Re-ID +=================== .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8/HAILO8_pose_estimation.rst b/docs/public_models/HAILO8/HAILO8_pose_estimation.rst index 172a0f9b..465a4470 100644 --- a/docs/public_models/HAILO8/HAILO8_pose_estimation.rst +++ b/docs/public_models/HAILO8/HAILO8_pose_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Pose Estimation Hailo8 -================================================== +Hailo8 Pose Estimation +====================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst b/docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst index f8006b57..41d93ced 100644 --- a/docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst +++ b/docs/public_models/HAILO8/HAILO8_semantic_segmentation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Semantic Segmentation Hailo8 -======================================================== +Hailo8 Semantic Segmentation +============================ .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8/HAILO8_single_person_pose_estimation.rst b/docs/public_models/HAILO8/HAILO8_single_person_pose_estimation.rst index b3d5efa5..e42265cb 100644 --- a/docs/public_models/HAILO8/HAILO8_single_person_pose_estimation.rst +++ b/docs/public_models/HAILO8/HAILO8_single_person_pose_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Single Person Pose Estimation Hailo8 -================================================================ +Hailo8 Single Person Pose Estimation +==================================== .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst b/docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst index f7b7a914..43dba962 100644 --- a/docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst +++ b/docs/public_models/HAILO8/HAILO8_stereo_depth_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Stereo Depth Estimation Hailo8 -========================================================== +Hailo8 Stereo Depth Estimation +============================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8/HAILO8_super_resolution.rst b/docs/public_models/HAILO8/HAILO8_super_resolution.rst index 7816ad9b..d3c99a00 100644 --- a/docs/public_models/HAILO8/HAILO8_super_resolution.rst +++ b/docs/public_models/HAILO8/HAILO8_super_resolution.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Super Resolution Hailo8 -=================================================== +Hailo8 Super Resolution +======================= .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8/HAILO8_zero_shot_classification.rst b/docs/public_models/HAILO8/HAILO8_zero_shot_classification.rst index ca6c04fe..da3f2a36 100644 --- a/docs/public_models/HAILO8/HAILO8_zero_shot_classification.rst +++ b/docs/public_models/HAILO8/HAILO8_zero_shot_classification.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Zero-shot Classification Hailo8 -=========================================================== +Hailo8 Zero-shot Classification +=============================== .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8L/HAILO8l_classification.rst b/docs/public_models/HAILO8L/HAILO8l_classification.rst index 0d3a120d..9e0a5449 100644 --- a/docs/public_models/HAILO8L/HAILO8l_classification.rst +++ b/docs/public_models/HAILO8L/HAILO8l_classification.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Classification Hailo8L -================================================== +Hailo8L Classification +====================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst b/docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst index 8982043f..6cea5891 100644 --- a/docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_depth_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Depth Estimation Hailo8L -==================================================== +Hailo8L Depth Estimation +======================== .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8L/HAILO8l_face_attribute.rst b/docs/public_models/HAILO8L/HAILO8l_face_attribute.rst index e69b3519..0f60c239 100644 --- a/docs/public_models/HAILO8L/HAILO8l_face_attribute.rst +++ b/docs/public_models/HAILO8L/HAILO8l_face_attribute.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Face Attribute Hailo8L -================================================== +Hailo8L Face Attribute +====================== .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8L/HAILO8l_face_detection.rst b/docs/public_models/HAILO8L/HAILO8l_face_detection.rst index 1208ca5d..4db14eb7 100644 --- a/docs/public_models/HAILO8L/HAILO8l_face_detection.rst +++ b/docs/public_models/HAILO8L/HAILO8l_face_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Face Detection Hailo8L -================================================== +Hailo8L Face Detection +====================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8L/HAILO8l_face_recognition.rst b/docs/public_models/HAILO8L/HAILO8l_face_recognition.rst index 0d328fe4..12bece66 100644 --- a/docs/public_models/HAILO8L/HAILO8l_face_recognition.rst +++ b/docs/public_models/HAILO8L/HAILO8l_face_recognition.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Face Recognition Hailo8L -==================================================== +Hailo8L Face Recognition +======================== .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8L/HAILO8l_facial_landmark_detection.rst b/docs/public_models/HAILO8L/HAILO8l_facial_landmark_detection.rst index 236438f3..c0fcded8 100644 --- a/docs/public_models/HAILO8L/HAILO8l_facial_landmark_detection.rst +++ b/docs/public_models/HAILO8L/HAILO8l_facial_landmark_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Facial Landmark Detection Hailo8L -============================================================= +Hailo8L Facial Landmark Detection +================================= .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8L/HAILO8l_hand_landmark_detection.rst b/docs/public_models/HAILO8L/HAILO8l_hand_landmark_detection.rst index 30c9ecd0..fb2e5bfd 100644 --- a/docs/public_models/HAILO8L/HAILO8l_hand_landmark_detection.rst +++ b/docs/public_models/HAILO8L/HAILO8l_hand_landmark_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Hand Landmark detection Hailo8L -=========================================================== +Hailo8L Hand Landmark detection +=============================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8L/HAILO8l_image_denoising.rst b/docs/public_models/HAILO8L/HAILO8l_image_denoising.rst index 8e6abf00..06b1cf1b 100644 --- a/docs/public_models/HAILO8L/HAILO8l_image_denoising.rst +++ b/docs/public_models/HAILO8L/HAILO8l_image_denoising.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Image Denoising Hailo8L -=================================================== +Hailo8L Image Denoising +======================= .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8L/HAILO8l_instance_segmentation.rst b/docs/public_models/HAILO8L/HAILO8l_instance_segmentation.rst index 543447f7..7cd5e760 100644 --- a/docs/public_models/HAILO8L/HAILO8l_instance_segmentation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_instance_segmentation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Instance Segmentation Hailo8L -========================================================= +Hailo8L Instance Segmentation +============================= .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8L/HAILO8l_low_light_enhancement.rst b/docs/public_models/HAILO8L/HAILO8l_low_light_enhancement.rst index 1e2ccd44..63e0dbaf 100644 --- a/docs/public_models/HAILO8L/HAILO8l_low_light_enhancement.rst +++ b/docs/public_models/HAILO8L/HAILO8l_low_light_enhancement.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Low Light Enhancement Hailo8L -========================================================= +Hailo8L Low Light Enhancement +============================= .. 
|rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8L/HAILO8l_object_detection.rst b/docs/public_models/HAILO8L/HAILO8l_object_detection.rst index c64592d8..4883c74f 100644 --- a/docs/public_models/HAILO8L/HAILO8l_object_detection.rst +++ b/docs/public_models/HAILO8L/HAILO8l_object_detection.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Object Detection Hailo8L -==================================================== +Hailo8L Object Detection +======================== .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8L/HAILO8l_person_attribute.rst b/docs/public_models/HAILO8L/HAILO8l_person_attribute.rst index 73bbbd31..e588eb76 100644 --- a/docs/public_models/HAILO8L/HAILO8l_person_attribute.rst +++ b/docs/public_models/HAILO8L/HAILO8l_person_attribute.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Person Attribute Hailo8L -==================================================== +Hailo8L Person Attribute +======================== .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8L/HAILO8l_person_re_id.rst b/docs/public_models/HAILO8L/HAILO8l_person_re_id.rst index 9be02e83..6e50d957 100644 --- a/docs/public_models/HAILO8L/HAILO8l_person_re_id.rst +++ b/docs/public_models/HAILO8L/HAILO8l_person_re_id.rst @@ -1,6 +1,5 @@ - -Public Pre-Trained Models - Person Re-ID Hailo8L -================================================ +Hailo8L Person Re-ID +==================== ..
|rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst b/docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst index 3ab127d4..081d4d8e 100644 --- a/docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_pose_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Pose Estimation Hailo8L -=================================================== +Hailo8L Pose Estimation +======================= .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8L/HAILO8l_semantic_segmentation.rst b/docs/public_models/HAILO8L/HAILO8l_semantic_segmentation.rst index 6c56192f..0a2acf92 100644 --- a/docs/public_models/HAILO8L/HAILO8l_semantic_segmentation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_semantic_segmentation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Semantic Segmentation Hailo8L -========================================================= +Hailo8L Semantic Segmentation +============================= .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8L/HAILO8l_single_person_pose_estimation.rst b/docs/public_models/HAILO8L/HAILO8l_single_person_pose_estimation.rst index a584e327..910317a6 100644 --- a/docs/public_models/HAILO8L/HAILO8l_single_person_pose_estimation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_single_person_pose_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Single Person Pose Estimation Hailo8L -================================================================= +Hailo8L Single Person Pose Estimation +===================================== .. 
|rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8L/HAILO8l_stereo_depth_estimation.rst b/docs/public_models/HAILO8L/HAILO8l_stereo_depth_estimation.rst index 0ac94f1c..9d25487e 100644 --- a/docs/public_models/HAILO8L/HAILO8l_stereo_depth_estimation.rst +++ b/docs/public_models/HAILO8L/HAILO8l_stereo_depth_estimation.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Stereo Depth Estimation Hailo8L -=========================================================== +Hailo8L Stereo Depth Estimation +=============================== .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8L/HAILO8l_super_resolution.rst b/docs/public_models/HAILO8L/HAILO8l_super_resolution.rst index b4845581..c574584a 100644 --- a/docs/public_models/HAILO8L/HAILO8l_super_resolution.rst +++ b/docs/public_models/HAILO8L/HAILO8l_super_resolution.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Super Resolution Hailo8L -==================================================== +Hailo8L Super Resolution +======================== .. |rocket| image:: ../../images/rocket.png :width: 18 diff --git a/docs/public_models/HAILO8L/HAILO8l_zero_shot_classification.rst b/docs/public_models/HAILO8L/HAILO8l_zero_shot_classification.rst index 2ca2b089..517786a0 100644 --- a/docs/public_models/HAILO8L/HAILO8l_zero_shot_classification.rst +++ b/docs/public_models/HAILO8L/HAILO8l_zero_shot_classification.rst @@ -1,6 +1,6 @@ -Public Pre-Trained Models - Zero-shot Classification Hailo8L -============================================================ +Hailo8L Zero-shot Classification +================================ .. 
|rocket| image:: ../../images/rocket.png :width: 18 From 5caa3f5fb18e18e5fa2b908235eb57efa557bdc0 Mon Sep 17 00:00:00 2001 From: HailoModelZoo Date: Mon, 8 Apr 2024 17:00:30 +0300 Subject: [PATCH 17/17] fix --- docs/public_models/HAILO15H/HAILO15H_object_detection.rst | 2 +- docs/public_models/HAILO15M/HAILO15M_object_detection.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/public_models/HAILO15H/HAILO15H_object_detection.rst b/docs/public_models/HAILO15H/HAILO15H_object_detection.rst index 59c7cc1a..a90a1185 100644 --- a/docs/public_models/HAILO15H/HAILO15H_object_detection.rst +++ b/docs/public_models/HAILO15H/HAILO15H_object_detection.rst @@ -339,7 +339,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov5s |star| - 35.33 - 33.98 diff --git a/docs/public_models/HAILO15M/HAILO15M_object_detection.rst b/docs/public_models/HAILO15M/HAILO15M_object_detection.rst index dda1d8b2..15de33dd 100644 --- a/docs/public_models/HAILO15M/HAILO15M_object_detection.rst +++ b/docs/public_models/HAILO15M/HAILO15M_object_detection.rst @@ -339,7 +339,7 @@ COCO - `download `_ - `link `_ - `download `_ - - `download `_ + - None * - yolov5s |star| - 35.33 - 33.98