From 32514d14526331a62f711cfc672d14b25d37841d Mon Sep 17 00:00:00 2001
From: HonzaCuhel
Date: Fri, 31 Jan 2025 13:49:30 +0100
Subject: [PATCH 1/5] Add SAM2.1 annotator

---
 README.md                                     |   3 +-
 datadreamer/dataset_annotation/__init__.py    |   2 +
 .../dataset_annotation/sam2_annotator.py      | 136 ++++++++++++++++++
 .../generate_dataset_from_scratch.py          |  12 +-
 datadreamer/utils/config.py                   |   4 +-
 .../generate_dataset_and_train_yolo.ipynb     |   7 +-
 ..._segmentation_dataset_and_train_yolo.ipynb |   4 +-
 examples/helmet_detection.ipynb               |   7 +-
 ..._by_step_dataset_generation_pipeline.ipynb |   7 +
 requirements.txt                              |   8 +-
 tests/core_tests/unittests/test_annotators.py |  55 +++++++
 11 files changed, 223 insertions(+), 22 deletions(-)
 create mode 100644 datadreamer/dataset_annotation/sam2_annotator.py

diff --git a/README.md b/README.md
index 8d88ebe..0a8cfec 100644
--- a/README.md
+++ b/README.md
@@ -181,7 +181,7 @@ datadreamer --config
 - `--num_objects_range`: Range of objects in a prompt. Default is 1 to 3.
 - `--prompt_generator`: Choose between `simple`, `lm` (Mistral-7B), `tiny` (tiny LM), and `qwen2` (Qwen2.5 LM). Default is `qwen2`.
 - `--image_generator`: Choose image generator, e.g., `sdxl`, `sdxl-turbo`, `sdxl-lightning` or `shuttle-3`. Default is `sdxl-turbo`.
-- `--image_annotator`: Specify the image annotator, like `owlv2` for object detection or `aimv2` or `clip` for image classification or `owlv2-slimsam` for instance segmentation. Default is `owlv2`.
+- `--image_annotator`: Specify the image annotator: `owlv2` for object detection, `aimv2` or `clip` for image classification, or `owlv2-slimsam`/`owlv2-sam2` for instance segmentation. Default is `owlv2`.
 - `--conf_threshold`: Confidence threshold for annotation. Default is `0.15`.
 - `--annotation_iou_threshold`: Intersection over Union (IoU) threshold for annotation. Default is `0.2`.
 - `--prompt_prefix`: Prefix to add to every image generation prompt. Default is `""`. 
@@ -221,6 +221,7 @@ datadreamer --config
 | | [CLIP](https://huggingface.co/openai/clip-vit-base-patch32) | Zero-shot-image-classification |
 | | [AIMv2](https://huggingface.co/apple/aimv2-large-patch14-224-lit) | Zero-shot-image-classification |
 | | [SlimSAM](https://huggingface.co/Zigeng/SlimSAM-uniform-50) | Zero-shot-instance-segmentation |
+| | [SAM2.1](https://huggingface.co/facebook/sam2-hiera-tiny) | Zero-shot-instance-segmentation |

diff --git a/datadreamer/dataset_annotation/__init__.py b/datadreamer/dataset_annotation/__init__.py
index 3fe9f5d..5fcf62c 100644
--- a/datadreamer/dataset_annotation/__init__.py
+++ b/datadreamer/dataset_annotation/__init__.py
@@ -5,6 +5,7 @@
 from .cls_annotator import ImgClassificationAnnotator
 from .image_annotator import BaseAnnotator, TaskList
 from .owlv2_annotator import OWLv2Annotator
+from .sam2_annotator import SAM2Annotator
 from .slimsam_annotator import SlimSAMAnnotator
 
 __all__ = [
@@ -14,5 +15,6 @@
     "OWLv2Annotator",
     "ImgClassificationAnnotator",
     "CLIPAnnotator",
+    "SAM2Annotator",
     "SlimSAMAnnotator",
 ]
diff --git a/datadreamer/dataset_annotation/sam2_annotator.py b/datadreamer/dataset_annotation/sam2_annotator.py
new file mode 100644
index 0000000..2c50805
--- /dev/null
+++ b/datadreamer/dataset_annotation/sam2_annotator.py
@@ -0,0 +1,136 @@
+from __future__ import annotations
+
+import logging
+from typing import List
+
+import numpy as np
+import PIL
+import torch
+from sam2.sam2_image_predictor import SAM2ImagePredictor
+
+from datadreamer.dataset_annotation.image_annotator import BaseAnnotator
+from datadreamer.dataset_annotation.utils import mask_to_polygon
+
+logger = logging.getLogger(__name__)
+
+
+class SAM2Annotator(BaseAnnotator):
+    """A class for image annotation using the SAM2.1 model, specializing in instance
+    segmentation.
+
+    Attributes:
+        model (SAM2ImagePredictor): The SAM2.1 model for instance segmentation.
+        device (str): The device on which the model will run ('cuda' for GPU, 'cpu' for CPU).
+        size (str): The size of the SAM2.1 model to use ('base' or 'large').
+
+    Methods:
+        _init_model(): Initializes the SAM2.1 model.
+        annotate_batch(images, boxes_batch, iou_threshold): Annotates the given images with polygon masks derived from the supplied bounding boxes.
+        release(empty_cuda_cache): Releases resources and optionally empties the CUDA cache.
+    """
+
+    def __init__(
+        self,
+        seed: float = 42,
+        device: str = "cuda",
+        size: str = "base",
+    ) -> None:
+        """Initializes the SAM2Annotator with a specific seed and device.
+
+        Args:
+            seed (float): Seed for reproducibility. Defaults to 42.
+            device (str): The device to run the model on. Defaults to 'cuda'.
+        """
+        super().__init__(seed)
+        self.size = size
+        self.device = device
+        self.model = self._init_model(device=device)
+        self.dtype = torch.bfloat16 if self.device == "cuda" else torch.float16
+
+    def _init_model(self, device: str) -> SAM2ImagePredictor:
+        """Initializes the SAM2.1 model for instance segmentation.
+
+        Returns:
+            SAM2ImagePredictor: The initialized SAM2.1 model.
+        """
+        logger.info(f"Initializing SAM2.1 {self.size} model...")
+        if self.size == "large":
+            return SAM2ImagePredictor.from_pretrained(
+                "facebook/sam2.1-hiera-base-plus", device=device
+            )
+        return SAM2ImagePredictor.from_pretrained(
+            "facebook/sam2-hiera-tiny", device=device
+        )
+
+    def annotate_batch(
+        self,
+        images: List[PIL.Image.Image],
+        boxes_batch: List[np.ndarray],
+        iou_threshold: float = 0.2,
+    ) -> List[List[List[float]]]:
+        """Annotates images for the task of instance segmentation using the SAM2.1
+        model.
+
+        Args:
+            images: The images to be annotated.
+            boxes_batch: The bounding boxes of found objects.
+            iou_threshold (float, optional): Minimum predicted-IoU score a mask must reach to be kept. Defaults to 0.2.
+
+        Returns:
+            List: Per image, a list of polygon masks, one per input box ([] for masks scoring below the threshold).
+        """
+        final_segments = []
+
+        image_batch = [np.array(img.convert("RGB")) for img in images]
+        bboxes_batch = [None if len(boxes) == 0 else boxes for boxes in boxes_batch]
+
+        with torch.inference_mode(), torch.autocast(self.device, dtype=self.dtype):
+            self.model.set_image_batch(image_batch)
+            masks_batch, scores_batch, _ = self.model.predict_batch(
+                box_batch=bboxes_batch,
+                multimask_output=False,
+            )
+
+        n = len(images)
+
+        for i in range(n):
+            if bboxes_batch[i] is None:
+                final_segments.append([])
+                continue
+            boxes = boxes_batch[i].tolist()
+
+            image_masks = []
+            for j in range(len(boxes)):
+                mask, score = masks_batch[i][j], scores_batch[i][j]
+                if score < iou_threshold:
+                    image_masks.append([])
+                    continue
+                mask = mask.astype(np.uint8)
+                polygon = mask_to_polygon(mask)
+                image_masks.append(polygon if len(polygon) != 0 else [])
+
+            final_segments.append(image_masks)
+
+        return final_segments
+
+    def release(self, empty_cuda_cache: bool = False) -> None:
+        """Releases the model and optionally empties the CUDA cache.
+
+        Args:
+            empty_cuda_cache (bool, optional): Whether to empty the CUDA cache. Defaults to False.
+ """ + if empty_cuda_cache: + with torch.no_grad(): + torch.cuda.empty_cache() + + +if __name__ == "__main__": + import requests + from PIL import Image + + url = "https://ultralytics.com/images/bus.jpg" + im = Image.open(requests.get(url, stream=True).raw) + annotator = SAM2Annotator(device="cpu", size="base") + final_segments = annotator.annotate_batch([im], [np.array([[3, 229, 559, 650]])]) + print(len(final_segments), len(final_segments[0])) + print(final_segments[0][0][:5]) diff --git a/datadreamer/pipelines/generate_dataset_from_scratch.py b/datadreamer/pipelines/generate_dataset_from_scratch.py index 56ce04d..ef0fe55 100644 --- a/datadreamer/pipelines/generate_dataset_from_scratch.py +++ b/datadreamer/pipelines/generate_dataset_from_scratch.py @@ -20,6 +20,7 @@ AIMv2Annotator, CLIPAnnotator, OWLv2Annotator, + SAM2Annotator, SlimSAMAnnotator, ) from datadreamer.image_generation import ( @@ -61,8 +62,8 @@ det_annotators = {"owlv2": OWLv2Annotator} clf_annotators = {"clip": CLIPAnnotator, "aimv2": AIMv2Annotator} -inst_seg_annotators = {"owlv2-slimsam": SlimSAMAnnotator} -inst_seg_detectors = {"owlv2-slimsam": OWLv2Annotator} +inst_seg_annotators = {"owlv2-slimsam": SlimSAMAnnotator, "owlv2-sam2": SAM2Annotator} +inst_seg_detectors = {"owlv2-slimsam": OWLv2Annotator, "owlv2-sam2": OWLv2Annotator} setup_logging(use_rich=True) @@ -125,7 +126,7 @@ def parse_args(): parser.add_argument( "--image_annotator", type=str, - choices=["owlv2", "clip", "owlv2-slimsam", "aimv2"], + choices=["owlv2", "clip", "owlv2-slimsam", "aimv2", "owlv2-sam2"], help="Image annotator to use", ) @@ -668,9 +669,10 @@ def read_image_batch(image_batch, batch_num, batch_size): if args.task == "instance-segmentation": if k < len(masks_batch[j]): mask = masks_batch[j][k] - x_points, y_points = zip(*mask) + if len(mask) > 0: + x_points, y_points = zip(*mask) - ax.fill(x_points, y_points, label, alpha=0.5) + ax.fill(x_points, y_points, label, alpha=0.5) labels.append(label) x1, y1, x2, y2 = box diff --git a/datadreamer/utils/config.py b/datadreamer/utils/config.py index 1c90b21..2b73645 100644 --- a/datadreamer/utils/config.py +++ b/datadreamer/utils/config.py @@ -41,7 +41,9 @@ class Config(LuxonisConfig): # Profanity filter arguments disable_lm_filter: bool = False # Annotation arguments - image_annotator: Literal["owlv2", "aimv2", "clip", "owlv2-slimsam"] = "owlv2" + image_annotator: Literal[ + "owlv2", "aimv2", "clip", "owlv2-slimsam", "owlv2-sam2" + ] = "owlv2" conf_threshold: float = 0.15 annotation_iou_threshold: float = 0.2 use_tta: bool = False diff --git a/examples/generate_dataset_and_train_yolo.ipynb b/examples/generate_dataset_and_train_yolo.ipynb index ecfa5d7..a148fb6 100644 --- a/examples/generate_dataset_and_train_yolo.ipynb +++ b/examples/generate_dataset_and_train_yolo.ipynb @@ -5,7 +5,7 @@ "id": "11adc87f", "metadata": {}, "source": [ - "\n", + "\n", "\n", "# DataDreamer Tutorial: Generating a dataset for object detection, training a model, and deploying it to the OAK (optional)" ] @@ -85,7 +85,7 @@ "- `--num_objects_range`: Range of objects in a prompt. Default is 1 to 3.\n", "- `--prompt_generator`: Choose between `simple`, `lm` (Mistral-7B), `tiny` (tiny LM), and `qwen2` (Qwen2.5 LM). Default is `qwen2`.\n", "- `--image_generator`: Choose image generator, e.g., `sdxl`, `sdxl-turbo`, `sdxl-lightning` or `shuttle-3`. 
Default is `sdxl-turbo`.\n", - "- `--image_annotator`: Specify the image annotator, like `owlv2` for object detection or `aimv2` or `clip` for image classification or `owlv2-slimsam` for instance segmentation. Default is `owlv2`.\n", + "- `--image_annotator`: Specify the image annotator, like `owlv2` for object detection or `aimv2` or `clip` for image classification or `owlv2-slimsam` and `owlv2-sam2` for instance segmentation. Default is `owlv2`.\n", "- `--conf_threshold`: Confidence threshold for annotation. Default is `0.15`.\n", "- `--annotation_iou_threshold`: Intersection over Union (IoU) threshold for annotation. Default is `0.2`.\n", "- `--prompt_prefix`: Prefix to add to every image generation prompt. Default is `\"\"`.\n", @@ -104,8 +104,7 @@ "- `--batch_size_image`: Batch size for image generation. Default is `1`.\n", "- `--device`: Choose between `cuda` and `cpu`. Default is `cuda`.\n", "- `--seed`: Set a random seed for image and prompt generation. Default is `42`.\n", - "- `--config`: A path to an optional `.yaml` config file specifying the pipeline's arguments.\n", - "" + "- `--config`: A path to an optional `.yaml` config file specifying the pipeline's arguments.\n" ] }, { diff --git a/examples/generate_instance_segmentation_dataset_and_train_yolo.ipynb b/examples/generate_instance_segmentation_dataset_and_train_yolo.ipynb index 97a1b3d..8b1b7c2 100644 --- a/examples/generate_instance_segmentation_dataset_and_train_yolo.ipynb +++ b/examples/generate_instance_segmentation_dataset_and_train_yolo.ipynb @@ -7,7 +7,7 @@ "id": "8ce1517f-7258-406d-9139-9adadb1a1570" }, "source": [ - "\n", + "\n", "\n", "# DataDreamer Tutorial: Generating a dataset for instance segmentation, training a model, and deploying it to the OAK (optional)" ] @@ -99,7 +99,7 @@ "- `--num_objects_range`: Range of objects in a prompt. Default is 1 to 3.\n", "- `--prompt_generator`: Choose between `simple`, `lm` (Mistral-7B), `tiny` (tiny LM), and `qwen2` (Qwen2.5 LM). Default is `qwen2`.\n", "- `--image_generator`: Choose image generator, e.g., `sdxl`, `sdxl-turbo`, `sdxl-lightning` or `shuttle-3`. Default is `sdxl-turbo`.\n", - "- `--image_annotator`: Specify the image annotator, like `owlv2` for object detection or `aimv2` or `clip` for image classification or `owlv2-slimsam` for instance segmentation. Default is `owlv2`.\n", + "- `--image_annotator`: Specify the image annotator, like `owlv2` for object detection or `aimv2` or `clip` for image classification or `owlv2-slimsam` and `owlv2-sam2` for instance segmentation. Default is `owlv2`.\n", "- `--conf_threshold`: Confidence threshold for annotation. Default is `0.15`.\n", "- `--annotation_iou_threshold`: Intersection over Union (IoU) threshold for annotation. Default is `0.2`.\n", "- `--prompt_prefix`: Prefix to add to every image generation prompt. 
Default is `""`.\n",
diff --git a/examples/helmet_detection.ipynb b/examples/helmet_detection.ipynb
index 9de96b0..4da0ec0 100644
--- a/examples/helmet_detection.ipynb
+++ b/examples/helmet_detection.ipynb
@@ -1,15 +1,10 @@
 {
  "cells": [
   {
-   "attachments": {
-    "image.png": {
-     "image/png": "iVBORw0KGgoAAAANSUhEUg[… several kilobytes of base64-encoded PNG attachment data elided; the patch deletes this embedded image in favor of a hosted one …]
HkE8XIeSp+eQp7gvQqCPY9Urm5yStP8bn4RLjP4jIefzbuOJsR62il2pl1giBmkUg79aeQSOPatq+sxX9HWP0eTNjQ2IyfHVnYvox0zb1a4gIPsMWFJARvqhgBFr3g0ClcYL9S938eu9/Z3e3T5z/II4Io9IrE8d3hKmpm4D1W4joZ0R0M+PEBVJ2aZcwwXkoljBdJ3he58AZeezsUPhDGOXGxoGkhGebCxtsZuFK6CcItv3ZbogqzviVIEz3dM0e4DsNZgGjlQ5Sq3bVyvxOCNT9BOpGrYzL2Eqj1aHGqzYIX/LcrkYK6vCLc5b3oBGAFgOeuxHMMy0K+3ihOJ61xXLw4o6bhTd0J69TuxMYItSnTWLwKQVtwblEdFLX0zlCC42SPeZ49oZm1Bq2US/u2D6+p0Ngv9Ydq/Byl+fzDX0Wy/5nERGCSuMFARtuRs3cZXYrtRav2Fp75hitzKbxJlHhj0bMMHx2Y8sZVwjs1H2Dxlt02hupK1mknOFDABpO9CHEzsRRkiAwSAS27vZD9MW53e8czlUHWSe+stte+EKtzDXjR6CiddozMAqWJAgIAoKAICAICAKCQDkEDpt7/baBCpdrZe4ZAxK1Savw5CXzIht+T8oBL1cLAoKAICAICAKCwHAjoBdET9KeOWuECdTlwYKJ5w13K4n0goAgIAgIAoKAINA4BJZ45kVamQtHiERd1/bMvy/bYwrxfyQJAoKAICAICAKCgCBgBwGtzP5amYuHmERdGajokBEI6mungSVXQUAQEAQEAUFAELCDQOCbZwTKfEcrc/cQEKlNWkW/0SpsLaepQfg+sdMIkqsgIAgIAoKAICAIDB8Ch3jmUR0C9Z9ahZc0j0SFfw386EPBohBbKiUJAoKAICAICAKCgCDQLARaC828the9SytzvlZmECFfNm4mceHRLRU9q1noiDSCgCAgCAgCgoAgIAhkINDafeXDAj/cR/vmY1qZc7Qyd1jQTN2rlblcK/Ml7UcHoswMkeQvQUAQEAQEAUFAEBAEhgcBGGhrf2I37ZtXBr45vOvJ/HStzB+1Mldqz1yvVbSux6/UHd3fN3WM1P+slTkjUOZrgRe9G3ksnT/pi9H38LS/SCoICAKCgCAgCNRB4P8B0U2KllhDTigAAAAASUVORK5CYII=" - } - }, "cell_type": "markdown", "metadata": {}, "source": [ - "![image.png](attachment:image.png)" + "" ] }, { diff --git a/examples/step_by_step_dataset_generation_pipeline.ipynb b/examples/step_by_step_dataset_generation_pipeline.ipynb index b3466a1..d04ee63 100644 --- a/examples/step_by_step_dataset_generation_pipeline.ipynb +++ b/examples/step_by_step_dataset_generation_pipeline.ipynb @@ -1,5 +1,12 @@ { "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "" + ] + }, { "cell_type": "code", "execution_count": null, diff --git a/requirements.txt b/requirements.txt index cdd8a50..7d2c8e1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ -torch>=2.0.0 -torchvision>=0.16.0 +torch>=2.5.1 +torchvision>=0.20.1 transformers>=4.45.2 diffusers>=0.31.0 compel>=2.0.0 @@ -16,4 +16,6 @@ luxonis-ml[all]>=0.5.0 python-box>=7.1.1 gcsfs>=2023.1.0 sentencepiece>=0.2.0 -optimum-quanto>=0.2.6 \ No newline at end of file +optimum-quanto>=0.2.6 +huggingface_hub>=0.28.1 +SAM-2 @ git+https://github.com/facebookresearch/sam2.git \ No newline at end of file diff --git a/tests/core_tests/unittests/test_annotators.py b/tests/core_tests/unittests/test_annotators.py index eb5c986..0798cff 100644 --- a/tests/core_tests/unittests/test_annotators.py +++ b/tests/core_tests/unittests/test_annotators.py @@ -10,6 +10,7 @@ from datadreamer.dataset_annotation.aimv2_annotator import AIMv2Annotator from datadreamer.dataset_annotation.clip_annotator import CLIPAnnotator from datadreamer.dataset_annotation.owlv2_annotator import OWLv2Annotator +from datadreamer.dataset_annotation.sam2_annotator import SAM2Annotator from datadreamer.dataset_annotation.slimsam_annotator import SlimSAMAnnotator # Get the total disk space in GB @@ -183,3 +184,57 @@ def test_cuda_slimsam_large_annotator(): ) def test_cpu_slimsam_large_annotator(): _check_slimsam_annotator("cpu", size="large") + + +def _check_sam2_annotator(device: str, size: str = "base"): + url = "https://ultralytics.com/images/bus.jpg" + im = Image.open(requests.get(url, stream=True).raw) + annotator = SAM2Annotator(device=device, size=size) + masks = annotator.annotate_batch([im], [np.array([[3, 229, 559, 650]])]) + w, h = im.width, im.height + # Check that the masks are lists + assert isinstance(masks, list) and len(masks) == 1 + # Check that the masks are [B, O, N, 2], where + # - B = batch 
size + # - O = number of objects + # - N = number of points of the mask segment polygon (at least 3 to be polygon) + assert isinstance(masks[0], list) and len(masks[0]) == 1 + assert isinstance(masks[0][0], list) and len(masks[0][0]) >= 3 + for point in masks[0][0]: + # Check that it is a 2D point + assert len(point) == 2 + assert 0 <= point[0] <= w and 0 <= point[1] <= h + + annotator.release(empty_cuda_cache=True if device != "cpu" else False) + + +@pytest.mark.skipif( + not torch.cuda.is_available() or total_disk_space < 16, + reason="Test requires GPU and 16GB of HDD", +) +def test_cuda_sam2_base_annotator(): + _check_sam2_annotator("cuda") + + +@pytest.mark.skipif( + total_disk_space < 16, + reason="Test requires at least 16GB of HDD", +) +def test_cpu_sam2_base_annotator(): + _check_sam2_annotator("cpu") + + +@pytest.mark.skipif( + not torch.cuda.is_available() or total_disk_space < 16, + reason="Test requires GPU and 16GB of HDD", +) +def test_cuda_sam2_large_annotator(): + _check_sam2_annotator("cuda", size="large") + + +@pytest.mark.skipif( + total_disk_space < 16, + reason="Test requires at least 16GB of HDD", +) +def test_cpu_sam2_large_annotator(): + _check_sam2_annotator("cpu", size="large") From bdffe962ab4ed92b41dc5445cc272e00c2cf3c81 Mon Sep 17 00:00:00 2001 From: HonzaCuhel Date: Fri, 31 Jan 2025 15:58:53 +0100 Subject: [PATCH 2/5] Squeeze the mask if it's 3D --- datadreamer/dataset_annotation/sam2_annotator.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/datadreamer/dataset_annotation/sam2_annotator.py b/datadreamer/dataset_annotation/sam2_annotator.py index 2c50805..ceb55e6 100644 --- a/datadreamer/dataset_annotation/sam2_annotator.py +++ b/datadreamer/dataset_annotation/sam2_annotator.py @@ -101,11 +101,12 @@ def annotate_batch( image_masks = [] for j in range(len(boxes)): - mask, score = masks_batch[i][j], scores_batch[i][j] + mask, score = masks_batch[i][j].astype(np.uint8), scores_batch[i][j] if score < iou_threshold: image_masks.append([]) continue - mask = mask.astype(np.uint8) + if len(mask.shape) == 3: + mask = mask.squeeze(0) polygon = mask_to_polygon(mask) image_masks.append(polygon if len(polygon) != 0 else []) From fe83647bbadd4084f24f2066ad1abef55f039b75 Mon Sep 17 00:00:00 2001 From: HonzaCuhel Date: Sat, 1 Feb 2025 15:14:10 +0100 Subject: [PATCH 3/5] Update dependencies --- requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements.txt b/requirements.txt index 7d2c8e1..59b9e3a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,14 +1,14 @@ torch>=2.5.1 torchvision>=0.20.1 -transformers>=4.45.2 -diffusers>=0.31.0 +transformers>=4.48.2 +diffusers>=0.32.2 compel>=2.0.0 tqdm>=4.0.0 Pillow>=9.0.0 numpy>=1.22.0 matplotlib>=3.6.0 opencv-python>=4.7.0 -accelerate>=0.25.0 +accelerate>=1.3.0 scipy>=1.10.0 bitsandbytes>=0.42.0 nltk>=3.8.1 From 37e2c46663913ead00bab2b9d72f89d65fafd7b7 Mon Sep 17 00:00:00 2001 From: HonzaCuhel Date: Mon, 3 Feb 2025 15:43:29 +0100 Subject: [PATCH 4/5] Update model sizes --- README.md | 2 +- datadreamer/dataset_annotation/sam2_annotator.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 0a8cfec..3f3786b 100644 --- a/README.md +++ b/README.md @@ -221,7 +221,7 @@ datadreamer --config | | [CLIP](https://huggingface.co/openai/clip-vit-base-patch32) | Zero-shot-image-classification | | | [AIMv2](https://huggingface.co/apple/aimv2-large-patch14-224-lit) | Zero-shot-image-classification | | | 
[SlimSAM](https://huggingface.co/Zigeng/SlimSAM-uniform-50) | Zero-shot-instance-segmentation |
-| | [SAM2.1](https://huggingface.co/facebook/sam2-hiera-tiny) | Zero-shot-instance-segmentation |
+| | [SAM2.1](https://huggingface.co/facebook/sam2.1-hiera-large) | Zero-shot-instance-segmentation |

diff --git a/datadreamer/dataset_annotation/sam2_annotator.py b/datadreamer/dataset_annotation/sam2_annotator.py
index ceb55e6..ad16d6c 100644
--- a/datadreamer/dataset_annotation/sam2_annotator.py
+++ b/datadreamer/dataset_annotation/sam2_annotator.py
@@ -56,10 +56,10 @@ def _init_model(self, device: str) -> SAM2ImagePredictor:
         logger.info(f"Initializing SAM2.1 {self.size} model...")
         if self.size == "large":
             return SAM2ImagePredictor.from_pretrained(
-                "facebook/sam2.1-hiera-base-plus", device=device
+                "facebook/sam2.1-hiera-large", device=device
             )
         return SAM2ImagePredictor.from_pretrained(
-            "facebook/sam2-hiera-tiny", device=device
+            "facebook/sam2.1-hiera-base-plus", device=device
         )
 
     def annotate_batch(
@@ -131,7 +131,7 @@ def release(self, empty_cuda_cache: bool = False) -> None:
 
     url = "https://ultralytics.com/images/bus.jpg"
     im = Image.open(requests.get(url, stream=True).raw)
-    annotator = SAM2Annotator(device="cpu", size="base")
+    annotator = SAM2Annotator(device="cpu", size="large")
     final_segments = annotator.annotate_batch([im], [np.array([[3, 229, 559, 650]])])
     print(len(final_segments), len(final_segments[0]))
    print(final_segments[0][0][:5])

From f8ef89c176b4af11001ca71797341c63091a425e Mon Sep 17 00:00:00 2001
From: HonzaCuhel
Date: Tue, 4 Feb 2025 08:26:57 +0100
Subject: [PATCH 5/5] Fix dependencies

---
 requirements.txt | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index 59b9e3a..bb3ce9a 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,14 +1,14 @@
-torch>=2.5.1
-torchvision>=0.20.1
-transformers>=4.48.2
-diffusers>=0.32.2
+torch>=2.0.0,<=2.5.1
+torchvision>=0.16.0
+transformers>=4.45.2
+diffusers>=0.31.0
 compel>=2.0.0
 tqdm>=4.0.0
 Pillow>=9.0.0
 numpy>=1.22.0
 matplotlib>=3.6.0
 opencv-python>=4.7.0
-accelerate>=1.3.0
+accelerate>=0.25.0
 scipy>=1.10.0
 bitsandbytes>=0.42.0
 nltk>=3.8.1
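
Taken together, the series pairs the existing OWLv2 detector with the new SAM2.1 segmenter: the `owlv2-sam2` entry in `inst_seg_detectors` proposes boxes, and the matching entry in `inst_seg_annotators` turns them into polygons. A minimal end-to-end sketch of that pairing follows; the `OWLv2Annotator.annotate_batch` signature and its (boxes, scores, labels) return order are assumptions based on the existing detection annotator, which this series does not show, and `release()` is assumed to mirror the other annotators' API.

```python
import numpy as np
import requests
from PIL import Image

from datadreamer.dataset_annotation import OWLv2Annotator, SAM2Annotator

im = Image.open(
    requests.get("https://ultralytics.com/images/bus.jpg", stream=True).raw
)

# Detection step: assumed to return per-image boxes, scores, and labels,
# mirroring how the pipeline feeds inst_seg_detectors into inst_seg_annotators.
detector = OWLv2Annotator(device="cuda")
boxes_batch, scores_batch, labels_batch = detector.annotate_batch(
    [im], ["bus", "person"], conf_threshold=0.15
)

# Segmentation step: one polygon per box; [] wherever SAM2.1's predicted-IoU
# score falls below iou_threshold.
segmenter = SAM2Annotator(device="cuda", size="large")
masks_batch = segmenter.annotate_batch(
    [im], [np.asarray(boxes_batch[0])], iou_threshold=0.2
)
print(len(masks_batch[0]), "masks for", len(boxes_batch[0]), "boxes")

detector.release(empty_cuda_cache=True)
segmenter.release(empty_cuda_cache=True)
```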
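The polygon conversion itself is delegated to the pre-existing `mask_to_polygon` helper imported from `datadreamer.dataset_annotation.utils`, which the series does not show. A rough, hypothetical equivalent — assuming it keeps the largest external contour of the binary mask (OpenCV is already a dependency) — might look like:

```python
import cv2
import numpy as np


def mask_to_polygon_sketch(mask: np.ndarray) -> list:
    """Approximate the largest connected region of a binary mask as a polygon.

    Hypothetical stand-in for datadreamer's mask_to_polygon; the real helper
    may differ in contour selection and simplification.
    """
    contours, _ = cv2.findContours(
        mask.astype(np.uint8), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE
    )
    if not contours:
        return []  # empty mask -> no polygon, matching the annotator's [] case
    largest = max(contours, key=cv2.contourArea)
    # findContours yields (N, 1, 2) integer arrays; flatten to [[x, y], ...]
    return largest.reshape(-1, 2).tolist()
```

Under this reading, the `image_masks.append([])` branches in `annotate_batch` fall out naturally: an all-zero mask produces no contours, so the annotator records an empty polygon rather than dropping the box index.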