diff --git a/demo/panoptic_segmentation_shapes_dataset_demo.ipynb b/demo/panoptic_segmentation_shapes_dataset_demo.ipynb new file mode 100644 index 000000000..6703fe38a --- /dev/null +++ b/demo/panoptic_segmentation_shapes_dataset_demo.ipynb @@ -0,0 +1,2800 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "name": "objdet.ipynb", + "version": "0.3.2", + "provenance": [], + "collapsed_sections": [ + "-N9mxq4OX6Yc", + "aiLvxXRpDbiq", + "xnr8tbDz7WjS", + "2hpTvuSp830x", + "BI2ncK7kATEh", + "F9njOSX0AU5-", + "P8rXzGehNU_g", + "hbzY16ocEdrg", + "If8z4OZfDHmC", + "mOo-0LGFEAmc", + "bbCBInqHFUg7", + "NVjPYFN1Pz6D", + "BTKsrHa-TkGr", + "r-SfVh-qCmhe", + "ONldqRzHUAm0", + "ccHt8YMdKq6K", + "8S_78fk1xLfJ" + ] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "accelerator": "GPU" + }, + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "268x1mG64rCy", + "colab_type": "text" + }, + "source": [ + "# Installation" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "VNvKG2TF3Y0B", + "colab_type": "code", + "outputId": "37eae83c-0d54-40b0-85fc-4bdd42c64fd6", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + } + }, + "source": [ + "%%writefile setup.sh\n", + "\n", + "# maskrcnn_benchmark and coco api dependencies\n", + "pip install ninja yacs cython matplotlib tqdm opencv-python\n", + "\n", + "# follow PyTorch installation in https://pytorch.org/get-started/locally/\n", + "# we give the instructions for CUDA 9.0\n", + "pip install -c pytorch pytorch-nightly torchvision cudatoolkit=9.0\n", + "\n", + "\n", + "git clone https://github.com/cocodataset/cocoapi.git\n", + "cd cocoapi/PythonAPI\n", + "python setup.py build_ext install\n", + "cd ../../\n", + "\n", + "# install apex\n", + "rm -rf apex\n", + "git clone https://github.com/NVIDIA/apex.git\n", + "cd apex\n", + "git pull\n", + "python setup.py install --cuda_ext --cpp_ext\n", + "cd ../\n", + "\n", + "# install PyTorch Detection\n", + "git clone https://github.com/facebookresearch/maskrcnn-benchmark.git\n", + "cd maskrcnn-benchmark\n", + "\n", + "# the following will install the lib with\n", + "# symbolic links, so that you can modify\n", + "# the files if you want and won't need to\n", + "# re-build it\n", + "python setup.py build develop\n" + ], + "execution_count": 1, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Writing setup.sh\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "NYzsp3Ng3mOy", + "colab_type": "code", + "colab": {} + }, + "source": [ + "!sh setup.sh" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "LUQbRTRocPNN", + "colab_type": "text" + }, + "source": [ + "### Modify YACS Config" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "cwCmPMeccUzz", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + }, + "outputId": "b2eb5f82-ee23-447a-eab4-fa234fb4ae79" + }, + "source": [ + "%%writefile maskrcnn-benchmark/maskrcnn_benchmark/config/defaults.py\n", + "# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved.\n", + "import os\n", + "\n", + "from yacs.config import CfgNode as CN\n", + "\n", + "\n", + "# -----------------------------------------------------------------------------\n", + "# Convention about Training / Test specific parameters\n", + "# -----------------------------------------------------------------------------\n", + "# Whenever an argument can be either used for training or for testing, the\n", + "# corresponding name will be post-fixed by a _TRAIN for a training parameter,\n", + "# or _TEST for a test-specific parameter.\n", + "# For example, the maximum image side during training will be\n", + "# INPUT.MAX_SIZE_TRAIN, while for testing it will be\n", + "# INPUT.MAX_SIZE_TEST\n", + "\n", + "# -----------------------------------------------------------------------------\n", + "# Config definition\n", + "# -----------------------------------------------------------------------------\n", + "\n", + "_C = CN()\n", + "\n", + "_C.MODEL = CN()\n", + "_C.MODEL.RPN_ONLY = False\n", + "_C.MODEL.MASK_ON = False\n", + "_C.MODEL.RETINANET_ON = False\n", + "_C.MODEL.KEYPOINT_ON = False\n", + "_C.MODEL.DEVICE = \"cuda\"\n", + "_C.MODEL.META_ARCHITECTURE = \"GeneralizedRCNN\"\n", + "_C.MODEL.CLS_AGNOSTIC_BBOX_REG = False\n", + "\n", + "# If the WEIGHT starts with a catalog://, like :R-50, the code will look for\n", + "# the path in paths_catalog. Else, it will use it as the specified absolute\n", + "# path\n", + "_C.MODEL.WEIGHT = \"\"\n", + "\n", + "\n", + "# -----------------------------------------------------------------------------\n", + "# INPUT\n", + "# -----------------------------------------------------------------------------\n", + "_C.INPUT = CN()\n", + "# Size of the smallest side of the image during training\n", + "_C.INPUT.MIN_SIZE_TRAIN = (800,) # (800,)\n", + "# Maximum size of the side of the image during training\n", + "_C.INPUT.MAX_SIZE_TRAIN = 1333\n", + "# Size of the smallest side of the image during testing\n", + "_C.INPUT.MIN_SIZE_TEST = 800\n", + "# Maximum size of the side of the image during testing\n", + "_C.INPUT.MAX_SIZE_TEST = 1333\n", + "# Values to be used for image normalization\n", + "_C.INPUT.PIXEL_MEAN = [102.9801, 115.9465, 122.7717]\n", + "# Values to be used for image normalization\n", + "_C.INPUT.PIXEL_STD = [1., 1., 1.]\n", + "# Convert image to BGR format (for Caffe2 models), in range 0-255\n", + "_C.INPUT.TO_BGR255 = True\n", + "\n", + "# Image ColorJitter\n", + "_C.INPUT.BRIGHTNESS = 0.0\n", + "_C.INPUT.CONTRAST = 0.0\n", + "_C.INPUT.SATURATION = 0.0\n", + "_C.INPUT.HUE = 0.0\n", + "\n", + "_C.INPUT.VERTICAL_FLIP_PROB_TRAIN = 0.0\n", + "\n", + "# -----------------------------------------------------------------------------\n", + "# Dataset\n", + "# -----------------------------------------------------------------------------\n", + "_C.DATASETS = CN()\n", + "# List of the dataset names for training, as present in paths_catalog.py\n", + "_C.DATASETS.TRAIN = ()\n", + "# List of the dataset names for testing, as present in paths_catalog.py\n", + "_C.DATASETS.TEST = ()\n", + "\n", + "# -----------------------------------------------------------------------------\n", + "# DataLoader\n", + "# -----------------------------------------------------------------------------\n", + "_C.DATALOADER = CN()\n", + "# Number of data loading threads\n", + "_C.DATALOADER.NUM_WORKERS = 4\n", + "# If > 0, this enforces that each collated batch should have a size divisible\n", + "# by SIZE_DIVISIBILITY\n", + "_C.DATALOADER.SIZE_DIVISIBILITY = 0\n", + "# 
If True, each batch should contain only images for which the aspect ratio\n", + "# is compatible. This groups portrait images together, and landscape images\n", + "# are not batched with portrait images.\n", + "_C.DATALOADER.ASPECT_RATIO_GROUPING = True\n", + "\n", + "\n", + "# ---------------------------------------------------------------------------- #\n", + "# Backbone options\n", + "# ---------------------------------------------------------------------------- #\n", + "_C.MODEL.BACKBONE = CN()\n", + "\n", + "# The backbone conv body to use\n", + "# The string must match a function that is imported in modeling.model_builder\n", + "# (e.g., 'FPN.add_fpn_ResNet101_conv5_body' to specify a ResNet-101-FPN\n", + "# backbone)\n", + "_C.MODEL.BACKBONE.CONV_BODY = \"R-50-C4\"\n", + "\n", + "# Add StopGrad at a specified stage so the bottom layers are frozen\n", + "_C.MODEL.BACKBONE.FREEZE_CONV_BODY_AT = 2\n", + "\n", + "\n", + "# ---------------------------------------------------------------------------- #\n", + "# FPN options\n", + "# ---------------------------------------------------------------------------- #\n", + "_C.MODEL.FPN = CN()\n", + "_C.MODEL.FPN.USE_GN = False\n", + "_C.MODEL.FPN.USE_RELU = False\n", + "\n", + "\n", + "# ---------------------------------------------------------------------------- #\n", + "# Group Norm options\n", + "# ---------------------------------------------------------------------------- #\n", + "_C.MODEL.GROUP_NORM = CN()\n", + "# Number of dimensions per group in GroupNorm (-1 if using NUM_GROUPS)\n", + "_C.MODEL.GROUP_NORM.DIM_PER_GP = -1\n", + "# Number of groups in GroupNorm (-1 if using DIM_PER_GP)\n", + "_C.MODEL.GROUP_NORM.NUM_GROUPS = 32\n", + "# GroupNorm's small constant in the denominator\n", + "_C.MODEL.GROUP_NORM.EPSILON = 1e-5\n", + "\n", + "\n", + "# ---------------------------------------------------------------------------- #\n", + "# RPN options\n", + "# ---------------------------------------------------------------------------- #\n", + "_C.MODEL.RPN = CN()\n", + "_C.MODEL.RPN.USE_FPN = False\n", + "# Base RPN anchor sizes given in absolute pixels w.r.t. the scaled network input\n", + "_C.MODEL.RPN.ANCHOR_SIZES = (32, 64, 128, 256, 512)\n", + "# Stride of the feature map that RPN is attached.\n", + "# For FPN, number of strides should match number of scales\n", + "_C.MODEL.RPN.ANCHOR_STRIDE = (16,)\n", + "# RPN anchor aspect ratios\n", + "_C.MODEL.RPN.ASPECT_RATIOS = (0.5, 1.0, 2.0)\n", + "# Remove RPN anchors that go outside the image by RPN_STRADDLE_THRESH pixels\n", + "# Set to -1 or a large value, e.g. 
100000, to disable pruning anchors\n", + "_C.MODEL.RPN.STRADDLE_THRESH = 0\n", + "# Minimum overlap required between an anchor and ground-truth box for the\n", + "# (anchor, gt box) pair to be a positive example (IoU >= FG_IOU_THRESHOLD\n", + "# ==> positive RPN example)\n", + "_C.MODEL.RPN.FG_IOU_THRESHOLD = 0.7\n", + "# Maximum overlap allowed between an anchor and ground-truth box for the\n", + "# (anchor, gt box) pair to be a negative examples (IoU < BG_IOU_THRESHOLD\n", + "# ==> negative RPN example)\n", + "_C.MODEL.RPN.BG_IOU_THRESHOLD = 0.3\n", + "# Total number of RPN examples per image\n", + "_C.MODEL.RPN.BATCH_SIZE_PER_IMAGE = 256\n", + "# Target fraction of foreground (positive) examples per RPN minibatch\n", + "_C.MODEL.RPN.POSITIVE_FRACTION = 0.5\n", + "# Number of top scoring RPN proposals to keep before applying NMS\n", + "# When FPN is used, this is *per FPN level* (not total)\n", + "_C.MODEL.RPN.PRE_NMS_TOP_N_TRAIN = 12000\n", + "_C.MODEL.RPN.PRE_NMS_TOP_N_TEST = 6000\n", + "# Number of top scoring RPN proposals to keep after applying NMS\n", + "_C.MODEL.RPN.POST_NMS_TOP_N_TRAIN = 2000\n", + "_C.MODEL.RPN.POST_NMS_TOP_N_TEST = 1000\n", + "# NMS threshold used on RPN proposals\n", + "_C.MODEL.RPN.NMS_THRESH = 0.7\n", + "# Proposal height and width both need to be greater than RPN_MIN_SIZE\n", + "# (a the scale used during training or inference)\n", + "_C.MODEL.RPN.MIN_SIZE = 0\n", + "# Number of top scoring RPN proposals to keep after combining proposals from\n", + "# all FPN levels\n", + "_C.MODEL.RPN.FPN_POST_NMS_TOP_N_TRAIN = 2000\n", + "_C.MODEL.RPN.FPN_POST_NMS_TOP_N_TEST = 2000\n", + "# Apply the post NMS per batch (default) or per image during training\n", + "# (default is True to be consistent with Detectron, see Issue #672)\n", + "_C.MODEL.RPN.FPN_POST_NMS_PER_BATCH = True\n", + "# Custom rpn head, empty to use default conv or separable conv\n", + "_C.MODEL.RPN.RPN_HEAD = \"SingleConvRPNHead\"\n", + "\n", + "\n", + "# ---------------------------------------------------------------------------- #\n", + "# ROI HEADS options\n", + "# ---------------------------------------------------------------------------- #\n", + "_C.MODEL.ROI_HEADS = CN()\n", + "_C.MODEL.ROI_HEADS.USE_FPN = False\n", + "# Overlap threshold for an RoI to be considered foreground (if >= FG_IOU_THRESHOLD)\n", + "_C.MODEL.ROI_HEADS.FG_IOU_THRESHOLD = 0.5\n", + "# Overlap threshold for an RoI to be considered background\n", + "# (class = 0 if overlap in [0, BG_IOU_THRESHOLD))\n", + "_C.MODEL.ROI_HEADS.BG_IOU_THRESHOLD = 0.5\n", + "# Default weights on (dx, dy, dw, dh) for normalizing bbox regression targets\n", + "# These are empirically chosen to approximately lead to unit variance targets\n", + "_C.MODEL.ROI_HEADS.BBOX_REG_WEIGHTS = (10., 10., 5., 5.)\n", + "# RoI minibatch size *per image* (number of regions of interest [ROIs])\n", + "# Total number of RoIs per training minibatch =\n", + "# TRAIN.BATCH_SIZE_PER_IM * TRAIN.IMS_PER_BATCH\n", + "# E.g., a common configuration is: 512 * 2 * 8 = 8192\n", + "_C.MODEL.ROI_HEADS.BATCH_SIZE_PER_IMAGE = 512\n", + "# Target fraction of RoI minibatch that is labeled foreground (i.e. 
class > 0)\n", + "_C.MODEL.ROI_HEADS.POSITIVE_FRACTION = 0.25\n", + "\n", + "# Only used on test mode\n", + "\n", + "# Minimum score threshold (assuming scores in a [0, 1] range); a value chosen to\n", + "# balance obtaining high recall with not having too many low precision\n", + "# detections that will slow down inference post processing steps (like NMS)\n", + "_C.MODEL.ROI_HEADS.SCORE_THRESH = 0.05\n", + "# Overlap threshold used for non-maximum suppression (suppress boxes with\n", + "# IoU >= this threshold)\n", + "_C.MODEL.ROI_HEADS.NMS = 0.5\n", + "# Maximum number of detections to return per image (100 is based on the limit\n", + "# established for the COCO dataset)\n", + "_C.MODEL.ROI_HEADS.DETECTIONS_PER_IMG = 100\n", + "\n", + "\n", + "_C.MODEL.ROI_BOX_HEAD = CN()\n", + "_C.MODEL.ROI_BOX_HEAD.FEATURE_EXTRACTOR = \"ResNet50Conv5ROIFeatureExtractor\"\n", + "_C.MODEL.ROI_BOX_HEAD.PREDICTOR = \"FastRCNNPredictor\"\n", + "_C.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION = 14\n", + "_C.MODEL.ROI_BOX_HEAD.POOLER_SAMPLING_RATIO = 0\n", + "_C.MODEL.ROI_BOX_HEAD.POOLER_SCALES = (1.0 / 16,)\n", + "_C.MODEL.ROI_BOX_HEAD.NUM_CLASSES = 81\n", + "# Hidden layer dimension when using an MLP for the RoI box head\n", + "_C.MODEL.ROI_BOX_HEAD.MLP_HEAD_DIM = 1024\n", + "# GN\n", + "_C.MODEL.ROI_BOX_HEAD.USE_GN = False\n", + "# Dilation\n", + "_C.MODEL.ROI_BOX_HEAD.DILATION = 1\n", + "_C.MODEL.ROI_BOX_HEAD.CONV_HEAD_DIM = 256\n", + "_C.MODEL.ROI_BOX_HEAD.NUM_STACKED_CONVS = 4\n", + "\n", + "\n", + "_C.MODEL.ROI_MASK_HEAD = CN()\n", + "_C.MODEL.ROI_MASK_HEAD.FEATURE_EXTRACTOR = \"ResNet50Conv5ROIFeatureExtractor\"\n", + "_C.MODEL.ROI_MASK_HEAD.PREDICTOR = \"MaskRCNNC4Predictor\"\n", + "_C.MODEL.ROI_MASK_HEAD.POOLER_RESOLUTION = 14\n", + "_C.MODEL.ROI_MASK_HEAD.POOLER_SAMPLING_RATIO = 0\n", + "_C.MODEL.ROI_MASK_HEAD.POOLER_SCALES = (1.0 / 16,)\n", + "_C.MODEL.ROI_MASK_HEAD.MLP_HEAD_DIM = 1024\n", + "_C.MODEL.ROI_MASK_HEAD.CONV_LAYERS = (256, 256, 256, 256)\n", + "_C.MODEL.ROI_MASK_HEAD.RESOLUTION = 14\n", + "_C.MODEL.ROI_MASK_HEAD.SHARE_BOX_FEATURE_EXTRACTOR = True\n", + "# Whether or not resize and translate masks to the input image.\n", + "_C.MODEL.ROI_MASK_HEAD.POSTPROCESS_MASKS = False\n", + "_C.MODEL.ROI_MASK_HEAD.POSTPROCESS_MASKS_THRESHOLD = 0.5\n", + "# Dilation\n", + "_C.MODEL.ROI_MASK_HEAD.DILATION = 1\n", + "# GN\n", + "_C.MODEL.ROI_MASK_HEAD.USE_GN = False\n", + "\n", + "_C.MODEL.ROI_KEYPOINT_HEAD = CN()\n", + "_C.MODEL.ROI_KEYPOINT_HEAD.FEATURE_EXTRACTOR = \"KeypointRCNNFeatureExtractor\"\n", + "_C.MODEL.ROI_KEYPOINT_HEAD.PREDICTOR = \"KeypointRCNNPredictor\"\n", + "_C.MODEL.ROI_KEYPOINT_HEAD.POOLER_RESOLUTION = 14\n", + "_C.MODEL.ROI_KEYPOINT_HEAD.POOLER_SAMPLING_RATIO = 0\n", + "_C.MODEL.ROI_KEYPOINT_HEAD.POOLER_SCALES = (1.0 / 16,)\n", + "_C.MODEL.ROI_KEYPOINT_HEAD.MLP_HEAD_DIM = 1024\n", + "_C.MODEL.ROI_KEYPOINT_HEAD.CONV_LAYERS = tuple(512 for _ in range(8))\n", + "_C.MODEL.ROI_KEYPOINT_HEAD.RESOLUTION = 14\n", + "_C.MODEL.ROI_KEYPOINT_HEAD.NUM_CLASSES = 17\n", + "_C.MODEL.ROI_KEYPOINT_HEAD.SHARE_BOX_FEATURE_EXTRACTOR = True\n", + "\n", + "# ---------------------------------------------------------------------------- #\n", + "# ResNe[X]t options (ResNets = {ResNet, ResNeXt}\n", + "# Note that parts of a resnet may be used for both the backbone and the head\n", + "# These options apply to both\n", + "# ---------------------------------------------------------------------------- #\n", + "_C.MODEL.RESNETS = CN()\n", + "\n", + "# Number of groups to use; 1 ==> ResNet; > 1 ==> ResNeXt\n", + 
"_C.MODEL.RESNETS.NUM_GROUPS = 1\n", + "\n", + "# Baseline width of each group\n", + "_C.MODEL.RESNETS.WIDTH_PER_GROUP = 64\n", + "\n", + "# Place the stride 2 conv on the 1x1 filter\n", + "# Use True only for the original MSRA ResNet; use False for C2 and Torch models\n", + "_C.MODEL.RESNETS.STRIDE_IN_1X1 = True\n", + "\n", + "# Residual transformation function\n", + "_C.MODEL.RESNETS.TRANS_FUNC = \"BottleneckWithFixedBatchNorm\"\n", + "# ResNet's stem function (conv1 and pool1)\n", + "_C.MODEL.RESNETS.STEM_FUNC = \"StemWithFixedBatchNorm\"\n", + "\n", + "# Apply dilation in stage \"res5\"\n", + "_C.MODEL.RESNETS.RES5_DILATION = 1\n", + "\n", + "_C.MODEL.RESNETS.BACKBONE_OUT_CHANNELS = 256 * 4\n", + "_C.MODEL.RESNETS.RES2_OUT_CHANNELS = 256\n", + "_C.MODEL.RESNETS.STEM_OUT_CHANNELS = 64\n", + "\n", + "_C.MODEL.RESNETS.STAGE_WITH_DCN = (False, False, False, False)\n", + "_C.MODEL.RESNETS.WITH_MODULATED_DCN = False\n", + "_C.MODEL.RESNETS.DEFORMABLE_GROUPS = 1\n", + "\n", + "\n", + "# ---------------------------------------------------------------------------- #\n", + "# RetinaNet Options (Follow the Detectron version)\n", + "# ---------------------------------------------------------------------------- #\n", + "_C.MODEL.RETINANET = CN()\n", + "\n", + "# This is the number of foreground classes and background.\n", + "_C.MODEL.RETINANET.NUM_CLASSES = 81\n", + "\n", + "# Anchor aspect ratios to use\n", + "_C.MODEL.RETINANET.ANCHOR_SIZES = (32, 64, 128, 256, 512)\n", + "_C.MODEL.RETINANET.ASPECT_RATIOS = (0.5, 1.0, 2.0)\n", + "_C.MODEL.RETINANET.ANCHOR_STRIDES = (8, 16, 32, 64, 128)\n", + "_C.MODEL.RETINANET.STRADDLE_THRESH = 0\n", + "\n", + "# Anchor scales per octave\n", + "_C.MODEL.RETINANET.OCTAVE = 2.0\n", + "_C.MODEL.RETINANET.SCALES_PER_OCTAVE = 3\n", + "\n", + "# Use C5 or P5 to generate P6\n", + "_C.MODEL.RETINANET.USE_C5 = True\n", + "\n", + "# Convolutions to use in the cls and bbox tower\n", + "# NOTE: this doesn't include the last conv for logits\n", + "_C.MODEL.RETINANET.NUM_CONVS = 4\n", + "\n", + "# Weight for bbox_regression loss\n", + "_C.MODEL.RETINANET.BBOX_REG_WEIGHT = 4.0\n", + "\n", + "# Smooth L1 loss beta for bbox regression\n", + "_C.MODEL.RETINANET.BBOX_REG_BETA = 0.11\n", + "\n", + "# During inference, #locs to select based on cls score before NMS is performed\n", + "# per FPN level\n", + "_C.MODEL.RETINANET.PRE_NMS_TOP_N = 1000\n", + "\n", + "# IoU overlap ratio for labeling an anchor as positive\n", + "# Anchors with >= iou overlap are labeled positive\n", + "_C.MODEL.RETINANET.FG_IOU_THRESHOLD = 0.5\n", + "\n", + "# IoU overlap ratio for labeling an anchor as negative\n", + "# Anchors with < iou overlap are labeled negative\n", + "_C.MODEL.RETINANET.BG_IOU_THRESHOLD = 0.4\n", + "\n", + "# Focal loss parameter: alpha\n", + "_C.MODEL.RETINANET.LOSS_ALPHA = 0.25\n", + "\n", + "# Focal loss parameter: gamma\n", + "_C.MODEL.RETINANET.LOSS_GAMMA = 2.0\n", + "\n", + "# Prior prob for the positives at the beginning of training. 
This is used to set\n", + "# the bias init for the logits layer\n", + "_C.MODEL.RETINANET.PRIOR_PROB = 0.01\n", + "\n", + "# Inference cls score threshold, anchors with score > INFERENCE_TH are\n", + "# considered for inference\n", + "_C.MODEL.RETINANET.INFERENCE_TH = 0.05\n", + "\n", + "# NMS threshold used in RetinaNet\n", + "_C.MODEL.RETINANET.NMS_TH = 0.4\n", + "\n", + "\n", + "# ---------------------------------------------------------------------------- #\n", + "# FBNet options\n", + "# ---------------------------------------------------------------------------- #\n", + "_C.MODEL.FBNET = CN()\n", + "_C.MODEL.FBNET.ARCH = \"default\"\n", + "# custom arch\n", + "_C.MODEL.FBNET.ARCH_DEF = \"\"\n", + "_C.MODEL.FBNET.BN_TYPE = \"bn\"\n", + "_C.MODEL.FBNET.SCALE_FACTOR = 1.0\n", + "# the output channels will be divisible by WIDTH_DIVISOR\n", + "_C.MODEL.FBNET.WIDTH_DIVISOR = 1\n", + "_C.MODEL.FBNET.DW_CONV_SKIP_BN = True\n", + "_C.MODEL.FBNET.DW_CONV_SKIP_RELU = True\n", + "\n", + "# > 0 scale, == 0 skip, < 0 same dimension\n", + "_C.MODEL.FBNET.DET_HEAD_LAST_SCALE = 1.0\n", + "_C.MODEL.FBNET.DET_HEAD_BLOCKS = []\n", + "# overwrite the stride for the head, 0 to use original value\n", + "_C.MODEL.FBNET.DET_HEAD_STRIDE = 0\n", + "\n", + "# > 0 scale, == 0 skip, < 0 same dimension\n", + "_C.MODEL.FBNET.KPTS_HEAD_LAST_SCALE = 0.0\n", + "_C.MODEL.FBNET.KPTS_HEAD_BLOCKS = []\n", + "# overwrite the stride for the head, 0 to use original value\n", + "_C.MODEL.FBNET.KPTS_HEAD_STRIDE = 0\n", + "\n", + "# > 0 scale, == 0 skip, < 0 same dimension\n", + "_C.MODEL.FBNET.MASK_HEAD_LAST_SCALE = 0.0\n", + "_C.MODEL.FBNET.MASK_HEAD_BLOCKS = []\n", + "# overwrite the stride for the head, 0 to use original value\n", + "_C.MODEL.FBNET.MASK_HEAD_STRIDE = 0\n", + "\n", + "# 0 to use all blocks defined in arch_def\n", + "_C.MODEL.FBNET.RPN_HEAD_BLOCKS = 0\n", + "_C.MODEL.FBNET.RPN_BN_TYPE = \"\"\n", + "\n", + "\n", + "# ---------------------------------------------------------------------------- #\n", + "# Solver\n", + "# ---------------------------------------------------------------------------- #\n", + "_C.SOLVER = CN()\n", + "_C.SOLVER.MAX_ITER = 40000\n", + "\n", + "_C.SOLVER.BASE_LR = 0.001\n", + "_C.SOLVER.BIAS_LR_FACTOR = 2\n", + "\n", + "_C.SOLVER.MOMENTUM = 0.9\n", + "\n", + "_C.SOLVER.WEIGHT_DECAY = 0.0005\n", + "_C.SOLVER.WEIGHT_DECAY_BIAS = 0\n", + "\n", + "_C.SOLVER.GAMMA = 0.1\n", + "_C.SOLVER.STEPS = (30000,)\n", + "\n", + "_C.SOLVER.WARMUP_FACTOR = 1.0 / 3\n", + "_C.SOLVER.WARMUP_ITERS = 500\n", + "_C.SOLVER.WARMUP_METHOD = \"linear\"\n", + "\n", + "_C.SOLVER.CHECKPOINT_PERIOD = 2500\n", + "\n", + "# Number of images per batch\n", + "# This is global, so if we have 8 GPUs and IMS_PER_BATCH = 16, each GPU will\n", + "# see 2 images per batch\n", + "_C.SOLVER.IMS_PER_BATCH = 16\n", + "\n", + "# ---------------------------------------------------------------------------- #\n", + "# Specific test options\n", + "# ---------------------------------------------------------------------------- #\n", + "_C.TEST = CN()\n", + "_C.TEST.EXPECTED_RESULTS = []\n", + "_C.TEST.EXPECTED_RESULTS_SIGMA_TOL = 4\n", + "# Number of images per batch\n", + "# This is global, so if we have 8 GPUs and IMS_PER_BATCH = 16, each GPU will\n", + "# see 2 images per batch\n", + "_C.TEST.IMS_PER_BATCH = 8\n", + "# Number of detections per image\n", + "_C.TEST.DETECTIONS_PER_IMG = 100\n", + "\n", + "# ---------------------------------------------------------------------------- #\n", + "# Test-time augmentations for bounding box 
detection\n", + "# See configs/test_time_aug/e2e_mask_rcnn_R-50-FPN_1x.yaml for an example\n", + "# ---------------------------------------------------------------------------- #\n", + "_C.TEST.BBOX_AUG = CN()\n", + "\n", + "# Enable test-time augmentation for bounding box detection if True\n", + "_C.TEST.BBOX_AUG.ENABLED = False\n", + "\n", + "# Horizontal flip at the original scale (id transform)\n", + "_C.TEST.BBOX_AUG.H_FLIP = False\n", + "\n", + "# Each scale is the pixel size of an image's shortest side\n", + "_C.TEST.BBOX_AUG.SCALES = ()\n", + "\n", + "# Max pixel size of the longer side\n", + "_C.TEST.BBOX_AUG.MAX_SIZE = 4000\n", + "\n", + "# Horizontal flip at each scale\n", + "_C.TEST.BBOX_AUG.SCALE_H_FLIP = False\n", + "\n", + "\n", + "# ---------------------------------------------------------------------------- #\n", + "# Misc options\n", + "# ---------------------------------------------------------------------------- #\n", + "_C.OUTPUT_DIR = \".\"\n", + "\n", + "_C.PATHS_CATALOG = os.path.join(os.path.dirname(__file__), \"paths_catalog.py\")\n", + "\n", + "# ---------------------------------------------------------------------------- #\n", + "# Precision options\n", + "# ---------------------------------------------------------------------------- #\n", + "\n", + "# Precision of input, allowable: (float32, float16)\n", + "_C.DTYPE = \"float32\"\n", + "\n", + "# Enable verbosity in apex.amp\n", + "_C.AMP_VERBOSE = False\n", + "\n", + "# ---------------------------------------------------------------------------- #\n", + "# Panoptic FPN\n", + "# ---------------------------------------------------------------------------- #\n", + "_C.MODEL.PANOPTIC = CN()\n", + "_C.MODEL.PANOPTIC.CHANNEL_SIZE = 128\n", + "_C.MODEL.PANOPTIC.NUM_CLASSES = 1\n" + ], + "execution_count": 3, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Overwriting maskrcnn-benchmark/maskrcnn_benchmark/config/defaults.py\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "1uoPMGDl49Wk", + "colab_type": "text" + }, + "source": [ + "### Checking our Installation\n", + "\n", + "If a module not found error appears, restart the runtime. 
The libraries should be loaded after restarting" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "3q-n76S95KA3", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 299 + }, + "outputId": "39d390c1-1b71-4c9c-88e6-1d78e4267ce6" + }, + "source": [ + "import maskrcnn_benchmark" + ], + "execution_count": 4, + "outputs": [ + { + "output_type": "error", + "ename": "ModuleNotFoundError", + "evalue": "ignored", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0;32mimport\u001b[0m \u001b[0mmaskrcnn_benchmark\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;31mModuleNotFoundError\u001b[0m: No module named 'maskrcnn_benchmark'", + "", + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0;32m\nNOTE: If your import is failing due to a missing package, you can\nmanually install dependencies using either !pip or !apt.\n\nTo view examples of installing some common dependencies, click the\n\"Open Examples\" button below.\n\u001b[0;31m---------------------------------------------------------------------------\u001b[0m\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "aiLvxXRpDbiq", + "colab_type": "text" + }, + "source": [ + "# Imports" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "kLzesfGNX9O2", + "colab_type": "code", + "colab": {} + }, + "source": [ + "import torch\n", + "from torch import nn\n", + "import torch.nn.functional as Fx\n", + "import datetime\n", + "\n", + "# Set up custom environment before nearly anything else is imported\n", + "# NOTE: this should be the first import (no not reorder)\n", + "from maskrcnn_benchmark.utils.env import setup_environment # noqa F401 isort:skip\n", + "\n", + "from maskrcnn_benchmark.data.build import *\n", + "from maskrcnn_benchmark.structures.bounding_box import BoxList\n", + "from maskrcnn_benchmark.structures.segmentation_mask import SegmentationMask\n", + "from maskrcnn_benchmark.modeling.detector import build_detection_model\n", + "from maskrcnn_benchmark.utils.checkpoint import DetectronCheckpointer\n", + "from maskrcnn_benchmark.structures.image_list import to_image_list\n", + "from maskrcnn_benchmark.modeling.roi_heads.mask_head.inference import Masker\n", + "from maskrcnn_benchmark import layers as L\n", + "from maskrcnn_benchmark.utils import cv2_util\n", + "from maskrcnn_benchmark.utils.miscellaneous import mkdir\n", + "from maskrcnn_benchmark.utils.logger import setup_logger\n", + "from maskrcnn_benchmark.utils.comm import synchronize, get_rank\n", + "from maskrcnn_benchmark.config import cfg\n", + "from maskrcnn_benchmark.config import cfg\n", + "from maskrcnn_benchmark.data import make_data_loader\n", + "from maskrcnn_benchmark.solver import make_lr_scheduler\n", + "from maskrcnn_benchmark.solver import make_optimizer\n", + "from maskrcnn_benchmark.engine.inference import inference\n", + "from maskrcnn_benchmark.engine.trainer import do_train\n", + "from maskrcnn_benchmark.modeling.detector import build_detection_model\n", + "from maskrcnn_benchmark.utils.checkpoint import DetectronCheckpointer\n", + "from maskrcnn_benchmark.utils.collect_env import collect_env_info\n", + "from maskrcnn_benchmark.utils.comm import synchronize, 
get_rank\n", + "from maskrcnn_benchmark.utils.imports import import_file\n", + "from maskrcnn_benchmark.data.datasets.evaluation import evaluate\n", + "from maskrcnn_benchmark.utils.comm import is_main_process, get_world_size\n", + "from maskrcnn_benchmark.utils.comm import all_gather\n", + "from maskrcnn_benchmark.utils.timer import Timer, get_time_str\n", + "from maskrcnn_benchmark.engine.inference import compute_on_dataset, _accumulate_predictions_from_multiple_gpus\n", + "from maskrcnn_benchmark.data.datasets.evaluation.coco import coco_evaluation\n", + "from maskrcnn_benchmark.modeling.utils import cat\n", + "from maskrcnn_benchmark.structures.image_list import to_image_list\n", + "\n", + "from maskrcnn_benchmark.modeling.backbone import build_backbone\n", + "from maskrcnn_benchmark.modeling.rpn.rpn import build_rpn\n", + "from maskrcnn_benchmark.modeling.roi_heads.roi_heads import build_roi_heads\n", + "from maskrcnn_benchmark.modeling.make_layers import make_conv3x3\n", + "from maskrcnn_benchmark.structures.image_list import to_image_list\n", + "from maskrcnn_benchmark.modeling.backbone import build_backbone\n", + "from maskrcnn_benchmark.modeling.rpn.rpn import build_rpn\n", + "from maskrcnn_benchmark.modeling.roi_heads.roi_heads import build_roi_heads\n", + "\n", + "import torch.distributed as dist\n", + "\n", + "from maskrcnn_benchmark.utils.comm import get_world_size\n", + "from maskrcnn_benchmark.utils.metric_logger import MetricLogger\n", + "\n", + "\n", + "from PIL import Image\n", + "import json\n", + "import logging\n", + "import torch\n", + "import numpy as np\n", + "import skimage.draw as draw\n", + "import tempfile\n", + "from pycocotools.coco import COCO\n", + "import os\n", + "import sys\n", + "import random\n", + "import math\n", + "import re\n", + "import time\n", + "import cv2\n", + "import matplotlib\n", + "import matplotlib.pyplot as plt\n", + "from tqdm import tqdm\n", + "\n", + "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.\n", + "from torchvision import transforms as T\n", + "from torchvision.transforms import functional as F\n", + "from google.colab.patches import cv2_imshow\n" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "DvU-NYKJ3uzb", + "colab_type": "text" + }, + "source": [ + "# Loading Our Dataset\n", + "\n", + "To train a network using the MaskRCNN repo, we first need to define our dataset. The dataset needs to a class of type object and should extend 3 things. \n", + "\n", + "1. **__getitem__(self, idx)**: This function should return a PIL Image, a BoxList and the idx. The Boxlist is an abstraction for our bounding boxes, segmentation masks, class lables and also people keypoints. Please check ABSTRACTIONS.ms for more details on this. \n", + "\n", + "2. **__len__()**: returns the length of the dataset. \n", + "\n", + "3. **get_img_info(self, idx)**: Return a dict of img info with the fields \"height\" and \"width\" filled in with the idx's image's height and width.\n", + "\n", + "4. **self.coco**: Should be a variable that holds the COCO object for your annotations so that you can perform evaluations of your dataset. \n", + "\n", + "5. **self.id_to_img_map**: Is a dictionary that maps the ids to coco image ids. Almost in all cases just map the idxs to idxs. This is simply a requirement for the coco evaluation. \n", + "\n", + "6. **self.contiguous_category_id_to_json_id**: Another requirement for coco evaluation. It maps the categpry to json category id. 
Again, for almost all purposes category id and json id should be same. \n", + "\n", + "Given below is a sample fo a dataset. It is the Shape Dataset taken from the Matterport Mask RCNN Repo. One important detail is that the constructor if the dataset should have the variable transforms that is set inside the constructor. It should thgen be used inside **__get__item(idx)** as shown below." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "xnr8tbDz7WjS", + "colab_type": "text" + }, + "source": [ + "## Helper Functions" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "tb_5MERf7c_1", + "colab_type": "code", + "colab": {} + }, + "source": [ + "# Helper Functions for the Shapes Dataset\n", + "\n", + "def non_max_suppression(boxes, scores, threshold):\n", + " \"\"\"Performs non-maximum suppression and returns indices of kept boxes.\n", + " boxes: [N, (y1, x1, y2, x2)]. Notice that (y2, x2) lays outside the box.\n", + " scores: 1-D array of box scores.\n", + " threshold: Float. IoU threshold to use for filtering.\n", + " \"\"\"\n", + " assert boxes.shape[0] > 0\n", + " if boxes.dtype.kind != \"f\":\n", + " boxes = boxes.astype(np.float32)\n", + "\n", + " # Compute box areas\n", + " y1 = boxes[:, 0]\n", + " x1 = boxes[:, 1]\n", + " y2 = boxes[:, 2]\n", + " x2 = boxes[:, 3]\n", + " area = (y2 - y1) * (x2 - x1)\n", + "\n", + " # Get indicies of boxes sorted by scores (highest first)\n", + " ixs = scores.argsort()[::-1]\n", + "\n", + " pick = []\n", + " while len(ixs) > 0:\n", + " # Pick top box and add its index to the list\n", + " i = ixs[0]\n", + " pick.append(i)\n", + " # Compute IoU of the picked box with the rest\n", + " iou = compute_iou(boxes[i], boxes[ixs[1:]], area[i], area[ixs[1:]])\n", + " # Identify boxes with IoU over the threshold. This\n", + " # returns indices into ixs[1:], so add 1 to get\n", + " # indices into ixs.\n", + " remove_ixs = np.where(iou > threshold)[0] + 1\n", + " # Remove indices of the picked and overlapped boxes.\n", + " ixs = np.delete(ixs, remove_ixs)\n", + " ixs = np.delete(ixs, 0)\n", + " return np.array(pick, dtype=np.int32)\n", + "\n", + "def compute_iou(box, boxes, box_area, boxes_area):\n", + " \"\"\"Calculates IoU of the given box with the array of the given boxes.\n", + " box: 1D vector [y1, x1, y2, x2]\n", + " boxes: [boxes_count, (y1, x1, y2, x2)]\n", + " box_area: float. the area of 'box'\n", + " boxes_area: array of length boxes_count.\n", + " Note: the areas are passed in rather than calculated here for\n", + " efficiency. 
Calculate once in the caller to avoid duplicate work.\n", + " \"\"\"\n", + " # Calculate intersection areas\n", + " y1 = np.maximum(box[0], boxes[:, 0])\n", + " y2 = np.minimum(box[2], boxes[:, 2])\n", + " x1 = np.maximum(box[1], boxes[:, 1])\n", + " x2 = np.minimum(box[3], boxes[:, 3])\n", + " intersection = np.maximum(x2 - x1, 0) * np.maximum(y2 - y1, 0)\n", + " union = box_area + boxes_area[:] - intersection[:]\n", + " iou = intersection / union\n", + " return iou" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "5DC0K7tW7d-M", + "colab_type": "text" + }, + "source": [ + "## Dataset" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "WhG_Tu9ELAsj", + "colab_type": "code", + "colab": {} + }, + "source": [ + "class ShapeDataset(object):\n", + " \n", + " def __init__(self, num_examples, transforms=None):\n", + " \n", + " self.height = 128\n", + " self.width = 128\n", + " \n", + " self.num_examples = num_examples\n", + " self.transforms = transforms # IMPORTANT, DON'T MISS\n", + " self.image_info = []\n", + " self.logger = logging.getLogger(__name__)\n", + " \n", + " # Class Names: Note that the ids start fromm 1 not 0. This repo uses the 0 index for background\n", + " self.class_names = {\"square\": 1, \"circle\": 2, \"triangle\": 3}\n", + " \n", + " # Add images\n", + " # Generate random specifications of images (i.e. color and\n", + " # list of shapes sizes and locations). This is more compact than\n", + " # actual images. Images are generated on the fly in load_image().\n", + " for i in range(num_examples):\n", + " bg_color, shapes = self.random_image(self.height, self.width)\n", + " self.image_info.append({ \"path\":None,\n", + " \"width\": self.width, \"height\": self.height,\n", + " \"bg_color\": bg_color, \"shapes\": shapes\n", + " })\n", + " \n", + " # Fills in the self.coco varibale for evaluation.\n", + " self.get_gt()\n", + " \n", + " # Variables needed for coco mAP evaluation\n", + " self.id_to_img_map = {}\n", + " for i, _ in enumerate(self.image_info):\n", + " self.id_to_img_map[i] = i\n", + "\n", + " self.contiguous_category_id_to_json_id = { 0:0 ,1:1, 2:2, 3:3 }\n", + " \n", + "\n", + " def random_shape(self, height, width):\n", + " \"\"\"Generates specifications of a random shape that lies within\n", + " the given height and width boundaries.\n", + " Returns a tuple of three valus:\n", + " * The shape name (square, circle, ...)\n", + " * Shape color: a tuple of 3 values, RGB.\n", + " * Shape dimensions: A tuple of values that define the shape size\n", + " and location. 
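As a quick worked example of the two helpers defined above (made-up boxes in the same (y1, x1, y2, x2) layout as the docstrings): two heavily overlapping boxes and one disjoint box, pruned with the same 0.3 threshold used in `random_image` below.

```python
# Worked example for compute_iou / non_max_suppression (illustrative values only).
import numpy as np

boxes = np.array([[10, 10,  50,  50],    # A
                  [12, 12,  52,  52],    # B, overlaps A with IoU ~0.82
                  [80, 80, 120, 120]],   # C, disjoint from A and B
                 dtype=np.float32)
scores = np.array([0.9, 0.8, 0.7])
areas = (boxes[:, 2] - boxes[:, 0]) * (boxes[:, 3] - boxes[:, 1])

print(compute_iou(boxes[0], boxes[1:], areas[0], areas[1:]))  # approx [0.82, 0.0]
print(non_max_suppression(boxes, scores, threshold=0.3))      # keeps A and C -> [0 2]
```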
Differs per shape type.\n", + " \"\"\"\n", + " # Shape\n", + " shape = random.choice([\"square\", \"circle\", \"triangle\"])\n", + " # Color\n", + " color = tuple([random.randint(0, 255) for _ in range(3)])\n", + " # Center x, y\n", + " buffer = 20\n", + " y = random.randint(buffer, height - buffer - 1)\n", + " x = random.randint(buffer, width - buffer - 1)\n", + " # Size\n", + " s = random.randint(buffer, height//4)\n", + " return shape, color, (x, y, s)\n", + "\n", + " def random_image(self, height, width):\n", + " \"\"\"Creates random specifications of an image with multiple shapes.\n", + " Returns the background color of the image and a list of shape\n", + " specifications that can be used to draw the image.\n", + " \"\"\"\n", + " # Pick random background color\n", + " bg_color = np.array([random.randint(0, 255) for _ in range(3)])\n", + " # Generate a few random shapes and record their\n", + " # bounding boxes\n", + " shapes = []\n", + " boxes = []\n", + " N = random.randint(1, 4)\n", + " labels = {}\n", + " for _ in range(N):\n", + " shape, color, dims = self.random_shape(height, width)\n", + " shapes.append((shape, color, dims))\n", + " x, y, s = dims\n", + " boxes.append([y-s, x-s, y+s, x+s])\n", + "\n", + " # Apply non-max suppression wit 0.3 threshold to avoid\n", + " # shapes covering each other\n", + " keep_ixs = non_max_suppression(np.array(boxes), np.arange(N), 0.3)\n", + " shapes = [s for i, s in enumerate(shapes) if i in keep_ixs]\n", + " \n", + " return bg_color, shapes\n", + " \n", + " \n", + " def draw_shape(self, image, shape, dims, color):\n", + " \"\"\"Draws a shape from the given specs.\"\"\"\n", + " # Get the center x, y and the size s\n", + " x, y, s = dims\n", + " if shape == 'square':\n", + " cv2.rectangle(image, (x-s, y-s), (x+s, y+s), color, -1)\n", + " elif shape == \"circle\":\n", + " cv2.circle(image, (x, y), s, color, -1)\n", + " elif shape == \"triangle\":\n", + " points = np.array([[(x, y-s),\n", + " (x-s/math.sin(math.radians(60)), y+s),\n", + " (x+s/math.sin(math.radians(60)), y+s),\n", + " ]], dtype=np.int32)\n", + " cv2.fillPoly(image, points, color)\n", + " return image, [ x-s, y-s, x+s, y+s]\n", + "\n", + "\n", + " def load_mask(self, image_id):\n", + " \"\"\"\n", + " Generates instance masks for shapes of the given image ID.\n", + " \"\"\"\n", + " info = self.image_info[image_id]\n", + " shapes = info['shapes']\n", + " count = len(shapes)\n", + " mask = np.zeros([info['height'], info['width'], count], dtype=np.uint8)\n", + " boxes = []\n", + " \n", + " for i, (shape, _, dims) in enumerate(info['shapes']):\n", + " mask[:, :, i:i+1], box = self.draw_shape( mask[:, :, i:i+1].copy(),\n", + " shape, dims, 1)\n", + " boxes.append(box)\n", + " \n", + " \n", + " # Handle occlusions\n", + " occlusion = np.logical_not(mask[:, :, -1]).astype(np.uint8)\n", + " for i in range(count-2, -1, -1):\n", + " mask[:, :, i] = mask[:, :, i] * occlusion\n", + " occlusion = np.logical_and(occlusion, np.logical_not(mask[:, :, i]))\n", + " \n", + " segmentation_mask = mask.copy()\n", + " segmentation_mask = np.expand_dims(np.sum(segmentation_mask, axis=2), axis=2)\n", + " \n", + " # Map class names to class IDs.\n", + " class_ids = np.array([self.class_names[s[0]] for s in shapes])\n", + " return segmentation_mask.astype(np.uint8), mask.astype(np.uint8), class_ids.astype(np.int32), boxes\n", + " \n", + " def load_image(self, image_id):\n", + " \"\"\"Generate an image from the specs of the given image ID.\n", + " Typically this function loads the image from a file, but\n", + 
" in this case it generates the image on the fly from the\n", + " specs in image_info.\n", + " \"\"\"\n", + " info = self.image_info[image_id]\n", + " bg_color = np.array(info['bg_color']).reshape([1, 1, 3])\n", + " image = np.ones([info['height'], info['width'], 3], dtype=np.uint8)\n", + " image = image * bg_color.astype(np.uint8)\n", + " for shape, color, dims in info['shapes']:\n", + " image, _ = self.draw_shape(image, shape, dims, color)\n", + " return image\n", + " \n", + " def __getitem__(self, idx):\n", + " \n", + " \"\"\"Generate an image from the specs of the given image ID.\n", + " Typically this function loads the image from a file, but\n", + " in this case it generates the image on the fly from the\n", + " specs in image_info.\n", + " \"\"\"\n", + " image = Image.fromarray(self.load_image(idx))\n", + " segmentation_mask, masks, labels, boxes = self.load_mask(idx)\n", + " \n", + " # create a BoxList from the boxes\n", + " boxlist = BoxList(boxes, image.size, mode=\"xyxy\")\n", + "\n", + " # add the labels to the boxlist\n", + " boxlist.add_field(\"labels\", torch.tensor(labels))\n", + "\n", + " # Add masks to the boxlist\n", + " masks = np.transpose(masks, (2,0,1))\n", + " masks = SegmentationMask(torch.tensor(masks), image.size, \"mask\")\n", + " boxlist.add_field(\"masks\", masks)\n", + " \n", + " # Add semantic segmentation masks to the boxlist for panoptic segmentation\n", + " segmentation_mask = np.transpose(segmentation_mask, (2,0,1))\n", + " seg_masks = SegmentationMask(torch.tensor(segmentation_mask), image.size, \"mask\")\n", + " boxlist.add_field(\"seg_masks\", seg_masks)\n", + " \n", + " # Important line! dont forget to add this\n", + " if self.transforms:\n", + " image, boxlist = self.transforms(image, boxlist)\n", + "\n", + " # return the image, the boxlist and the idx in your dataset\n", + " return image, boxlist, idx\n", + " \n", + " \n", + " def __len__(self):\n", + " return self.num_examples\n", + " \n", + "\n", + " def get_img_info(self, idx):\n", + " # get img_height and img_width. 
This is used if\n", + " # we want to split the batches according to the aspect ratio\n", + " # of the image, as it can be more efficient than loading the\n", + " # image from disk\n", + "\n", + " return {\"height\": self.height, \"width\": self.width}\n", + " \n", + " def get_gt(self):\n", + " # Prepares dataset for coco eval\n", + " \n", + " \n", + " images = []\n", + " annotations = []\n", + " results = []\n", + " \n", + " # Define categories\n", + " categories = [ {\"id\": 1, \"name\": \"square\"}, {\"id\": 2, \"name\": \"circle\"}, {\"id\": 3, \"name\": \"triangle\"}]\n", + "\n", + "\n", + " i = 1\n", + " ann_id = 0\n", + "\n", + " for img_id, d in enumerate(self.image_info):\n", + "\n", + " images.append( {\"id\": img_id, 'height': self.height, 'width': self.width } )\n", + "\n", + " for (shape, color, dims) in d['shapes']:\n", + " \n", + " if shape == \"square\":\n", + " category_id = 1\n", + " elif shape == \"circle\":\n", + " category_id = 2\n", + " elif shape == \"triangle\":\n", + " category_id = 3\n", + " \n", + " x, y, s = dims\n", + " bbox = [ x - s, y - s, x+s, y +s ] \n", + " area = (bbox[0] - bbox[2]) * (bbox[1] - bbox[3])\n", + " \n", + " # Format for COCOC\n", + " annotations.append( {\n", + " \"id\": int(ann_id),\n", + " \"category_id\": category_id,\n", + " \"image_id\": int(img_id),\n", + " \"area\" : float(area),\n", + " \"bbox\": [ float(bbox[0]), float(bbox[1]), float(bbox[2]) - float(bbox[0]) + 1, float(bbox[3]) - float(bbox[1]) + 1 ], # note that the bboxes are in x, y , width, height format\n", + " \"iscrowd\" : 0\n", + " } )\n", + "\n", + " ann_id += 1\n", + "\n", + " # Save ground truth file\n", + " \n", + " with open(\"tmp_gt.json\", \"w\") as f:\n", + " json.dump({\"images\": images, \"annotations\": annotations, \"categories\": categories }, f)\n", + "\n", + " # Load gt for coco eval\n", + " self.coco = COCO(\"tmp_gt.json\") \n", + " " + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "2hpTvuSp830x", + "colab_type": "text" + }, + "source": [ + "## Visualise Dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "BI2ncK7kATEh", + "colab_type": "text" + }, + "source": [ + "### Load" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "6nsO_MRUbBpk", + "colab_type": "code", + "outputId": "6a7f5e13-8ba2-4587-e6bc-abe078c0e308", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 105 + } + }, + "source": [ + "train_dt = ShapeDataset(100)\n", + "im, boxlist, idx = train_dt[0]" + ], + "execution_count": 4, + "outputs": [ + { + "output_type": "stream", + "text": [ + "loading annotations into memory...\n", + "Done (t=0.00s)\n", + "creating index...\n", + "index created!\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "F9njOSX0AU5-", + "colab_type": "text" + }, + "source": [ + "### Display some sample Images" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "nMXB9sAW994F", + "colab_type": "code", + "outputId": "d4702873-36ec-430e-c2c9-8bb4b58aa84b", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 486 + } + }, + "source": [ + "rows = 2\n", + "cols = 2\n", + "fig = plt.figure(figsize=(8, 8))\n", + "for i in range(1, rows*cols+1):\n", + " im, boxlist, idx = train_dt[i]\n", + " fig.add_subplot(rows, cols, i)\n", + " plt.imshow(im)\n", + "plt.show()\n", + " " + ], + "execution_count": 5, + "outputs": [ + { + "output_type": "display_data", + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAecAAAHVCAYAAADLvzPyAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzt3X2wXXV97/H3twkEkiDhqblpgia2\nVIc6tjBnkI7UcUQrUkro1DowThuVO7FTrFrb0Vg7g3emzkhttXZuq6aCxA4FEfWSuYNVpFivM5fU\ngMjzQ0QekhsIPiAP0Uj0e//Y69RNOIeTvddae/323u/XzJmz92+vffb3rJxvPuf3W2uvE5mJJEkq\nxy90XYAkSXomw1mSpMIYzpIkFcZwliSpMIazJEmFMZwlSSpMa+EcEWdExN0RsSMiNrX1OpLaZS9L\noxdtvM85IhYB9wCvAXYC3wDOy8w7Gn8xSa2xl6VutDVzPgXYkZn3ZeZPgCuA9S29lqT22MtSBxa3\n9HVXAw/13d8JvGy+jZcsOSSXLjuspVKk8fTYD578bmYe13EZA/UywNIVkUf+t1ZrksbKDx+GvY9l\nDPKctsJ5QRGxEdgIcPjSJbzyt0/qqhSpSP/rM//nga5rOFj9/fy8lfCWTy7quCKpHJf8958O/Jy2\nlrV3Acf33V9Tjf2XzNycmTOZObNkySEtlSGppgV7GZ7Zz0tXjKw2aWK1Fc7fAE6IiHURcShwLrC1\npdeS1B57WepAK8vambk/It4GfAlYBFySmbe38VqS2mMvS91o7ZhzZl4DXNPW15c0GvayNHpeIUyS\npMIYzpIkFcZwliSpMIazJEmFMZwlSSqM4SxJUmEMZ0mSCmM4S5JUGMNZkqTCGM6SJBXGcJYkqTCG\nsyRJhTGcJUkqjOEsSVJhDGdJkgpjOEuSVBjDeULd9dm13PXZtV2XIUkaguE8gfpD2YCWpPFjOEuS\nVBjDWZKkwizuugA1Z74l7NnxF//B/SOrRZI0PGfOkiQVZuhwjojjI+L6iLgjIm6PiHdU40dHxLUR\ncW/1+ajmypXUBvtZKkudmfN+4M8z80TgVOCCiDgR2ARcl5knANdV99Wig33blGdu6znYz1JBhg7n\nzNydmTdVt58A7gRWA+uBLdVmW4Bz6hYpqV32s1SWRo45R8Ra4CRgG7AyM3dXDz0MrGziNTQ3Z8Nq\nmv0sda92OEfEcuBzwDsz8/H+xzIzgZzneRsjYntEbN+37+m6ZeggeeUwPZcm+nnvYyMoVJpwtcI5\nIg6h18iXZebnq+FHImJV9fgqYM9cz83MzZk5k5kzS5YcUqcMSQ1oqp+XrhhNvdIkq3O2dgAXA3dm\n5of7HtoKbKhubwCuHr48zafuDNjZs/rZz1JZ6lyE5OXAHwK3RsTN1dhfAh8EroyI84EHgDfUK1HS\nCNjPUkGGDufM/DoQ8zx8+rBfV9Lo2c9SWbxC2Bhqaknak8MkqUyGsyRJhfEPX4wRZ7mSNB2cOcvQ\nl6TCGM6SJBXGcB4Tzm4laXoYzgI8c1uSSmI4S5JUGM/WLtyoZ7Ozr/fiP7h/pK8rSfo5Z86SJBXG\ncJYkqTCGc8G6PEHLk8MkqTsecy6QwShJ082ZsyRJhTGcNS/f+yxJ3TCcJUkqjOFcGGeqkiTDWQvy\nFwZJGi3DWZKkwvhWqkI4O5UkzTKcOzYuoew1tyVpdFzWliSpMLXDOSIWRcQ3I+J/V/fXRcS2iNgR\nEZ+JiEPrl6lSjMtMX8Oxn6UyNDFzfgdwZ9/9i4CPZOavAD8Azm/gNSaSQacC2c9SAWqFc0SsAX4H\n+GR1P4BXAVdVm2wBzqnzGpJGw36WylF35vz3wLuBn1X3jwEey8z91f2dwOq5nhgRGyNie0Rs37fv\n6ZplaJS8rOfEaqSf9z7WfqHSpBs6nCPiLGBPZt44zPMzc3NmzmTmzJIlhwxbxlgy3FSaJvt56YqG\ni5OmUJ23Ur0cODsizgQOA54HfBRYERGLq9+21wC76pcpqWX2s1SQoWfOmfnezFyTmWuBc4F/z8w3\nAtcDr6822wBcXbvKCTJJM+ZJ+l6mnf0slaWN9zm/B3hXROygd8zq4hZeQ9Jo2M9SBxq5QlhmfhX4\nanX7PuCUJr6upNGzn6XuefnOEZnUJWAv6ylJzfPynZIkFcZwViMmdWVAkrpgOI+AwSVJGoThLElS\nYTwhrEXTNmP25DBJaoYzZ0mSCmM4S5JUGMO5JdO2pN3PP+whSfUYzpIkFcZwliSpMIZzw1zS/Tn3\ngyQNx3BWq/xlRZIGZzhLklQYw7khzhAlDeNll9zUdQkqkOEsSVJhvHxnA5wxL+yuz671sp5Sn/4Z\n8+ztbW85uatyVBhnzpIkFcZwliSpMIZzDZ4ENhj3l9Qz30lgnhymWYazJEmFMZwlSSpMrXCOiBUR\ncVVE3BURd0bEb0bE0RFxbUTcW30+qqliS+Ly7PDcd2Wa5n4elZddctOCS9cHs40mX923Un0U+LfM\nfH1EHAosBf4SuC4zPxgRm4BNwHtqvk5xfFuQJtDU9vMoGLgaxNAz54g4EngFcDFAZv4kMx8D1gNb\nqs22AOfULVJSu+xnqSx1lrXXAY8Cn4qIb0bEJyNiGbAyM3dX2zwMrJzryRGxMSK2R8T2ffuerlGG\npAY01s97HxtRxRPO5e3pViecFwMnAx/LzJOAp+gtef2XzEwg53pyZm7OzJnMnFmy5JAaZUhqQGP9\nvHRF67WOlboha0BPpzrhvBPYmZnbqvtX0WvuRyJiFUD1eU+9EiWNgP0sFWTocM7Mh4GHIuJF1dDp\nwB3AVmBDNbYBuLpWhZJaZz9LZal7tvafApdVZ3beB7yZXuBfGRHnAw8Ab6j5GpJGw35uWFNL0v5h\njOlTK5wz82ZgZo6HTq/zdSWNnv0slcM/GSlJDfMkrmd6zYNnAnDt86/puJLx4eU7JUkqjDNnSRoT\nL7vkpuKOO8/Oipva1tl1j+EsSQ2ZluXsQQK5ztee5qB2WVuSpMI4c5akMdLV26ranC0fzGtO2yza\ncJakBox6Sbvt489dhPFzmbagdllbkqTCOHOWpBom8SSw0mbNB5qG9007c5akMdX0n5V8zYNnFh/M\n/cap1kEZzpIkFcZwlqQhTdKS9rjOQsdttn+wDGdJGnN1fkmYlHCbhO+hn+EsSVJhPFtbkgY0ScvZ\nKpMzZ0kaQKnBPMyZ25O2FDwpS/RgOEuSVBzDWZImSKkzew3GcJYkqTCeECZJB2GSZqSTclx2Pq95\n8Myxv7SnM2dJmjBNX9ZTo2c4S5JUmFrhHBF/FhG3R8RtEXF5RBwWEesiYltE7IiIz0TEoU0VK6k9\n9vP8JmkWOulL2rPG/W1VQ4dzRKwG3g7MZOZLgEXAucBFwEcy81eAHwDnN1GopPbYz5PJ5e3xVXdZ\nezFweEQsBpYCu4FXAVdVj28Bzqn5GpJGw36WCjH02dqZ
uSsi/hZ4EPgR8GXgRuCxzNxfbbYTWD3X\n8yNiI7AR4PClS4YtQ1IDmuzn561sv95RcdaprtRZ1j4KWA+sA34JWAaccbDPz8zNmTmTmTNLlhwy\nbBmSGtBkPy9d0VKRIzRpy8GT9L1MizrL2q8GvpOZj2bm08DngZcDK6plMYA1wK6aNUpqn/0sFaRO\nOD8InBoRSyMigNOBO4DrgddX22wArq5XoqQRsJ8n3KStBky6ocM5M7fRO1HkJuDW6mttBt4DvCsi\ndgDHABc3UKekFtnPP2eAqQS1Lt+ZmRcCFx4wfB9wSp2vK2n07GepHF4hTJKmyONf+euuSxipcb0Y\niX/4QpJwOXtSjesfwHDmLElSYQxnSVNv2mbNj3/lr6dueXvcGM6SJBXGcJYkqTCGs6SpNe0X5nBp\nu1yGsyRJhTGcJWmKeXJYmQxnSVNpmpezVT7DWZKkwniFMElTxRnz3B7/yl/zvFf/VddlNGZcrww2\ny5mzJEmFMZwlSSqM4Sxparik/dwm5cztcV/SBsNZkqTiGM6SJBXGs7UlTTyXswczrmduT8Jy9izD\nWdLE2/aWk7suoVivefDMrkvQHFzWliSpMIazJE2xa59/zUQsB0/C99DPcJYkqTALhnNEXBIReyLi\ntr6xoyPi2oi4t/p8VDUeEfEPEbEjIm6JCA/0SAWxnzWfcZ15TsrM/0AHM3O+FDjjgLFNwHWZeQJw\nXXUf4HXACdXHRuBjzZQpqSGXYj9rHuMWdONU66AWDOfM/Brw/QOG1wNbqttbgHP6xj+dPTcAKyJi\nVVPFSqrHfpbGw7DHnFdm5u7q9sPAyur2auChvu12VmPPEhEbI2J7RGzft+/pIcuQ1IBG+3nvY+0V\nqtEofUY6bjP8YdR+n3NmZkTkEM/bDGwGOOroIwZ+vqTmNdHPq148+PNVngPDr+v3Q096GB9o2Jnz\nI7PLW9XnPdX4LuD4vu3WVGOSymU/S4UZdua8FdgAfLD6fHXf+Nsi4grgZcAP+5bLJJXJftaC+meu\no5pFT9tsud+C4RwRlwOvBI6NiJ3AhfSa+MqIOB94AHhDtfk1wJnADmAv8OYWapY0JPtZTWgzqKc5\nkPstGM6Zed48D50+x7YJXFC3KEntsJ+l8eAfvpAkDe1gZrqzs2tnxQfPcJYktcpQHpzX1pYkqTCG\nsyRJhTGcJUkqjOEsSVJhDGdJkgpjOEuSVBjDWZKkwhjOkiQVxnCWJKkwhrMkSYUxnCVJKozhLElS\nYQxnSZIKYzhLklQYw1mSpMIYzpIkFcZwliSpMIazJEmFMZwlSSqM4SxJUmEWDOeIuCQi9kTEbX1j\nH4qIuyLiloj4QkSs6HvsvRGxIyLujojXtlW4pMHZz9J4OJiZ86XAGQeMXQu8JDNfCtwDvBcgIk4E\nzgV+rXrOP0XEosaqlVTXpdjPUvEWDOfM/Brw/QPGvpyZ+6u7NwBrqtvrgSsyc19mfgfYAZzSYL2S\narCfpfHQxDHntwBfrG6vBh7qe2xnNfYsEbExIrZHxPZ9+55uoAxJDajdz3sfa7lCaQrUCueIeB+w\nH7hs0Odm5ubMnMnMmSVLDqlThqQGNNXPS1csvL2k57Z42CdGxJuAs4DTMzOr4V3A8X2branGJBXM\nfpbKMtTMOSLOAN4NnJ2Ze/se2gqcGxFLImIdcALwn/XLlNQW+1kqz4Iz54i4HHglcGxE7AQupHc2\n5xLg2ogAuCEz/zgzb4+IK4E76C2PXZCZP22reEmDsZ+l8bBgOGfmeXMMX/wc238A+ECdoiS1w36W\nxoNXCJMkqTCGsyRJhTGcJUkqjOEsSVJhDGdJkgpjOEuSVJj4+cWAOiwi4lHgKeC7Xdcyj2Mps7ZS\n64Jyayu1Lnh2bS/IzOO6KmZYEfEEcHfXdcxjnP79S1FqXTA+tQ3cy0WEM0BEbM/Mma7rmEuptZVa\nF5RbW6l1Qdm1DaLk78PaBldqXTDZtbmsLUlSYQxnSZIKU1I4b+66gOdQam2l1gXl1lZqXVB2bYMo\n+fuwtsGVWhdMcG3FHHOWJEk9Jc2cJUkSBYRzRJwREXdHxI6I2NRxLcdHxPURcUdE3B4R76jG3x8R\nuyLi5urjzI7quz8ibq1q2F6NHR0R10bEvdXno0Zc04v69svNEfF4RLyzq30WEZdExJ6IuK1vbM59\nFD3/UP3s3RIRJ3dQ24ci4q7q9b8QESuq8bUR8aO+/ffxNmtrSin9bC8PXZf9PHxdzfZyZnb2ASwC\nvg28EDgU+BZwYof1rAJOrm4fAdwDnAi8H/iLLvdVVdP9wLEHjP0NsKm6vQm4qON/z4eBF3S1z4BX\nACcDty20j4AzgS8CAZwKbOugtt8GFle3L+qrbW3/duPwUVI/28uN/XvazwdfV6O93PXM+RRgR2be\nl5k/Aa4A1ndVTGbuzsybqttPAHcCq7uq5yCtB7ZUt7cA53RYy+nAtzPzga4KyMyvAd8/YHi+fbQe\n+HT23ACsiIhVo6wtM7+cmfuruzcAa9p6/REopp/t5UbYzwPU1XQvdx3Oq4GH+u7vpJAGioi1wEnA\ntmrobdVyxSVdLDdVEvhyRNwYERursZWZubu6/TCwspvSADgXuLzvfgn7DObfR6X9/L2F3m/+s9ZF\nxDcj4j8i4re6KmoApe1PwF6uwX4eXu1e7jqcixQRy4HPAe/MzMeBjwG/DPwGsBv4u45KOy0zTwZe\nB1wQEa/ofzB7ayidnH4fEYcCZwOfrYZK2WfP0OU+ei4R8T5gP3BZNbQbeH5mngS8C/jXiHheV/WN\nK3t5OPbz8Jrq5a7DeRdwfN/9NdVYZyLiEHrNfFlmfh4gMx/JzJ9m5s+Af6a3fDdymbmr+rwH+EJV\nxyOzSzfV5z1d1EbvP5mbMvORqsYi9lllvn1UxM9fRLwJOAt4Y/WfDZm5LzO/V92+kd6x3F8ddW0D\nKmJ/zrKXa7Gfh9BkL3cdzt8AToiIddVvaucCW7sqJiICuBi4MzM/3Dfef9zi94DbDnzuCGpbFhFH\nzN6md/LBbfT214Zqsw3A1aOurXIefUtgJeyzPvPto63AH1VneZ4K/LBvuWwkIuIM4N3A2Zm5t2/8\nuIhYVN1+IXACcN8oaxtCMf1sL9dmPw+o8V5u62y2g/2gd4bdPfR+m3hfx7WcRm+J5Bbg5urjTOBf\ngFur8a3Aqg5qeyG9s1+/Bdw+u6+AY4DrgHuBrwBHd1DbMuB7wJF9Y53sM3r/oewGnqZ3zOn8+fYR\nvbM6/7H62bsVmOmgth30jpPN/rx9vNr296t/55uBm4DfHfW/65DfYxH9bC/Xqs9+Hq6uRnvZK4RJ\nklSYrpe1JUnSAQxnSZIKYzhLklQYw1mSpMIYzpIkFcZwliSpMIazJEmFMZwlSSqM4SxJUmEMZ0mS\nCmM4S5JUGMNZkqTCGM6SJBXGcJYkqTCGsyRJhTGcJUkqjOEsSVJhDGdJkgpjOEuSVBjDWZKkwhjO\nkiQVprVwjog
zIuLuiNgREZvaeh1J7bKXpdGLzGz+i0YsAu4BXgPsBL4BnJeZdzT+YpJaYy9L3Vjc\n0tc9BdiRmfcBRMQVwHpgzoZedvixueKItS2VIo2n//fojd/NzOM6LmOgXgZYftgRecyyY0dUnlS+\n7z31XZ788RMxyHPaCufVwEN993cCL+vfICI2AhsBjlz+fP7kDd9oqRRpPP3VP/7CA13XwEH0Mjyz\nn49eegybzrhwNNVJY+CD//Y/Bn5OZyeEZebmzJzJzJllh3c9OZBUR38/Lz/siK7LkcZeW+G8Czi+\n7/6aakzAvmWnsW/ZaV2XIR0Me1nqQFvh/A3ghIhYFxGHAucCW1t6LUntsZelDrQSzpm5H3gb8CXg\nTuDKzLy9jdcaN/0zZmfPKp29LHWjrRPCyMxrgGva+vqSRsNelkbPK4RJklSY1mbOeqb5lrBnx5c8\n9fVRliNJKpgzZ0mSCmM4S5JUGMN5BA7mrGzf+yxJmuUx5xYZtpKkYThzliSpMIZzYZxtS5IM55bU\nCVmPP0vSdDOcJUkqjOEsSVJhDGdJkgpjODesyePFHneWpOnk+5wbYpBKkprizFmSpMIYzoXzbVWS\nNH0M5wYYnpKkJhnOkiQVxnAeE87OJWl6eLZ2DaMOzNnXW/LU10f6upKk0XLmLElSYQznIbnMLElq\ny9DhHBHHR8T1EXFHRNweEe+oxo+OiGsj4t7q81HNlSvwFwM1z36WylJn5rwf+PPMPBE4FbggIk4E\nNgHXZeYJwHXVfUlls5+lggwdzpm5OzNvqm4/AdwJrAbWA1uqzbYA59QtsiReFESTaFr7WSpVI8ec\nI2ItcBKwDViZmburhx4GVs7znI0RsT0itj/1o0ebKGOq+EuC2lK3n5/88RMjqVOaZLXDOSKWA58D\n3pmZj/c/lpkJ5FzPy8zNmTmTmTPLDj+ubhmSGtBEPy8/7IgRVCpNtlrhHBGH0GvkyzLz89XwIxGx\nqnp8FbCnXonlcKaqSTZt/SyVrM7Z2gFcDNyZmR/ue2grsKG6vQG4evjyJI2C/SyVpc4Vwl4O/CFw\na0TcXI39JfBB4MqIOB94AHhDvRK7V/KMed+y07ximJowNf0sjYOhwzkzvw7EPA+fPuzX1eC8rKfq\nsp+lsniFMEmSCmM4L6DkJW1J0mQynCeI732WpMlgOEuSVBj/nvM8nIFKkrrizHkO4x7M416/JE07\nw1mSpMIYzhPKk8MkaXx5zLmPYSZJKoEzZ0mSCmM4TzhXAyRp/LisjQEmSSqLM2dJkgoz9eE8DbNm\nz9yWpPEy9eEsSVJpDGdJkgozteE8jUu90/b9StK4mtpwliSpVIbzlJnGFQNJGjdTGc6TEE5vP+1x\n3n7a412XIUlqwVSGsyRJJat9hbCIWARsB3Zl5lkRsQ64AjgGuBH4w8z8Sd3XacI4zJgHnQ0Psv0/\nfP15/3V737LTWPLU1wd6LU2+cepnaZI1cfnOdwB3ArP/818EfCQzr4iIjwPnAx9r4HUmzqiXpQ98\nvU98aaQvr/FgP0sFqLWsHRFrgN8BPlndD+BVwFXVJluAc+q8hqTRsJ+lctSdOf898G7giOr+McBj\nmbm/ur8TWF3zNRrR9ZJ2iSdvvfW1L33W2Ce+dEsHlagQY9PP0qQbOpwj4ixgT2beGBGvHOL5G4GN\nAEcuf/6wZRStxEBeSH9gG9TTo8l+PnrpMQ1XJ02fOjPnlwNnR8SZwGH0jlF9FFgREYur37bXALvm\nenJmbgY2A6z+xZmsUYek+hrr5xccs85+lmoa+phzZr43M9dk5lrgXODfM/ONwPXA66vNNgBX166y\nhq4uujGOs+YDvfW1L51z6VuTZ1z6WZoWbbzP+T3AuyJiB71jVhe38BqSRsN+ljrQxFupyMyvAl+t\nbt8HnNLE123CqN7LO8kzTI9DT5eS+1maFl4hrAGTHMwHmqbvVZK6YjhLklSYRpa1p9W0ziJnv2+X\nuCWpHc6chzStwdzPfSBJ7TCcJUkqjMvaA3K2+EwucUtS85w5D8Bgnp/7RpKaYzhLklQYl7UPgrPC\ng+MStyQ1w5nzAgzmwXlNbkmqx3CWJKkwhrOkqbL3yTez98k3d12G9Jw85jwPl2Xre+trX+rxZ43E\nMGE7yHOWLv/UwF9fqsOZsyRJhXHmPAdnzc3xDG41qavl6Ple1xm12uLMWZKkwjhzllS0kk/e6q/N\nWbSaZDj3cTm7PZ4cpkGUHMjzMajVJJe1JUkqjDNnSUUYx9nyfJxFqy7DWVKnJimU5zL7/RnSGoTL\n2pIkFaZWOEfEioi4KiLuiog7I+I3I+LoiLg2Iu6tPh/VVLFt8Q81jIb7uWxd9POkz5r7TdP3qvrq\nzpw/CvxbZr4Y+HXgTmATcF1mngBcV92XVL6R9PPsta2nMaym+XvXYIYO54g4EngFcDFAZv4kMx8D\n1gNbqs22AOfULVJSu+xnqSx1TghbBzwKfCoifh24EXgHsDIzd1fbPAysrFeipBFovZ+dLT6TJ4rp\nudRZ1l4MnAx8LDNPAp7igCWvzEwg53pyRGyMiO0Rsf2pHz1ao4x6PAY6eu7zIjXWz0/++IlnPW4w\nz899o7nUCeedwM7M3Fbdv4pecz8SEasAqs975npyZm7OzJnMnFl2+HE1ypDUgMb6eflhR4ykYGmS\nDR3Omfkw8FBEvKgaOh24A9gKbKjGNgBX16pQUuva6mdPfjo47icdqO5FSP4UuCwiDgXuA95ML/Cv\njIjzgQeAN9R8DUmjYT9LhagVzpl5MzAzx0On1/m6kkbPfpbK4RXCJLXCZdrBuc80y3CWJKkwhrOk\nRv3sp/c7A6zBk8MEhrMkFcmQnm6GsyRJhTGcJUkqTN33OY8tLyHZrdn9/4kv3dJxJVLZ9j75Zq+/\nPYWcOUuSVJipnTnPzticQXfDGbMkzc+ZsyQVzjO3p4/hLElSYQxnSZIKYzhL0phwaXt6GM6SJBXG\ncJYkqTCGsyRJhTGcJUkqjOEsSVJhDGdJkgoz9eHsZSRHz30uDc+rhU2HqQ9nSZJKYzhLklSYWuEc\nEX8WEbdHxG0RcXlEHBYR6yJiW0TsiIjPRMShTRUrqT32s1SOocM5IlYDbwdmMvMlwCLgXOAi4COZ\n+SvAD4Dzmyi0TZ/40i0eBx0B93O5JqmfpUlQd1l7MXB4RCwGlgK7gVcBV1WPbwHOqfkakkbDfpYK\nMXQ4Z+Yu4G+BB+k18Q+BG4HHMnN/tdlOYPVcz4+IjRGxPSK2P/WjR4ctQ1IDGu3nn4yiYmmy1VnW\nPgpYD6wDfglYBpxxsM/PzM2ZOZOZM8sOP27YMiQ1oNF+9qi0VFudZe1XA9/JzEcz82ng88DLgRXV\nshjAGmBXzRoltc9+lgpSJ5wfBE6NiKUREcDpwB3A9cDrq202AFfXK3F0PGGpPe7X4k1cP0vjrM4x\n5230ThS5Cbi1+lqbgfcA74qIHcAxwMUN1CmpRfbz+PEqYZNt8cKbzC8z
LwQuPGD4PuCUOl9X0ujZ\nz1I5vELYHFyCbY6HCqR2LF3+qa5LUIsM53kYKvW5/yRpOIazJEmFMZwlSSqM4SxJUmEM5wV43HRw\nHq+XpHpqvZVqWswGzVtf+9KOKymbgSxJzXDmLElSYQznATgznJ/7RpKa47L2gFzifiZDWZKa58xZ\nkqTCGM5DcsboPpC6sHT5p7x05xRwWbuGaV3iNpQlqV3OnCVJKozh3IBpmklO0/cqSV1xWbshB4bW\nJC11G8iSNFrOnCVJKozh3JJJmG16jWypLJ6lPT1c1m5Rf7CNyzK3YSxJ3XPmLElSYZw5j8hcM9Ku\nZ9POkqXx4HL29HHmLElSYRacOUfEJcBZwJ7MfEk1djTwGWAtcD/whsz8QUQE8FHgTGAv8KbMvKmd\n0sffqGfTzpRlP0vj4WCWtS8F/ifw6b6xTcB1mfnBiNhU3X8P8DrghOrjZcDHqs86SAcboLMhbuBq\nQJdiP48Vl7Sn04LL2pn5NeD7BwyvB7ZUt7cA5/SNfzp7bgBWRMSqpoqVVI/9LI2HYU8IW5mZu6vb\nDwMrq9urgYf6tttZje3mABGxEdgIcOTy5w9ZxvRyxqwGNdrPRx3eXqHStKh9tnZmZkTkEM/bDGwG\nWP2LMwM/X1Lzmujn41cM/nw9m8vZ023Ys7UfmV3eqj7vqcZ3Acf3bbemGpNULvtZKsyw4bwV2FDd\n3gBc3Tf+R9FzKvDDvuUySWV1GWLFAAAHPElEQVRqtJ9/YdFaZ301LF3+KfefFg7niLgc+L/AiyJi\nZ0ScD3wQeE1E3Au8uroPcA1wH7AD+GfgT1qpWtJQRtnPBszg3GeateAx58w8b56HTp9j2wQuqFuU\npHbYz9J48AphkiQVxmtrS2rN7DLt3iff3HElZXM5Wwdy5ixJUmEMZ0mtc2Y4P/eN5mI4S5JUGI85\nSxoJjz8/kzNmPRfDWdJI9YfStAW1gayD5bK2JEmFMZwldWaaZpLT9L2qPpe1JXVq0o9FG8oahjNn\nSZIK48xZUhEm6UQxZ8uqy3CWVJxxDGoDWU1yWVuSpMI4c5ZUtJJn0c6W1RbDWdLYmC8M2w5tQ1ij\n5rK2JEmFceYsaewNMrOdnWU7G1bJDGdJU8VQ1jhwWVuSpMIYzpIkFcZwliSpMAuGc0RcEhF7IuK2\nvrEPRcRdEXFLRHwhIlb0PfbeiNgREXdHxGvbKlzS4OxnaTwczMz5UuCMA8auBV6SmS8F7gHeCxAR\nJwLnAr9WPeefImJRY9VKqutS7GepeAuGc2Z+Dfj+AWNfzsz91d0bgDXV7fXAFZm5LzO/A+wATmmw\nXkk12M/SeGjimPNbgC9Wt1cDD/U9trMae5aI2BgR2yNi+1M/erSBMiQ1oHY/P/njJ1ouUZp8tcI5\nIt4H7AcuG/S5mbk5M2cyc2bZ4cfVKUNSA5rq5+WHHdF8cdKUGfoiJBHxJuAs4PTMzGp4F3B832Zr\nqjFJBbOfpbIMNXOOiDOAdwNnZ+bevoe2AudGxJKIWAecAPxn/TIltcV+lsqz4Mw5Ii4HXgkcGxE7\ngQvpnc25BLg2IgBuyMw/zszbI+JK4A56y2MXZOZP2ype0mDsZ2k8LBjOmXneHMMXP8f2HwA+UKco\nSe2wn6Xx4BXCJEkqjOEsSVJhDGdJkgpjOEuSVBjDWZKkwhjOkiQVJn5+MaAOi4h4FHgK+G7Xtczj\nWMqsrdS6oNzaSq0Lnl3bCzJz7K5tGxFPAHd3Xcc8xunfvxSl1gXjU9vAvVxEOANExPbMnOm6jrmU\nWlupdUG5tZVaF5Rd2yBK/j6sbXCl1gWTXZvL2pIkFcZwliSpMCWF8+auC3gOpdZWal1Qbm2l1gVl\n1zaIkr8PaxtcqXXBBNdWzDFnSZLUU9LMWZIkYThLklSczsM5Is6IiLsjYkdEbOq4luMj4vqIuCMi\nbo+Id1Tj74+IXRFxc/VxZkf13R8Rt1Y1bK/Gjo6IayPi3urzUSOu6UV9++XmiHg8It7Z1T6LiEsi\nYk9E3NY3Nuc+ip5/qH72bomIkzuo7UMRcVf1+l+IiBXV+NqI+FHf/vt4m7U1pZR+tpeHrst+Hr6u\nZns5Mzv7ABYB3wZeCBwKfAs4scN6VgEnV7ePAO4BTgTeD/xFl/uqqul+4NgDxv4G2FTd3gRc1PG/\n58PAC7raZ8ArgJOB2xbaR8CZwBeBAE4FtnVQ228Di6vbF/XVtrZ/u3H4KKmf7eXG/j3t54Ovq9Fe\n7nrmfAqwIzPvy8yfAFcA67sqJjN3Z+ZN1e0ngDuB1V3Vc5DWA1uq21uAczqs5XTg25n5QFcFZObX\ngO8fMDzfPloPfDp7bgBWRMSqUdaWmV/OzP3V3RuANW29/ggU08/2ciPs5wHqarqXuw7n1cBDffd3\nUkgDRcRa4CRgWzX0tmq54pIulpsqCXw5Im6MiI3V2MrM3F3dfhhY2U1pAJwLXN53v4R9BvPvo9J+\n/t5C7zf/Wesi4psR8R8R8VtdFTWA0vYnYC/XYD8Pr3Yvdx3ORYqI5cDngHdm5uPAx4BfBn4D2A38\nXUelnZaZJwOvAy6IiFf0P5i9NZRO3hsXEYcCZwOfrYZK2WfP0OU+ei4R8T5gP3BZNbQbeH5mngS8\nC/jXiHheV/WNK3t5OPbz8Jrq5a7DeRdwfN/9NdVYZyLiEHrNfFlmfh4gMx/JzJ9m5s+Af6a3fDdy\nmbmr+rwH+EJVxyOzSzfV5z1d1EbvP5mbMvORqsYi9lllvn1UxM9fRLwJOAt4Y/WfDZm5LzO/V92+\nkd6x3F8ddW0DKmJ/zrKXa7Gfh9BkL3cdzt8AToiIddVvaucCW7sqJiICuBi4MzM/3Dfef9zi94Db\nDnzuCGpbFhFHzN6md/LBbfT214Zqsw3A1aOurXIefUtgJeyzPvPto63AH1VneZ4K/LBvuWwkIuIM\n4N3A2Zm5t2/8uIhYVN1+IXACcN8oaxtCMf1sL9dmPw+o8V5u62y2g/2gd4bdPfR+m3hfx7WcRm+J\n5Bbg5urjTOBfgFur8a3Aqg5qeyG9s1+/Bdw+u6+AY4DrgHuBrwBHd1DbMuB7wJF9Y53sM3r/oewG\nnqZ3zOn8+fYRvbM6/7H62bsVmOmgth30jpPN/rx9vNr296t/55uBm4DfHfW/65DfYxH9bC/Xqs9+\nHq6uRnvZy3dKklSYrpe1JUnSAQxnSZIKYzhLklQYw1mSpMIYzpIkFcZwliSpMIazJEmF+f9DLBmh\n69wGYQAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "tags": [] + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "P8rXzGehNU_g", + "colab_type": "text" + }, + "source": [ + "### Visualise Segmentation Masks" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "GNpkhNrINZHR", + "colab_type": "code", + "outputId": "1471b268-82c2-48fc-9613-72bd0a924726", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 521 + } + }, + "source": [ + "def visMask(im, seg_mask):\n", + " m = seg_mask.instances.masks\n", + " m = m.numpy().reshape([128,128])\n", + " im = np.transpose(im, (2,0,1)) # 3, 128, 128\n", + " res = im*m\n", + " res = np.transpose(res, (1,2,0)) # 128, 128, 3 \n", + " plt.imshow(res)\n", + "\n", + "im, boxlist, idx = train_dt[0]\n", + "plt.imshow(im)\n", + "plt.show()\n", + "seg_mask = boxlist.extra_fields['seg_masks']\n", + "visMask(im, seg_mask)" + ], + "execution_count": 6, + "outputs": [ + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQUAAAD8CAYAAAB+fLH0AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAD6xJREFUeJzt3X+s3XV9x/HnSyo4MJUipqktGZ02\nGtQ5yI2WuCwGNAIzwhJjIGZ2jqRZwiY6EwfzDyCZRjejYuLYGlG7hfBjyEbDmMoKxuwPOosYhFak\ngyElhWIsdpGFWX3vj/O983zK7W57z+/L85Hc3PP9nu+533c/ved1Pp/P+d7zSVUhSfNeMukCJE0X\nQ0FSw1CQ1DAUJDUMBUkNQ0FSw1CQ1BhZKCQ5L8nDSfYkuWJU55E0XBnFxUtJjgN+CLwT2At8B7ik\nqnYN/WSShmrFiH7uW4A9VfUoQJKbgAuBBUNh5aqX1Kte7UhGGqVHd/3ix1X1qsWOG1UorAWe6Nve\nC7y1/4Akm4HNAKeueQmfvHnliEqRBHDxmw48fjTHTezluaq2VNVcVc2tXJVJlSHpMKMKhSeB0/q2\n13X7JE25UYXCd4ANSdYnOR64GNg2onNJGqKRzClU1aEkfwx8AzgO+HJVPTSKc0karlFNNFJVdwJ3\njurnSxoN3weU1DAUJDUMBUmNkc0pDOqce3dMugQdpbs3vnXxgzQz7ClIahgKkhqGgqSGoSCpYShI\nahgKkhqGgqSGoSCpYShIahgKkhqGgqSGoSCpYShIahgKkhqGgqSGoSCpYShIahgKkhqGgqSGoSCp\nYShIaiw5FJKcluSeJLuSPJTk8m7/KUnuSvJI933V8MqVNGqD9BQOAR+tqjOAjcBlSc4ArgC2V9UG\nYHu3LWlGLDkUqmpfVX23u/1fwG5gLXAhsLU7bCtw0aBFShqfocwpJDkdOBPYAayuqn3dXU8Bq4/w\nmM1JdibZefBADaMMSUMwcCgkeTnwNeDDVXWw/76qKmDBZ3xVbamquaqaW7kqg5YhaUgGCoUkL6UX\nCDdU1W3d7qeTrOnuXwPsH6xESeM0yLsPAa4HdlfVZ/vu2gZs6m5vAm5fenmSxm2QBWbfBvw+8P0k\n3+v2/TnwKeCWJJcCjwPvG6xESeO05FCoqn8DjjQZcO5Sf66kyfKKRkkNQ0FSw1CQ1DAUJDUMBUkN\nQ0FSw1CQ1DAUJDUMBUkNQ0FSw1CQ1DAUJDUMBUkNQ0FSw1CQ1DAUJDUMBUkNQ0FSw1CQ1DAUJDUM\nBUkNQ0FSw1CQ1DAUJDWGscDscUnuT3JHt70+yY4ke5LcnOT4wcuUNC7D6ClcDuzu2/408Lmqei1w\nALh0COeQNCaDrjq9Dvhd4EvddoBzgFu7Q7YCFw1yDknjNWhP4fPAx4BfdtuvBJ6tqkPd9l5g7YDn\nkDRGgyxF/25gf1Xdt8THb06yM8nOgwdqqWVIGrJBl6J/T5ILgJcBK4FrgZOTrOh6C+uAJxd6cFVt\nAbYAvOYNK0wFaUosuadQVVdW1bqqOh24GLi7qt4P3AO8tztsE3D7wFVKGptRXKfwZ8CfJtlDb47h\n+hGcQ9KIDDJ8+D9V9S3gW93tR4G3DOPnSho/r2iU1DAUJDUMBUkNQ0FSw1CQ1DAUJDUMBQ3sqqvv\n56qr7590GRoSQ0FSw1DQ0NhjWB4MBQ2d4TDbDAVJDUNBI2NvYTYZCpIahoJGalrnF054bs2kS5ha\nhoKkxlA+T0GaBsf66n8sxz9/4r5jLWdmGQoai/khxDVXnznUnzuuYUD/eZZ7QDh8kNSwp6CZM+lJ\nwuXea7CnIKlhKGislvL25AnPrWm+psk017ZUhoKm2qw90ZZDOBgKkhqGgsbuaK5ynPVX3Fmu3VCQ\n1BgoFJKcnOTWJD9IsjvJ2UlOSXJXkke676uGVayWv1nvIfSb1X/LoD2Fa4GvV9XrgTcDu4ErgO1V\ntQHY3m1LL9A/jJjVJ9DRmLV/25JDIckrgN+hW0C2qv6nqp4FLgS2dodtBS4atEhJ4zNIT2E98Azw\nlST3J/lSkpOA1VU1f5nXU8DqQYvU8jZLr6KDmJV/5yChsAI4C7iuqs4EfsZhQ4WqKqAWenCSzUl2\nJtl58MCCh0iagEFCYS+wt6p2dNu30guJp5OsAei+71/owVW1parmqmpu5aoMUIakYVpyKFTVU8AT\nSV7X7ToX2AVsAzZ1+zYBtw9UoaSxGvSvJP8EuCHJ8cCjwAfpBc0tSS4FHgfeN+A5tEx96mMXTLqE\nsZufV5jmv64cKBSq6nvA3AJ3nTvIz5U0OV7RKKnhh6xo7F6Mw4bDTfMwwp6CpIahIKlhKGisHDq0\npvEqR0NBUsNQkNQwFCQ1DAVJDUNBUmNqL166e+NbJ13CEU3j0urSsNhTkNQwFCQ1DAVJDUNBUmNq\nJxq1vHh585FN219MGgoaiyv+8k7AcFjItITBPIcPkhqGwhJcc/WZXHP1mZMuQxoJQ0FS
w1CQ1DAU\nBuAwQsuRoSCpYShIahgKkhoDhUKSjyR5KMmDSW5M8rIk65PsSLInyc3dknLLmvMKWk6WHApJ1gIf\nAuaq6o3AccDFwKeBz1XVa4EDwKXDKFTSeAw6fFgB/FqSFcCJwD7gHHrL0gNsBS4a8BxaRqbtkt5J\nm8b2WPLfPlTVk0k+A/wI+G/gm8B9wLNVdag7bC+wduAqZ8D8EMJPZVqYQ6zZMcjwYRVwIbAeeDVw\nEnDeMTx+c5KdSXYePFBLLUPSkA0yfHgH8FhVPVNVPwduA94GnNwNJwDWAU8u9OCq2lJVc1U1t3JV\nBihDs+b5E/dNZbd5nKa5DQYJhR8BG5OcmCTAucAu4B7gvd0xm4DbBytR0jgNMqewI8mtwHeBQ8D9\nwBbgn4GbkvxFt+/6YRQ6K5xbaDmXMHtSNfnx/GvesKI+efPKSZcxdC/2YDjaQJjGRVZHZZJDhovf\ndOC+qppb7DivaJTU8OPYRujFOpRwyDDb7ClIathTGIMXS49hqT2E50/c96KYV5jWtyAPZyiM0XIN\nh2EMF+afMMsxHGYlDOY5fJDUsKcwAf2vrLPaaxjVZOJy6jHMWg9hnj0FSQ17ChM2S72Gcb7VOOs9\nhlntJYA9BUmHsacwRQ5/JZ50z2EaLkKatbcrZ7mHMM9QmGKTGFpMQxAc7vAn2jSFxHIIgcM5fJDU\nsKcwI471FXy+ZzGNr/yD6n91nkSvYTn2DvrZU5DUsKewTC3HHsJCxtVrWO69g36GgpaNY3ninvDc\nmhfVE/1YOHyQ1DAU9KJkL+HIDAVJDUNBUsNQkNQwFCQ1DAVJDUNBUmPRUEjy5ST7kzzYt++UJHcl\neaT7vqrbnyRfSLInyQNJzhpl8ZKG72h6Cl/lhUvMXwFsr6oNwPZuG+B8YEP3tRm4bjhlShqXRUOh\nqr4N/OSw3RcCW7vbW4GL+vb/XfXcS29Z+un543dJi1rqnMLqqpq/JOwpYHV3ey3wRN9xe7t9kmbE\nwBON1Vu2+piXrk6yOcnOJDsPHpj8yteSepYaCk/PDwu67/u7/U8Cp/Udt67b9wJVtaWq5qpqbuWq\nLLEMScO21FDYBmzqbm8Cbu/b/4HuXYiNwE/7hhmSZsCin6eQ5Ebg7cCpSfYCVwGfAm5JcinwOPC+\n7vA7gQuAPcBzwAdHULOkEVo0FKrqkiPcde4CxxZw2aBFSZocr2iU1DAUJDUMBUkNQ0FSw1CQ1DAU\nJDUMBUkNQ0FSw1CQ1DAUJDUMBUkNQ0FSw1CQ1DAUJDUMBUkNQ0FSw1CQ1DAUJDUMBUkNQ0FSw1CQ\n1DAUJDUMBUkNQ0FSw1CQ1Fg0FJJ8Ocn+JA/27furJD9I8kCSf0xyct99VybZk+ThJO8aVeGSRuNo\negpfBc47bN9dwBur6jeBHwJXAiQ5A7gYeEP3mL9OctzQqpU0couGQlV9G/jJYfu+WVWHus176S05\nD3AhcFNVPV9Vj9FbaPYtQ6xX0ogNY07hD4F/6W6vBZ7ou29vt0/SjBgoFJJ8HDgE3LCEx25OsjPJ\nzoMHapAyJA3RkkMhyR8A7wbe3y1BD/AkcFrfYeu6fS9QVVuqaq6q5lauylLLkDRkSwqFJOcBHwPe\nU1XP9d21Dbg4yQlJ1gMbgH8fvExJ47JisQOS3Ai8HTg1yV7gKnrvNpwA3JUE4N6q+qOqeijJLcAu\nesOKy6rqF6MqXtLwLRoKVXXJAruv/3+O/wTwiUGKkjQ5XtEoqWEoSGoYCpIahoKkhqEgqWEoSGoY\nCpIahoKkRn71ZwsTLCJ5BvgZ8ONJ1wKcinX0s47WLNfx61X1qsUOmopQAEiys6rmrMM6rGOydTh8\nkNQwFCQ1pikUtky6gI51tKyjtezrmJo5BUnTYZp6CpKmwFSEQpLzunUi9iS5YkznPC3JPUl2JXko\nyeXd/lOS3JXkke77qjHVc1yS+5Pc0W2vT7Kja5Obkxw/hhpOTnJrt6bH7iRnT6I9knyk+z95MMmN\nSV42rvY4wjonC7ZBer7Q1fRAkrNGXMdY1luZeCh060J8ETgfOAO4pFs/YtQOAR+tqjOAjcBl3Xmv\nALZX1QZge7c9DpcDu/u2Pw18rqpeCxwALh1DDdcCX6+q1wNv7uoZa3skWQt8CJirqjcCx9FbS2Rc\n7fFVXrjOyZHa4Hx6Hzm4AdgMXDfiOsaz3kpVTfQLOBv4Rt/2lcCVE6jjduCdwMPAmm7fGuDhMZx7\nHb1ftnOAO4DQuzBlxUJtNKIaXgE8RjfP1Ld/rO3Br5YJOIXeJ4PdAbxrnO0BnA48uFgbAH8LXLLQ\ncaOo47D7fg+4obvdPGeAbwBnL/W8E+8pMAVrRSQ5HTgT2AGsrqp93V1PAavHUMLn6X0Q7i+77VcC\nz9avFtwZR5usB54BvtINY76U5CTG3B5V9STwGeBHwD7gp8B9jL89+h2pDSb5uzuy9VamIRQmKsnL\nga8BH66qg/33VS92R/r2TJJ3A/ur6r5RnucorADOAq6rqjPpXXbeDBXG1B6r6K00th54NXASL+xG\nT8w42mAxg6y3cjSmIRSOeq2IYUvyUnqBcENV3dbtfjrJmu7+NcD+EZfxNuA9Sf4TuIneEOJa4OQk\n8x+sO4422Qvsraod3fat9EJi3O3xDuCxqnqmqn4O3EavjcbdHv2O1AZj/90ddL2VozENofAdYEM3\nu3w8vQmTbaM+aXqfTX89sLuqPtt31zZgU3d7E725hpGpqiural1VnU7v3353Vb0fuAd47xjreAp4\nIsnrul3n0vuo/rG2B71hw8YkJ3b/R/N1jLU9DnOkNtgGfKB7F2Ij8NO+YcbQjW29lVFOGh3DhMoF\n9GZT/wP4+JjO+dv0uoEPAN/rvi6gN57fDjwC/Ctwyhjb4e3AHd3t3+j+Y/cA/wCcMIbz/xaws2uT\nfwJWTaI9gGuAHwAPAn9Pb42RsbQHcCO9uYyf0+s9XXqkNqA3IfzF7vf2+/TeMRllHXvozR3M/77+\nTd/xH+/qeBg4f5Bze0WjpMY0DB8kTRFDQVLDUJDUMBQkNQwFSQ1DQVLDUJDUMBQkNf4XHdFBfzyZ\nI98AAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "tags": [] + } + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQUAAAD8CAYAAAB+fLH0AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAD2VJREFUeJzt3WusXFd5xvH/U5tAE1TiQLGMHdWu\nMKncqG0iiwbRVoiASFJEgkAoCBW3WFiVaAkUCRz4UFXqB1ARN4mmNQngVlESmktjpSoQTBDtB1xs\ngkLi4MSQQGw5cRAJVLRqcXn7YbbJLF84l5nZM+f4/5NGZ/aePWe/Xj7nOWut2TMrVYUkHfdL0y5A\n0mwxFCQ1DAVJDUNBUsNQkNQwFCQ1DAVJjYmFQpLLkhxIcjDJ9kmdR9J4ZRIXLyVZATwEvBo4BHwd\neHNV7R/7ySSN1coJfd+XAger6rsASW4GrgROGQpJvKxSmrwfVNWvznXQpIYPa4HHhrYPdft+Lsm2\nJHuT7J1QDZJa35vPQZPqKcypqnYAO8CegjRLJtVTOAycP7S9rtsnacZNKhS+DmxMsiHJWcDVwK4J\nnUvSGE1k+FBVx5L8GfAFYAXw6ap6YBLnkjReE3lJcsFFOKcg9WFfVW2e6yCvaJTUMBQkNQwFSY2p\nXacwl6OfemjaJWieXvj2l0y7BI2RPQVJDUNBUsNQkNQwFCQ1DAVJDUNBUsNQkNQwFCQ1DAVJDUNB\nUsNQkNQwFCQ1DAVJDUNBUsNQkNQwFCQ1DAVJDUNBUsNQkNQwFCQ1DAVJjUWHQpLzk9yTZH+SB5Jc\n0+0/L8ndSR7uvq4aX7mSJm2UnsIx4D1VtQm4BHhHkk3AdmB3VW0EdnfbkpaIRYdCVR2pqm909/8T\neBBYC1wJ7OwO2wlcNWqRkvozlsVgkqwHLgL2AKur6kj30OPA6tM8ZxuwbRznlzQ+I080JnkucBvw\nrqr68fBjNVjS+pQrSlfVjqraPJ9VcCX1Z6RQSPIsBoFwY1Xd3u1+Isma7vE1wNHRSpTUp1FefQhw\nA/BgVX1k6KFdwJbu/hbgzsWXJ6lvo8wpvBz4I+BbSb7Z7Xs/8EHgc0m2At8D3jRaiZL6tOhQqKp/\nB3Kahy9d7PeVNF1e0SipYShIahgKkhqGgqSGoSCpYShIahgKkhqGgqSGoSCpYShIahgKkhqGgqSG\noSCpYShIahgKkhqGgqSGoSCpYShIahgKkhqGgqSGoSCpYShIahgKkhqGgqTGOBaYXZHk3iR3ddsb\nkuxJcjDJLUnOGr1MSX0ZR0/hGuDBoe0PAR+tqhcDTwFbx3AOST0ZddXpdcAfAtd32wFeCdzaHbIT\nuGqUc0jq16g9hY8B7wV+1m0/H3i6qo5124eAtSOeQ1KPRlmK/rXA0arat8jnb0uyN8nexdYgafxG\nXYr+dUmuAJ4D/ArwceDcJCu73sI64PCpnlxVO4AdAElqhDokjdGiewpVdW1Vrauq9cDVwJer6i3A\nPcAbu8O2AHeOXKWk3kziOoX3AX+R5CCDOYYbJnAOSRMyyvDh56rqK8BXuvvfBV46ju8rqX9e0Sip\nYShIahgKkhqGgqSGoSCpYShIahgKGtkFb3iUC97w6LTL0JgYCpIahoLGxh7D8mAoaOwMh6XNUJDU\nMBQ0MfYWliZDQVLDUNBEzer8wh17Nk27hJllKEhqjOXzFKRZsNC//gs5/vW/u3+h5SxZhoJ6cXwI\nceC29WP9vn0NA4bPs9wDwuGDpIY9BS05054kXO69BnsKkhr2FNSrC97w6ILnFabdM/hFTqxtOfQc\n7Clops1yIJzKHXs2LbmaT2QoSGoYCurdfK5yXOp/cZdy7YaCpMZIE41JzgWuBy4ECngbcAC4BVgP\nPAq8qaqeGqlKnTGW8l/YEx3/tyy1ycdULX7B5yQ7gX+rquuTnAWcDbwf+GFVfTDJdmBVVb1vju9z\nUhFHP/XQoutSv37/82eN9PwDt61fVmFwOjMQDvuqavNcBy16+JDkecAf0C0gW1X/W1VPA1cCO7vD\ndgJXLfYckvo3ypzCBuBJ4DNJ7k1yfZJzgNVVdaQ75nFg9ahFank7E3oJsHT+naOEwkrgYuC6qroI\n+AmwffiAGoxNTjk+SbItyd4ke0eoQdKYjRIKh4BDVbWn276VQUg8kWQNQPf16KmeXFU7qmrzfMY4\nkvqz6FCoqseBx5Jc0O26FNgP7AK2dPu2AHeOVKGkXo363oc/B27sXnn4LvAnDILmc0m2At8D3jTi\nObRMffC9V0y7hN4thZcpRwqFqvomcKru/6WjfF9J0+MVjZIavnVavTsThw0nmuVhhD0FSQ1DQVLD\nUFCvHDq0ZvEqR0NBUsNQkNQwFCQ1DAVJDUNBUmNmL1564dtfMu0STmsWl1aXxsWegqSGoSCpYShI\nahgKkhozO9Go5cXLm09v1t4xOdK6D2Mr4hTrPswyX31YPMPhZD2GwWTXfZC0PBkKi3DgtvUcuG39\ntMuQJsJQkNQwFCQ1DIUROIzQcmQoSGoYCpIahoKkxkihkOTdSR5Icn+Sm5I8J8mGJHuSHExyS7ek\n3LLmvIKWk0WHQpK1wDuBzVV1IbACuBr4EPDRqnox8BSwdRyFSurHqMOHlcAvJ1kJnA0cAV7JYFl6\ngJ3AVSOeQ8vIrFzfPytmsT0W/Yaoqjqc5MPA94H/Br4I7AOerqpj3WGHgLUjV7kEHB9C+L6IU3OI\ntXSMMnxYBVwJbABeBJwDXLaA529LsjfJ3sXWIGn8Rnnr9KuAR6rqSYAktwMvB85NsrLrLawDDp/q\nyVW1A9jRPXdJvUtSozneZZ7F1ZH6MovDhuNGmVP4PnBJkrOTBLgU2A/cA7yxO2YLcOdoJUrq0yhz\nCnuS3Ap8AzgG3MvgL/+/ADcn+etu3w3jKHSpcG6h5VzC0uOHrEzQmR4M8w2EM2kYMeVhgx+yImnh\n/IzGCTpThxIOGZY2ewqSGs4p9Gi59xhG6SGcCfMKM/Ay5LzmFAyFKVhu4TDO4cJyDIcZCIPjnGiU\ntHD2FKZsqfYaJj2ZuBx6DDPUQzjOnoKkhbOnMENmvdcwjZcal2qPYQZ7CWBPQdJi2FOYYdPuOczK\nRUhLqbcwoz2E43xJcjnpKyBmJQh+kVkKiRkPgRM5fJC0cPYUlqnjPYul8Jd/FNPoNSyx3sEwewqS\nFs6egpaNSfYalnDvYJgTjdLp3LFn03L5RV8Ihw+SFs6egnTmsKcgaeEMBUkNQ0FSw1CQ1DAUJDUM\nBUmNOUMhyaeTHE1y/9C+85LcneTh7uuqbn+SfCLJwST3Jbl4ksVLGr/59BQ+y8lLzG8HdlfVRmB3\ntw1wObCxu
20DrhtPmZL6MmcoVNVXgR+esPtKYGd3fydw1dD+f6iBrzFYln7NuIqVNHmLnVNYXVVH\nuvuPA6u7+2uBx4aOO9Ttk7REjLyWZFXVYi5TTrKNwRBD0gxZbE/hiePDgu7r0W7/YeD8oePWdftO\nUlU7qmrzfK7FltSfxYbCLmBLd38LcOfQ/rd2r0JcAvxoaJghaSmoql94A24CjgA/ZTBHsBV4PoNX\nHR4GvgSc1x0b4JPAd4BvAZvn+v7d88qbN28Tv+2dz++jb52Wzhy+dVrSwhkKkhqGgqSGoSCpYShI\nahgKkhqGgqSGoSCpYShIahgKkhqGgqSGoSCpYShIahgKkhqGgqSGoSCpYShIahgKkhqGgqSGoSCp\nYShIahgKkhqGgqSGoSCpYShIaswZCkk+neRokvuH9v1Nkm8nuS/JHUnOHXrs2iQHkxxI8ppJFS5p\nMubTU/gscNkJ++4GLqyq3wIeAq4FSLIJuBr4ze45f5tkxdiqlTRxc4ZCVX0V+OEJ+75YVce6za8x\nWHIe4Erg5qr6n6p6BDgIvHSM9UqasHHMKbwN+Nfu/lrgsaHHDnX7JC0RK0d5cpIPAMeAGxfx3G3A\ntlHOL2n8Fh0KSf4YeC1waT2znv1h4Pyhw9Z1+05SVTuAHd33cil6aUYsaviQ5DLgvcDrquq/hh7a\nBVyd5NlJNgAbgf8YvUxJfZmzp5DkJuAVwAuSHAL+ksGrDc8G7k4C8LWq+tOqeiDJ54D9DIYV76iq\n/5tU8ZLGL8/0/KdYhMMHqQ/7qmrzXAd5RaOkhqEgqWEoSGoYCpIahoKkhqEgqWEoSGoYCpIaI70h\naox+APyk+zptL8A6hllHaynX8WvzOWgmrmgESLJ3PldbWYd1WMdk63D4IKlhKEhqzFIo7Jh2AR3r\naFlHa9nXMTNzCpJmwyz1FCTNgJkIhSSXdetEHEyyvadznp/kniT7kzyQ5Jpu/3lJ7k7ycPd1VU/1\nrEhyb5K7uu0NSfZ0bXJLkrN6qOHcJLd2a3o8mORl02iPJO/u/k/uT3JTkuf01R6nWefklG2QgU90\nNd2X5OIJ19HLeitTD4VuXYhPApcDm4A3d+tHTNox4D1VtQm4BHhHd97twO6q2gjs7rb7cA3w4ND2\nh4CPVtWLgaeArT3U8HHg81X1G8Bvd/X02h5J1gLvBDZX1YXACgZrifTVHp/l5HVOTtcGlzP4yMGN\nDD6E+LoJ19HPeitVNdUb8DLgC0Pb1wLXTqGOO4FXAweANd2+NcCBHs69jsEP2yuBu4AwuDBl5ana\naEI1PA94hG6eaWh/r+3BM8sEnMfg4rq7gNf02R7AeuD+udoA+Hvgzac6bhJ1nPDY64Ebu/vN7wzw\nBeBliz3v1HsKzMBaEUnWAxcBe4DVVXWke+hxYHUPJXyMwQfh/qzbfj7wdD2z4E4fbbIBeBL4TDeM\nuT7JOfTcHlV1GPgw8H3gCPAjYB/9t8ew07XBNH92J7beyiyEwlQleS5wG/Cuqvrx8GM1iN2JvjyT\n5LXA0araN8nzzMNK4GLguqq6iMFl581Qoaf2WMVgpbENwIuAczi5Gz01fbTBXEZZb2U+ZiEU5r1W\nxLgleRaDQLixqm7vdj+RZE33+Brg6ITLeDnwuiSPAjczGEJ8HDg3yfH3pvTRJoeAQ1W1p9u+lUFI\n9N0erwIeqaonq+qnwO0M2qjv9hh2ujbo/Wd3aL2Vt3QBNfY6ZiEUvg5s7GaXz2IwYbJr0ifN4LPp\nbwAerKqPDD20C9jS3d/CYK5hYqrq2qpaV1XrGfzbv1xVbwHuAd7YYx2PA48luaDbdSmDj+rvtT0Y\nDBsuSXJ29390vI5e2+MEp2uDXcBbu1chLgF+NDTMGLve1luZ5KTRAiZUrmAwm/od4AM9nfP3GHQD\n7wO+2d2uYDCe3w08DHwJOK/HdngFcFd3/9e7/9iDwD8Bz+7h/L8D7O3a5J+BVdNoD+CvgG8D9wP/\nyGCNkV7aA7iJwVzGTxn0nraerg0YTAh/svu5/RaDV0wmWcdBBnMHx39e/27o+A90dRwALh/l3F7R\nKKkxC8MHSTPEUJDUMBQkNQwFSQ1DQVLDUJDUMBQkNQwFSY3/B5CihLXFt5GeAAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "tags": [] + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "ORQXaa6k30yD", + "colab_type": "text" + }, + "source": [ + "# Training a Model\n", + "\n", + "Now we move on to training our very own model. Here we will be finetuning the base of a Mask RCNN, modifying it to support Semantic Segmentation and change the number of classes to support this dataset. To do this we need\n", + "\n", + "1. A base model that has the same amount of output classes as our dataset. In this case, we have need for only 3 classes instead of COCO's 80. Hence , we first need to do some model trimming. \n", + "\n", + "2. Second, we need to build a Panoptic FPN model. That means attaching the semantic segmentation branch to the FPN.\n", + "\n", + "3. FInally, we write a loss function to train the semantic segmentation head.\n", + "\n", + "4. Lastly, set to train !" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "SVaNqbpiAzwx", + "colab_type": "text" + }, + "source": [ + "## Model Trimming" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "hbzY16ocEdrg", + "colab_type": "text" + }, + "source": [ + "### Helper Functions for Visualising Detections" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "yk5a6RpsEdIt", + "colab_type": "code", + "colab": {} + }, + "source": [ + "class Resize(object):\n", + " def __init__(self, min_size, max_size):\n", + " self.min_size = min_size\n", + " self.max_size = max_size\n", + "\n", + " # modified from torchvision to add support for max size\n", + " def get_size(self, image_size):\n", + " w, h = image_size\n", + " size = self.min_size\n", + " max_size = self.max_size\n", + " if max_size is not None:\n", + " min_original_size = float(min((w, h)))\n", + " max_original_size = float(max((w, h)))\n", + " if max_original_size / min_original_size * size > max_size:\n", + " size = int(round(max_size * min_original_size / max_original_size))\n", + "\n", + " if (w <= h and w == size) or (h <= w and h == size):\n", + " return (h, w)\n", + "\n", + " if w < h:\n", + " ow = size\n", + " oh = int(size * h / w)\n", + " else:\n", + " oh = size\n", + " ow = int(size * w / h)\n", + "\n", + " return (oh, ow)\n", + "\n", + " def __call__(self, image):\n", + " size = self.get_size(image.size)\n", + " image = F.resize(image, size)\n", + " return image\n", + " \n", + " \n", + "class COCODemo(object):\n", + " \n", + " def __init__(\n", + " self,\n", + " cfg,\n", + " confidence_threshold=0.7,\n", + " show_mask_heatmaps=False,\n", + " masks_per_dim=2,\n", + " min_image_size=224,\n", + " convert_model=False\n", + " ):\n", + " self.cfg = cfg.clone()\n", + " if convert_model:\n", + " self.model = build_detection_model(cfg)\n", + " else:\n", + " self.model = build_panoptic_network(cfg)\n", + " self.training = False\n", + "\n", + " self.model.eval()\n", + " self.device = torch.device(cfg.MODEL.DEVICE)\n", + " self.model.to(self.device)\n", + " self.min_image_size = min_image_size\n", + "\n", + " save_dir = cfg.OUTPUT_DIR\n", + " checkpointer = DetectronCheckpointer(cfg, self.model, save_dir=save_dir)\n", + " _ = checkpointer.load(cfg.MODEL.WEIGHT)\n", + "\n", + " self.transforms = self.build_transform()\n", + "\n", + " mask_threshold = -1 if show_mask_heatmaps else 0.5\n", + " self.masker = Masker(threshold=mask_threshold, padding=1)\n", + "\n", + " # used to make colors for each class\n", + " self.palette = torch.tensor([2 ** 25 - 1, 2 ** 15 - 1, 2 ** 21 - 1])\n", + "\n", + " self.cpu_device = torch.device(\"cpu\")\n", + " 
self.confidence_threshold = confidence_threshold\n", + " self.show_mask_heatmaps = show_mask_heatmaps\n", + " self.masks_per_dim = masks_per_dim\n", + "\n", + " def build_transform(self):\n", + " \"\"\"\n", + " Creates a basic transformation that was used to train the models\n", + " \"\"\"\n", + " cfg = self.cfg\n", + "\n", + " # we are loading images with OpenCV, so we don't need to convert them\n", + " # to BGR, they are already! So all we need to do is to normalize\n", + " # by 255 if we want to convert to BGR255 format, or flip the channels\n", + " # if we want it to be in RGB in [0-1] range.\n", + " if cfg.INPUT.TO_BGR255:\n", + " to_bgr_transform = T.Lambda(lambda x: x * 255)\n", + " else:\n", + " to_bgr_transform = T.Lambda(lambda x: x[[2, 1, 0]])\n", + "\n", + " normalize_transform = T.Normalize(\n", + " mean=cfg.INPUT.PIXEL_MEAN, std=cfg.INPUT.PIXEL_STD\n", + " )\n", + " min_size = cfg.INPUT.MIN_SIZE_TEST\n", + " max_size = cfg.INPUT.MAX_SIZE_TEST\n", + " transform = T.Compose(\n", + " [\n", + " T.ToPILImage(),\n", + " Resize(min_size, max_size),\n", + " T.ToTensor(),\n", + " to_bgr_transform,\n", + " normalize_transform,\n", + " ]\n", + " )\n", + " return transform\n", + "\n", + " def run_on_opencv_image(self, image, panoptic=False, objDet=False, semantic=False):\n", + " \"\"\"\n", + " Arguments:\n", + " image (np.ndarray): an image as returned by OpenCV\n", + " Returns:\n", + " prediction (BoxList): the detected objects. Additional information\n", + " of the detection properties can be found in the fields of\n", + " the BoxList via `prediction.fields()`\n", + " \"\"\"\n", + " mask, predictions = self.compute_prediction(image)\n", + " top_predictions = self.select_top_predictions(predictions)\n", + " \n", + " \n", + " result = image.copy()\n", + " \n", + " if semantic or panoptic:\n", + " height, width = image.shape[:-1]\n", + "\n", + " # overlay segmentation mask first\n", + " mask = np.squeeze(mask)\n", + " mask = (mask > 0.5).astype(np.uint8)\n", + " result = np.transpose(result, (2,0,1))\n", + "\n", + " mask = cv2.resize(mask, dsize=(width, height), interpolation=cv2.INTER_CUBIC)\n", + " result = result*mask\n", + " result = np.transpose(result, (1,2,0))\n", + " \n", + " if objDet or panoptic:\n", + "\n", + " if self.show_mask_heatmaps:\n", + " return self.create_mask_montage(result, top_predictions)\n", + " result = self.overlay_boxes(result, top_predictions)\n", + " if self.cfg.MODEL.MASK_ON:\n", + " result = self.overlay_mask(result, top_predictions)\n", + " if self.cfg.MODEL.KEYPOINT_ON:\n", + " result = self.overlay_keypoints(result, top_predictions)\n", + " result = self.overlay_class_names(result, top_predictions)\n", + "\n", + " return result\n", + "\n", + " def compute_prediction(self, original_image):\n", + " \"\"\"\n", + " Arguments:\n", + " original_image (np.ndarray): an image as returned by OpenCV\n", + " Returns:\n", + " prediction (BoxList): the detected objects. 
Additional information\n", + " of the detection properties can be found in the fields of\n", + " the BoxList via `prediction.fields()`\n", + " \"\"\"\n", + " # apply pre-processing to image\n", + " image = self.transforms(original_image)\n", + " # convert to an ImageList, padded so that it is divisible by\n", + " # cfg.DATALOADER.SIZE_DIVISIBILITY\n", + " image_list = to_image_list(image, self.cfg.DATALOADER.SIZE_DIVISIBILITY)\n", + " image_list = image_list.to(self.device)\n", + " # compute predictions\n", + " with torch.no_grad():\n", + " semantic_mask, predictions = self.model(image_list)\n", + " predictions = [o.to(self.cpu_device) for o in predictions]\n", + "\n", + " # always single image is passed at a time\n", + " prediction = predictions[0]\n", + "\n", + " # reshape prediction (a BoxList) into the original image size\n", + " height, width = original_image.shape[:-1]\n", + " prediction = prediction.resize((width, height))\n", + "\n", + " if prediction.has_field(\"mask\"):\n", + " # if we have masks, paste the masks in the right position\n", + " # in the image, as defined by the bounding boxes\n", + " masks = prediction.get_field(\"mask\")\n", + " # always single image is passed at a time\n", + " masks = self.masker([masks], [prediction])[0]\n", + " prediction.add_field(\"mask\", masks)\n", + " return semantic_mask.cpu().detach().numpy(), prediction\n", + "\n", + " def select_top_predictions(self, predictions):\n", + " \"\"\"\n", + " Select only predictions which have a `score` > self.confidence_threshold,\n", + " and returns the predictions in descending order of score\n", + " Arguments:\n", + " predictions (BoxList): the result of the computation by the model.\n", + " It should contain the field `scores`.\n", + " Returns:\n", + " prediction (BoxList): the detected objects. 
Additional information\n", + " of the detection properties can be found in the fields of\n", + " the BoxList via `prediction.fields()`\n", + " \"\"\"\n", + " scores = predictions.get_field(\"scores\")\n", + " keep = torch.nonzero(scores > self.confidence_threshold).squeeze(1)\n", + " predictions = predictions[keep]\n", + " scores = predictions.get_field(\"scores\")\n", + " _, idx = scores.sort(0, descending=True)\n", + " return predictions[idx]\n", + "\n", + " def compute_colors_for_labels(self, labels):\n", + " \"\"\"\n", + " Simple function that adds fixed colors depending on the class\n", + " \"\"\"\n", + " colors = labels[:, None] * self.palette\n", + " colors = (colors % 255).numpy().astype(\"uint8\")\n", + " return colors\n", + "\n", + " def overlay_boxes(self, image, predictions):\n", + " \"\"\"\n", + " Adds the predicted boxes on top of the image\n", + " Arguments:\n", + " image (np.ndarray): an image as returned by OpenCV\n", + " predictions (BoxList): the result of the computation by the model.\n", + " It should contain the field `labels`.\n", + " \"\"\"\n", + " labels = predictions.get_field(\"labels\")\n", + " boxes = predictions.bbox\n", + "\n", + " colors = self.compute_colors_for_labels(labels).tolist()\n", + "\n", + " for box, color in zip(boxes, colors):\n", + " box = box.to(torch.int64)\n", + " top_left, bottom_right = box[:2].tolist(), box[2:].tolist()\n", + " image = cv2.rectangle(\n", + " image, tuple(top_left), tuple(bottom_right), tuple(color), 1\n", + " )\n", + "\n", + " return image\n", + "\n", + " def overlay_mask(self, image, predictions):\n", + " \"\"\"\n", + " Adds the instances contours for each predicted object.\n", + " Each label has a different color.\n", + " Arguments:\n", + " image (np.ndarray): an image as returned by OpenCV\n", + " predictions (BoxList): the result of the computation by the model.\n", + " It should contain the field `mask` and `labels`.\n", + " \"\"\"\n", + " masks = predictions.get_field(\"mask\").numpy()\n", + " labels = predictions.get_field(\"labels\")\n", + "\n", + " colors = self.compute_colors_for_labels(labels).tolist()\n", + "\n", + " for mask, color in zip(masks, colors):\n", + " thresh = mask[0, :, :, None]\n", + " contours, hierarchy = cv2_util.findContours(\n", + " thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE\n", + " )\n", + " image = cv2.drawContours(image, contours, -1, color, 3)\n", + "\n", + " composite = image\n", + "\n", + " return composite\n", + "\n", + " def overlay_keypoints(self, image, predictions):\n", + " keypoints = predictions.get_field(\"keypoints\")\n", + " kps = keypoints.keypoints\n", + " scores = keypoints.get_field(\"logits\")\n", + " kps = torch.cat((kps[:, :, 0:2], scores[:, :, None]), dim=2).numpy()\n", + " for region in kps:\n", + " image = vis_keypoints(image, region.transpose((1, 0)))\n", + " return image\n", + "\n", + " def create_mask_montage(self, image, predictions):\n", + " \"\"\"\n", + " Create a montage showing the probability heatmaps for each one one of the\n", + " detected objects\n", + " Arguments:\n", + " image (np.ndarray): an image as returned by OpenCV\n", + " predictions (BoxList): the result of the computation by the model.\n", + " It should contain the field `mask`.\n", + " \"\"\"\n", + " masks = predictions.get_field(\"mask\")\n", + " masks_per_dim = self.masks_per_dim\n", + " masks = L.interpolate(\n", + " masks.float(), scale_factor=1 / masks_per_dim\n", + " ).byte()\n", + " height, width = masks.shape[-2:]\n", + " max_masks = masks_per_dim ** 2\n", + " masks = 
masks[:max_masks]\n", + " # handle case where we have less detections than max_masks\n", + " if len(masks) < max_masks:\n", + " masks_padded = torch.zeros(max_masks, 1, height, width, dtype=torch.uint8)\n", + " masks_padded[: len(masks)] = masks\n", + " masks = masks_padded\n", + " masks = masks.reshape(masks_per_dim, masks_per_dim, height, width)\n", + " result = torch.zeros(\n", + " (masks_per_dim * height, masks_per_dim * width), dtype=torch.uint8\n", + " )\n", + " for y in range(masks_per_dim):\n", + " start_y = y * height\n", + " end_y = (y + 1) * height\n", + " for x in range(masks_per_dim):\n", + " start_x = x * width\n", + " end_x = (x + 1) * width\n", + " result[start_y:end_y, start_x:end_x] = masks[y, x]\n", + " return cv2.applyColorMap(result.numpy(), cv2.COLORMAP_JET)\n", + "\n", + " def overlay_class_names(self, image, predictions):\n", + " \"\"\"\n", + " Adds detected class names and scores in the positions defined by the\n", + " top-left corner of the predicted bounding box\n", + " Arguments:\n", + " image (np.ndarray): an image as returned by OpenCV\n", + " predictions (BoxList): the result of the computation by the model.\n", + " It should contain the field `scores` and `labels`.\n", + " \"\"\"\n", + " scores = predictions.get_field(\"scores\").tolist()\n", + " labels = predictions.get_field(\"labels\").tolist()\n", + " labels = [self.CATEGORIES[i] for i in labels]\n", + " boxes = predictions.bbox\n", + "\n", + " template = \"{}: {:.2f}\"\n", + " for box, score, label in zip(boxes, scores, labels):\n", + " x, y = box[:2]\n", + " s = template.format(label, score)\n", + " cv2.putText(\n", + " image, s, (x, y), cv2.FONT_HERSHEY_SIMPLEX, .5, (255, 255, 255), 1\n", + " )\n", + "\n", + " return image\n", + "\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "from maskrcnn_benchmark.structures.keypoint import PersonKeypoints\n", + "\n", + "def vis_keypoints(img, kps, kp_thresh=2, alpha=0.7):\n", + " \"\"\"Visualizes keypoints (adapted from vis_one_image).\n", + " kps has shape (4, #keypoints) where 4 rows are (x, y, logit, prob).\n", + " \"\"\"\n", + " dataset_keypoints = PersonKeypoints.NAMES\n", + " kp_lines = PersonKeypoints.CONNECTIONS\n", + "\n", + " # Convert from plt 0-1 RGBA colors to 0-255 BGR colors for opencv.\n", + " cmap = plt.get_cmap('rainbow')\n", + " colors = [cmap(i) for i in np.linspace(0, 1, len(kp_lines) + 2)]\n", + " colors = [(c[2] * 255, c[1] * 255, c[0] * 255) for c in colors]\n", + "\n", + " # Perform the drawing on a copy of the image, to allow for blending.\n", + " kp_mask = np.copy(img)\n", + "\n", + " # Draw mid shoulder / mid hip first for better visualization.\n", + " mid_shoulder = (\n", + " kps[:2, dataset_keypoints.index('right_shoulder')] +\n", + " kps[:2, dataset_keypoints.index('left_shoulder')]) / 2.0\n", + " sc_mid_shoulder = np.minimum(\n", + " kps[2, dataset_keypoints.index('right_shoulder')],\n", + " kps[2, dataset_keypoints.index('left_shoulder')])\n", + " mid_hip = (\n", + " kps[:2, dataset_keypoints.index('right_hip')] +\n", + " kps[:2, dataset_keypoints.index('left_hip')]) / 2.0\n", + " sc_mid_hip = np.minimum(\n", + " kps[2, dataset_keypoints.index('right_hip')],\n", + " kps[2, dataset_keypoints.index('left_hip')])\n", + " nose_idx = dataset_keypoints.index('nose')\n", + " if sc_mid_shoulder > kp_thresh and kps[2, nose_idx] > kp_thresh:\n", + " cv2.line(\n", + " kp_mask, tuple(mid_shoulder), tuple(kps[:2, nose_idx]),\n", + " color=colors[len(kp_lines)], thickness=2, lineType=cv2.LINE_AA)\n", + " if 
sc_mid_shoulder > kp_thresh and sc_mid_hip > kp_thresh:\n", + " cv2.line(\n", + " kp_mask, tuple(mid_shoulder), tuple(mid_hip),\n", + " color=colors[len(kp_lines) + 1], thickness=2, lineType=cv2.LINE_AA)\n", + "\n", + " # Draw the keypoints.\n", + " for l in range(len(kp_lines)):\n", + " i1 = kp_lines[l][0]\n", + " i2 = kp_lines[l][1]\n", + " p1 = kps[0, i1], kps[1, i1]\n", + " p2 = kps[0, i2], kps[1, i2]\n", + " if kps[2, i1] > kp_thresh and kps[2, i2] > kp_thresh:\n", + " cv2.line(\n", + " kp_mask, p1, p2,\n", + " color=colors[l], thickness=2, lineType=cv2.LINE_AA)\n", + " if kps[2, i1] > kp_thresh:\n", + " cv2.circle(\n", + " kp_mask, p1,\n", + " radius=3, color=colors[l], thickness=-1, lineType=cv2.LINE_AA)\n", + " if kps[2, i2] > kp_thresh:\n", + " cv2.circle(\n", + " kp_mask, p2,\n", + " radius=3, color=colors[l], thickness=-1, lineType=cv2.LINE_AA)\n", + "\n", + " # Blend the keypoints.\n", + " return cv2.addWeighted(img, 1.0 - alpha, kp_mask, alpha, 0)" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "If8z4OZfDHmC", + "colab_type": "text" + }, + "source": [ + "### Base Model Config\n", + "\n", + "This is the base model that we will finetune from. First we need to replace the bounding box heads and mask heads to make it compatible with our Shapes Dataset." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "wM0coO44ClbV", + "colab_type": "code", + "outputId": "7ecc3f19-8b98-4a66-c155-a630b6036691", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + } + }, + "source": [ + "%%writefile base_config.yaml\n", + "MODEL:\n", + " META_ARCHITECTURE: \"GeneralizedRCNN\"\n", + " WEIGHT: \"catalog://Caffe2Detectron/COCO/35858933/e2e_mask_rcnn_R-50-FPN_1x\"\n", + " BACKBONE:\n", + " CONV_BODY: \"R-50-FPN\"\n", + " RESNETS:\n", + " BACKBONE_OUT_CHANNELS: 256\n", + " RPN:\n", + " USE_FPN: True\n", + " ANCHOR_STRIDE: (4, 8, 16, 32, 64)\n", + " PRE_NMS_TOP_N_TRAIN: 2000\n", + " PRE_NMS_TOP_N_TEST: 1000\n", + " POST_NMS_TOP_N_TEST: 1000\n", + " FPN_POST_NMS_TOP_N_TEST: 1000\n", + " ROI_HEADS:\n", + " USE_FPN: True\n", + " ROI_BOX_HEAD:\n", + " POOLER_RESOLUTION: 7\n", + " POOLER_SCALES: (0.25, 0.125, 0.0625, 0.03125)\n", + " POOLER_SAMPLING_RATIO: 2\n", + " FEATURE_EXTRACTOR: \"FPN2MLPFeatureExtractor\"\n", + " PREDICTOR: \"FPNPredictor\"\n", + " ROI_MASK_HEAD:\n", + " POOLER_SCALES: (0.25, 0.125, 0.0625, 0.03125)\n", + " FEATURE_EXTRACTOR: \"MaskRCNNFPNFeatureExtractor\"\n", + " PREDICTOR: \"MaskRCNNC4Predictor\"\n", + " POOLER_RESOLUTION: 14\n", + " POOLER_SAMPLING_RATIO: 2\n", + " RESOLUTION: 28\n", + " SHARE_BOX_FEATURE_EXTRACTOR: False\n", + " MASK_ON: True\n", + "DATALOADER:\n", + " SIZE_DIVISIBILITY: 32" + ], + "execution_count": 8, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Writing base_config.yaml\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "mOo-0LGFEAmc", + "colab_type": "text" + }, + "source": [ + "### Pretrained weight removal\n", + "\n", + "Here, the pretrained weights of bbox, mask and class predictions are removed. This is done so that we can make the model shapes dataset compatible i.e predict 3 classes instead of Coco's 81 classes." 
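+ "\n",
+ "\n",
+ "The same idea can be checked in isolation with plain PyTorch, independent of this repo. The sketch below uses a toy two-layer model (the layer names are illustrative, not the real Mask R-CNN heads): after dropping the head's entries from the state dict, `load_state_dict(..., strict=False)` keeps the pretrained weights that still match and leaves the new, differently-sized head randomly initialised. `removekey` in the next cell does the same thing for the real predictor weights, and the checkpoint loaded later from base_model.pth is tolerated in the same missing-key fashion.\n",
+ "\n",
+ "```python\n",
+ "import torch\n",
+ "from torch import nn\n",
+ "\n",
+ "# toy \"pretrained\" model with an 81-way classifier head (COCO-style)\n",
+ "old = nn.Sequential(nn.Linear(16, 8), nn.Linear(8, 81))\n",
+ "state = old.state_dict()\n",
+ "\n",
+ "# drop the head weights, mirroring removekey() below\n",
+ "for k in [\"1.weight\", \"1.bias\"]:\n",
+ "    state.pop(k)\n",
+ "\n",
+ "# new model with a 4-way head (background + 3 shapes)\n",
+ "new = nn.Sequential(nn.Linear(16, 8), nn.Linear(8, 4))\n",
+ "missing, unexpected = new.load_state_dict(state, strict=False)\n",
+ "print(missing)  # ['1.weight', '1.bias'] -> this head stays freshly initialised\n",
+ "```"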
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "ISFsxBxBDZcQ",
+ "colab_type": "code",
+ "colab": {}
+ },
+ "source": [
+ "def removekey(d, listofkeys):\n",
+ "    r = dict(d)\n",
+ "    for key in listofkeys:\n",
+ "        print('key: {} is removed'.format(key))\n",
+ "        r.pop(key)\n",
+ "    return r\n",
+ "\n",
+ "logger_dir = 'log'\n",
+ "\n",
+ "if logger_dir:\n",
+ "    mkdir(logger_dir)\n",
+ "\n",
+ "logger = setup_logger(\"maskrcnn_benchmark\", logger_dir, get_rank())\n",
+ "logger.info(\"Using {} GPUs\".format(1))\n",
+ "\n",
+ "config_file = \"base_config.yaml\"\n",
+ "\n",
+ "# update the config options with the config file\n",
+ "cfg.merge_from_file(config_file)\n",
+ "\n",
+ "# Add these for printing class names over your predictions.\n",
+ "COCODemo.CATEGORIES = [\n",
+ "    \"__background\",\n",
+ "    \"square\",\n",
+ "    \"circle\",\n",
+ "    \"triangle\"\n",
+ "]\n",
+ "\n",
+ "demo = COCODemo(\n",
+ "    cfg,\n",
+ "    min_image_size=800,\n",
+ "    confidence_threshold=0.7,\n",
+ "    convert_model=True)\n",
+ "\n",
+ "base_model = demo.model\n",
+ "\n",
+ "# Remove the pretrained predictor weights from the state dict\n",
+ "new_state_dict = removekey(base_model.state_dict(), [\n",
+ "    \"roi_heads.box.predictor.cls_score.weight\", \"roi_heads.box.predictor.cls_score.bias\",\n",
+ "    \"roi_heads.box.predictor.bbox_pred.weight\", \"roi_heads.box.predictor.bbox_pred.bias\",\n",
+ "    \"roi_heads.mask.predictor.mask_fcn_logits.weight\", \"roi_heads.mask.predictor.mask_fcn_logits.bias\"\n",
+ "])\n",
+ "\n",
+ "# Save the new state dict; we will use it as the starting weights for our fine-tuned model\n",
+ "torch.save(new_state_dict, \"base_model.pth\")"
+ ],
+ "execution_count": 0,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "bbCBInqHFUg7",
+ "colab_type": "text"
+ },
+ "source": [
+ "### Fine-Tuned Model Config\n",
+ "\n",
+ "Here we define the config for our shapes dataset; a short sketch of the required config registration follows the list. The important fields are:\n",
+ "\n",
+ "1. WEIGHT: points to the base_model.pth saved in the previous step.\n",
+ "2. NUM_CLASSES: defines how many classes we will predict. Note that the number includes the background, hence our shapes dataset has 4 classes.\n",
+ "3. PANOPTIC.CHANNEL_SIZE: sets the channel size of the segmentation head attached to the FPN.\n",
+ "4. PANOPTIC.NUM_CLASSES: the number of classes for the semantic segmentation head.\n",
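+ "\n",
+ "The PANOPTIC keys are not part of the stock maskrcnn-benchmark config, so they have to exist in the YACS defaults before `merge_from_file` will accept this YAML (YACS rejects keys it does not already know about). A minimal sketch of that registration, assuming `_C` is the root CfgNode of the config defaults and mirroring the names used above, looks like this:\n",
+ "\n",
+ "```python\n",
+ "from yacs.config import CfgNode as CN\n",
+ "\n",
+ "# hypothetical registration of the extra node used by the YAML above\n",
+ "_C.MODEL.PANOPTIC = CN()\n",
+ "_C.MODEL.PANOPTIC.CHANNEL_SIZE = 128  # width of the semantic segmentation branch\n",
+ "_C.MODEL.PANOPTIC.NUM_CLASSES = 1     # foreground vs. background for the shapes data\n",
+ "```"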
+ ] + }, + { + "cell_type": "code", + "metadata": { + "id": "5AhIiTgmFXyi", + "colab_type": "code", + "outputId": "2cfd6b5c-348a-4ca0-ad06-d73937ca8c09", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + } + }, + "source": [ + "%%writefile shapes_config.yaml\n", + "MODEL:\n", + " META_ARCHITECTURE: \"GeneralizedRCNN\"\n", + " WEIGHT: \"base_model.pth\"\n", + " BACKBONE:\n", + " CONV_BODY: \"R-50-FPN\"\n", + " RESNETS:\n", + " BACKBONE_OUT_CHANNELS: 256\n", + " RPN:\n", + " USE_FPN: True\n", + " ANCHOR_STRIDE: (4, 8, 16, 32, 64)\n", + " PRE_NMS_TOP_N_TRAIN: 2000\n", + " PRE_NMS_TOP_N_TEST: 1000\n", + " POST_NMS_TOP_N_TEST: 1000\n", + " FPN_POST_NMS_TOP_N_TEST: 1000\n", + " ROI_HEADS:\n", + " USE_FPN: True\n", + " ROI_BOX_HEAD:\n", + " POOLER_RESOLUTION: 7\n", + " POOLER_SCALES: (0.25, 0.125, 0.0625, 0.03125)\n", + " POOLER_SAMPLING_RATIO: 2\n", + " FEATURE_EXTRACTOR: \"FPN2MLPFeatureExtractor\"\n", + " PREDICTOR: \"FPNPredictor\"\n", + " NUM_CLASSES: 4 # background + num_classes : IMPORTANT dont forget to add this\n", + " ROI_MASK_HEAD:\n", + " POOLER_SCALES: (0.25, 0.125, 0.0625, 0.03125)\n", + " FEATURE_EXTRACTOR: \"MaskRCNNFPNFeatureExtractor\"\n", + " PREDICTOR: \"MaskRCNNC4Predictor\"\n", + " POOLER_RESOLUTION: 14\n", + " POOLER_SAMPLING_RATIO: 2\n", + " RESOLUTION: 28\n", + " SHARE_BOX_FEATURE_EXTRACTOR: False\n", + " MASK_ON: True\n", + " PANOPTIC:\n", + " CHANNEL_SIZE: 128\n", + " NUM_CLASSES: 1 # just 1 class to seperate foreground from background\n", + "DATALOADER:\n", + " SIZE_DIVISIBILITY: 32" + ], + "execution_count": 11, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Overwriting shapes_config.yaml\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "tAn3omCjTFGI", + "colab_type": "text" + }, + "source": [ + "### Data Loader\n", + "\n", + "This function creates a data loader with our shapes dataset. This data loader is used internally in the repo to train the model." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "oODu2UpVTHXz", + "colab_type": "code", + "colab": {} + }, + "source": [ + "def build_data_loader(cfg, dataset, is_train=True, is_distributed=False, start_iter=0):\n", + " num_gpus = get_world_size()\n", + " if is_train:\n", + " images_per_batch = cfg.SOLVER.IMS_PER_BATCH\n", + " assert (\n", + " images_per_batch % num_gpus == 0\n", + " ), \"SOLVER.IMS_PER_BATCH ({}) must be divisible by the number of GPUs ({}) used.\".format(\n", + " images_per_batch, num_gpus)\n", + " images_per_gpu = images_per_batch // num_gpus\n", + " shuffle = True\n", + " num_iters = cfg.SOLVER.MAX_ITER\n", + " else:\n", + " images_per_batch = cfg.TEST.IMS_PER_BATCH\n", + " assert (\n", + " images_per_batch % num_gpus == 0\n", + " ), \"TEST.IMS_PER_BATCH ({}) must be divisible by the number of GPUs ({}) used.\".format(\n", + " images_per_batch, num_gpus)\n", + " images_per_gpu = images_per_batch // num_gpus\n", + " shuffle = False if not is_distributed else True\n", + " num_iters = None\n", + " start_iter = 0\n", + "\n", + " if images_per_gpu > 1:\n", + " logger = logging.getLogger(__name__)\n", + " logger.warning(\n", + " \"When using more than one image per GPU you may encounter \"\n", + " \"an out-of-memory (OOM) error if your GPU does not have \"\n", + " \"sufficient memory. If this happens, you can reduce \"\n", + " \"SOLVER.IMS_PER_BATCH (for training) or \"\n", + " \"TEST.IMS_PER_BATCH (for inference). 
For training, you must \"\n", + " \"also adjust the learning rate and schedule length according \"\n", + " \"to the linear scaling rule. See for example: \"\n", + " \"https://github.com/facebookresearch/Detectron/blob/master/configs/getting_started/tutorial_1gpu_e2e_faster_rcnn_R-50-FPN.yaml#L14\"\n", + " )\n", + "\n", + " # group images which have similar aspect ratio. In this case, we only\n", + " # group in two cases: those with width / height > 1, and the other way around,\n", + " # but the code supports more general grouping strategy\n", + " aspect_grouping = [1] if cfg.DATALOADER.ASPECT_RATIO_GROUPING else []\n", + "\n", + " paths_catalog = import_file(\n", + " \"maskrcnn_benchmark.config.paths_catalog\", cfg.PATHS_CATALOG, True\n", + " )\n", + " DatasetCatalog = paths_catalog.DatasetCatalog\n", + " dataset_list = cfg.DATASETS.TRAIN if is_train else cfg.DATASETS.TEST\n", + "\n", + " # If bbox aug is enabled in testing, simply set transforms to None and we will apply transforms later\n", + " transforms = None if not is_train and cfg.TEST.BBOX_AUG.ENABLED else build_transforms(cfg, is_train)\n", + " \n", + " dataset.transforms = transforms\n", + " datasets = [ dataset ]\n", + " \n", + " data_loaders = []\n", + " for dataset in datasets:\n", + " sampler = make_data_sampler(dataset, shuffle, is_distributed)\n", + " batch_sampler = make_batch_data_sampler(\n", + " dataset, sampler, aspect_grouping, images_per_gpu, num_iters, start_iter\n", + " )\n", + " collator = BBoxAugCollator() if not is_train and cfg.TEST.BBOX_AUG.ENABLED else \\\n", + " BatchCollator(cfg.DATALOADER.SIZE_DIVISIBILITY)\n", + " num_workers = cfg.DATALOADER.NUM_WORKERS\n", + " data_loader = torch.utils.data.DataLoader(\n", + " dataset,\n", + " num_workers=num_workers,\n", + " batch_sampler=batch_sampler,\n", + " collate_fn=collator,\n", + " )\n", + " data_loaders.append(data_loader)\n", + " if is_train:\n", + " # during training, a single (possibly concatenated) data_loader is returned\n", + " assert len(data_loaders) == 1\n", + " return data_loaders[0]\n", + " return data_loaders" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "xs_KL1R1aGSA", + "colab_type": "text" + }, + "source": [ + "### Semantic Segmentation Loss\n", + "\n", + "Loss for the Semantic Segmentation Head of the Model" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "9ipih-wPaJqK", + "colab_type": "code", + "colab": {} + }, + "source": [ + "class SegLoss(nn.Module):\n", + " \n", + " def __init__(self):\n", + " super(SegLoss, self).__init__()\n", + "\n", + " def prepare_target(self, targets):\n", + " labels = []\n", + "\n", + " for t in targets:\n", + " t = t.get_field(\"seg_masks\").get_mask_tensor().unsqueeze(0)\n", + " labels.append(t)\n", + "\n", + " return cat(labels, dim=0).unsqueeze(1).to(\"cuda\", dtype=torch.float32)\n", + "\n", + " def forward(self, mask, target):\n", + " '''\n", + " mask : Tensor\n", + " target : list[Boxlist]\n", + " '''\n", + " \n", + " target = self.prepare_target(target)\n", + "\n", + " loss = Fx.binary_cross_entropy_with_logits(mask, target)\n", + " \n", + " return loss" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "zBrlwqT7RsdJ", + "colab_type": "text" + }, + "source": [ + "### Segmenter Model\n", + "\n", + "The model modifies the FPN of the Mask RCNN as per [this](https://arxiv.org/abs/1901.02446) paper" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "dJMk5lxwRvTh", + 
"colab_type": "code", + "colab": {} + }, + "source": [ + "def panoptic_upsampler_block(in_channels, out_channels, expansion):\n", + " \n", + " modules = []\n", + " \n", + " if expansion == 0:\n", + " modules.append( make_conv3x3(\n", + " in_channels,\n", + " out_channels,\n", + " dilation=1,\n", + " stride=1,\n", + " use_gn=True,\n", + " use_relu=True,\n", + " kaiming_init=True\n", + " )) # no upsample\n", + " \n", + " for i in range(expansion):\n", + " modules.append(make_conv3x3(\n", + " in_channels if i == 0 else out_channels,\n", + " out_channels,\n", + " dilation=1,\n", + " stride=1,\n", + " use_gn=True,\n", + " use_relu=True,\n", + " kaiming_init=True\n", + " ))\n", + " modules.append(nn.Upsample(scale_factor=2, mode='bilinear', align_corners=False))\n", + " \n", + " return nn.Sequential(*modules)\n", + "\n", + "\n", + "class PanopticRCNN(nn.Module):\n", + "\n", + " def __init__(self, cfg, num_classes):\n", + " super(PanopticRCNN, self).__init__()\n", + "\n", + " \n", + " self.scale1_block = panoptic_upsampler_block(in_channels=cfg.MODEL.RESNETS.BACKBONE_OUT_CHANNELS,\n", + " out_channels=cfg.MODEL.PANOPTIC.CHANNEL_SIZE, expansion=3) # 1/32\n", + " self.scale2_block = panoptic_upsampler_block(in_channels=cfg.MODEL.RESNETS.BACKBONE_OUT_CHANNELS,\n", + " out_channels=cfg.MODEL.PANOPTIC.CHANNEL_SIZE, expansion=2) # 1/16\n", + " self.scale3_block = panoptic_upsampler_block(in_channels=cfg.MODEL.RESNETS.BACKBONE_OUT_CHANNELS, \n", + " out_channels=cfg.MODEL.PANOPTIC.CHANNEL_SIZE, expansion=1) # 1/8\n", + " self.scale4_block = panoptic_upsampler_block(in_channels=cfg.MODEL.RESNETS.BACKBONE_OUT_CHANNELS,\n", + " out_channels=cfg.MODEL.PANOPTIC.CHANNEL_SIZE, expansion=0) # 1/4\n", + " \n", + " self.num_classes = num_classes\n", + " \n", + " self.final_seg_mask = nn.Sequential(\n", + " nn.Conv2d(kernel_size=1, in_channels=128, out_channels=self.num_classes),\n", + " nn.Upsample(scale_factor=4, mode='bilinear', align_corners=False)\n", + " )\n", + " \n", + " \n", + " def forward(self, features):\n", + " \"\"\"\n", + " Arguments:\n", + " features (list[Tensor]): feature maps gen post FPN, (N, C, H, W)\n", + " Returns:\n", + " segmentation_mask: semantic segmentation mask\n", + " \"\"\"\n", + " \n", + " \n", + " x1 = self.scale1_block(features[3])\n", + " \n", + " x2 = self.scale2_block(features[2])\n", + " \n", + " x3 = self.scale3_block(features[1])\n", + " \n", + " x4 = self.scale4_block(features[0])\n", + " \n", + " x = x1 + x2 + x3 + x4\n", + " \n", + " seg_mask = self.final_seg_mask(x)\n", + " \n", + " return seg_mask\n", + "\n", + "\n", + "class PanopticModel(nn.Module):\n", + " \"\"\"\n", + " Main class for Panoptic R-CNN. 
Currently supports boxes and masks.\n", + " It consists of three main parts:\n", + " - backbone\n", + " - rpn\n", + " - panoptic: ouputs semantic segmentation mask\n", + " - heads: takes the features + the proposals from the RPN and computes\n", + " detections / masks from it.\n", + " \"\"\"\n", + " def __init__(self, cfg):\n", + " super(PanopticModel, self).__init__()\n", + "\n", + " self.backbone = build_backbone(cfg)\n", + " self.loss = SegLoss()\n", + " self.training = True\n", + " self.rpn = build_rpn(cfg, self.backbone.out_channels)\n", + " self.roi_heads = build_roi_heads(cfg, self.backbone.out_channels)\n", + " self.panoptic = PanopticRCNN(cfg, num_classes=cfg.MODEL.PANOPTIC.NUM_CLASSES)\n", + " \n", + "\n", + " def forward(self, images, targets=None):\n", + " \"\"\"\n", + " Arguments:\n", + " images (list[Tensor] or ImageList): images to be processed\n", + " targets (list[BoxList]): ground-truth boxes present in the image (optional)\n", + " Returns:\n", + " result (list[BoxList] or dict[Tensor]): the output from the model.\n", + " During training, it returns a dict[Tensor] which contains the losses.\n", + " During testing, it returns list[BoxList] contains additional fields\n", + " like `scores`, `labels` and `mask` (for Mask R-CNN models).\n", + " \"\"\"\n", + "\n", + " images = to_image_list(images)\n", + " features = self.backbone(images.tensors) \n", + " seg_mask = self.panoptic(features)\n", + " proposals, proposal_losses = self.rpn(images, features, targets)\n", + " \n", + " \n", + " if self.roi_heads:\n", + " x, result, detector_losses = self.roi_heads(features, proposals, targets)\n", + " else:\n", + " # RPN-only models don't have roi_heads\n", + " x = features\n", + " result = proposals\n", + " detector_losses = {}\n", + "\n", + " if self.training:\n", + " segmentation_loss = self.loss(seg_mask, targets)\n", + " \n", + " losses = {}\n", + " losses.update(detector_losses)\n", + " losses.update(proposal_losses)\n", + " losses.update(dict(segmentation_loss=segmentation_loss))\n", + " \n", + " return losses\n", + " \n", + " return seg_mask, result\n", + " " + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "NVjPYFN1Pz6D", + "colab_type": "text" + }, + "source": [ + "### Build Panoptic Network" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "WE6K5qZ7Pt5T", + "colab_type": "code", + "colab": {} + }, + "source": [ + "def build_panoptic_network(cfg):\n", + " return PanopticModel(cfg)" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "kkLKDmRC0-CE", + "colab_type": "text" + }, + "source": [ + "### Train Panoptic\n", + "\n", + "The train function is the entry point into the training process. It creates data loaders, optimisers, loads from checkpoint. " + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "4e2-533F1Qmu", + "colab_type": "code", + "colab": {} + }, + "source": [ + "# See if we can use apex.DistributedDataParallel instead of the torch default,\n", + "# and enable mixed-precision via apex.amp\n", + "try:\n", + " from apex import amp\n", + "except ImportError:\n", + " raise ImportError('Use APEX for multi-precision via apex.amp')\n", + " \n", + "def reduce_loss_dict(loss_dict):\n", + " \"\"\"\n", + " Reduce the loss dictionary from all processes so that process with rank\n", + " 0 has the averaged results. 
Returns a dict with the same fields as\n", + " loss_dict, after reduction.\n", + " \"\"\"\n", + " world_size = get_world_size()\n", + " if world_size < 2:\n", + " return loss_dict\n", + " with torch.no_grad():\n", + " loss_names = []\n", + " all_losses = []\n", + " for k in sorted(loss_dict.keys()):\n", + " loss_names.append(k)\n", + " all_losses.append(loss_dict[k])\n", + " all_losses = torch.stack(all_losses, dim=0)\n", + " dist.reduce(all_losses, dst=0)\n", + " if dist.get_rank() == 0:\n", + " # only main process gets accumulated, so only divide by\n", + " # world_size in this case\n", + " all_losses /= world_size\n", + " reduced_losses = {k: v for k, v in zip(loss_names, all_losses)}\n", + " return reduced_losses\n", + "\n", + "\n", + "def do_train_panoptic(\n", + " model,\n", + " data_loader,\n", + " optimizer,\n", + " scheduler,\n", + " checkpointer,\n", + " device,\n", + " checkpoint_period,\n", + " arguments,\n", + "):\n", + " logger = logging.getLogger(\"maskrcnn_benchmark.trainer\")\n", + " logger.error(\"Start training\")\n", + " meters = MetricLogger(delimiter=\" \")\n", + " max_iter = len(data_loader)\n", + " start_iter = arguments[\"iteration\"]\n", + " model.train()\n", + " start_training_time = time.time()\n", + " end = time.time()\n", + " \n", + " for iteration, (images, targets, _) in enumerate(data_loader, start_iter):\n", + " \n", + " if any(len(target) < 1 for target in targets):\n", + " logger.error(f\"Iteration={iteration + 1} || Image Ids used for training {_} || targets Length={[len(target) for target in targets]}\" )\n", + " continue\n", + " data_time = time.time() - end\n", + " iteration = iteration + 1\n", + " arguments[\"iteration\"] = iteration\n", + "\n", + " scheduler.step()\n", + "\n", + " images = images.to(device)\n", + " targets = [target.to(device) for target in targets]\n", + " \n", + " loss_dict = model(images, targets)\n", + " \n", + " losses = sum(loss for loss in loss_dict.values())\n", + " \n", + " # reduce losses over all GPUs for logging purposes\n", + " loss_dict_reduced = reduce_loss_dict(loss_dict)\n", + " losses_reduced = sum(loss for loss in loss_dict_reduced.values())\n", + " meters.update(loss=losses_reduced, **loss_dict_reduced)\n", + "\n", + " optimizer.zero_grad()\n", + " # Note: If mixed precision is not used, this ends up doing nothing\n", + " # Otherwise apply loss scaling for mixed-precision recipe\n", + " with amp.scale_loss(losses, optimizer) as scaled_losses:\n", + " scaled_losses.backward()\n", + " optimizer.step()\n", + "\n", + " batch_time = time.time() - end\n", + " end = time.time()\n", + " meters.update(time=batch_time, data=data_time)\n", + "\n", + " eta_seconds = meters.time.global_avg * (max_iter - iteration)\n", + " eta_string = str(datetime.timedelta(seconds=int(eta_seconds)))\n", + "\n", + " if iteration % 20 == 0 or iteration == max_iter:\n", + " logger.info(\n", + " meters.delimiter.join(\n", + " [\n", + " \"eta: {eta}\",\n", + " \"iter: {iter}\",\n", + " \"{meters}\",\n", + " \"lr: {lr:.6f}\",\n", + " \"max mem: {memory:.0f}\",\n", + " ]\n", + " ).format(\n", + " eta=eta_string,\n", + " iter=iteration,\n", + " meters=str(meters),\n", + " lr=optimizer.param_groups[0][\"lr\"],\n", + " memory=torch.cuda.max_memory_allocated() / 1024.0 / 1024.0,\n", + " )\n", + " )\n", + " if iteration % checkpoint_period == 0:\n", + " checkpointer.save(\"model_{:07d}\".format(iteration), **arguments)\n", + " if iteration == max_iter:\n", + " checkpointer.save(\"model_final\", **arguments)\n", + "\n", + " total_training_time = 
time.time() - start_training_time\n",
+ "    total_time_str = str(datetime.timedelta(seconds=total_training_time))\n",
+ "    logger.info(\n",
+ "        \"Total training time: {} ({:.4f} s / it)\".format(\n",
+ "            total_time_str, total_training_time / (max_iter)\n",
+ "        ))\n",
+ "\n",
+ "def train_panoptic(cfg, local_rank, distributed, dataset):\n",
+ "    model = build_panoptic_network(cfg)\n",
+ "\n",
+ "    device = torch.device('cuda')\n",
+ "    model.to(device)\n",
+ "\n",
+ "    optimizer = make_optimizer(cfg, model)\n",
+ "    scheduler = make_lr_scheduler(cfg, optimizer)\n",
+ "\n",
+ "    # Initialize mixed-precision training\n",
+ "    use_mixed_precision = cfg.DTYPE == \"float16\"\n",
+ "    amp_opt_level = 'O1' if use_mixed_precision else 'O0'\n",
+ "    model, optimizer = amp.initialize(model, optimizer, opt_level=amp_opt_level)\n",
+ "\n",
+ "    if distributed:\n",
+ "        model = torch.nn.parallel.DistributedDataParallel(\n",
+ "            model, device_ids=[local_rank], output_device=local_rank,\n",
+ "            # this should be removed if we update BatchNorm stats\n",
+ "            broadcast_buffers=False,\n",
+ "        )\n",
+ "\n",
+ "    arguments = {}\n",
+ "    arguments[\"iteration\"] = 0\n",
+ "\n",
+ "    output_dir = cfg.OUTPUT_DIR\n",
+ "    save_to_disk = get_rank() == 0\n",
+ "    checkpointer = DetectronCheckpointer(\n",
+ "        cfg, model, optimizer, scheduler, output_dir, save_to_disk\n",
+ "    )\n",
+ "    extra_checkpoint_data = checkpointer.load(cfg.MODEL.WEIGHT)\n",
+ "    arguments.update(extra_checkpoint_data)\n",
+ "\n",
+ "    data_loader = build_data_loader(cfg, dataset)\n",
+ "\n",
+ "    checkpoint_period = cfg.SOLVER.CHECKPOINT_PERIOD\n",
+ "\n",
+ "    do_train_panoptic(\n",
+ "        model,\n",
+ "        data_loader,\n",
+ "        optimizer,\n",
+ "        scheduler,\n",
+ "        checkpointer,\n",
+ "        device,\n",
+ "        checkpoint_period,\n",
+ "        arguments,\n",
+ "    )\n",
+ "\n",
+ "    return model"
+ ],
+ "execution_count": 0,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "pVeJNhzy2DZs",
+ "colab_type": "text"
+ },
+ "source": [
+ "## Train Panoptic Driver\n",
+ "\n",
+ "Here we fire off training by calling the function above. Before that, we set some important config values for our run, build our dataset, and update our config. Then we start training."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "XtgfPl7F2CEP",
+ "colab_type": "code",
+ "colab": {}
+ },
+ "source": [
+ "config_file = \"shapes_config.yaml\"\n",
+ "\n",
+ "# update the config options with the config file\n",
+ "cfg.merge_from_file(config_file)\n",
+ "\n",
+ "cfg.merge_from_list(['OUTPUT_DIR', 'segDir']) # The output folder where all our model checkpoints will be saved during training.\n",
+ "cfg.merge_from_list(['SOLVER.IMS_PER_BATCH', 25]) # Number of images in a single batch. This number depends on the size of your GPU.\n",
+ "cfg.merge_from_list(['SOLVER.BASE_LR', 0.0001]) # The learning rate when training starts. Please check the Detectron scaling rules to determine the learning rate for your GPU setup.\n",
+ "cfg.merge_from_list(['SOLVER.MAX_ITER', 1000]) # The number of training iterations. One iteration is one forward and backward pass of a mini-batch through the network.\n",
+ "cfg.merge_from_list(['SOLVER.STEPS', \"(700, 800)\"]) # These two numbers specify after how many iterations the learning rate is divided by 10.
\n", + "cfg.merge_from_list(['TEST.IMS_PER_BATCH', 1]) # Batch size during testing/evaluation\n", + "cfg.merge_from_list(['MODEL.RPN.FPN_POST_NMS_TOP_N_TRAIN', 2000]) # This determines how many region proposals to take in for processing into the stage after the RPN. The rule is 1000*batch_size = 4*1000 \n", + "cfg.merge_from_list(['SOLVER.CHECKPOINT_PERIOD', 100]) # After how many iterations does one want to save the model.\n", + "cfg.merge_from_list(['INPUT.MIN_SIZE_TRAIN', \"(192, )\"])\n", + "cfg.merge_from_list(['INPUT.MAX_SIZE_TRAIN', 192])\n", + "# Make the Output dir if one doesnt exist.\n", + "output_dir = cfg.OUTPUT_DIR\n", + "if output_dir:\n", + " mkdir(output_dir)\n", + "\n", + "# Start training.\n", + "model = train_panoptic(cfg, local_rank=1, distributed=False, dataset=ShapeDataset(2000))" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "ccHt8YMdKq6K", + "colab_type": "text" + }, + "source": [ + "# Visualise\n", + "\n", + "An important part of validating your model is visualising the results. This is done below" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "kb9VchvVzRpu", + "colab_type": "code", + "colab": {} + }, + "source": [ + "# Load Trained Model\n", + "config_file = \"shapes_config.yaml\"\n", + "\n", + "cfg.merge_from_file(config_file)\n", + "# manual override some options\n", + "# cfg.merge_from_list([\"MODEL.DEVICE\", \"cpu\"])\n", + "\n", + "# manual override some options\n", + "cfg.merge_from_list(['OUTPUT_DIR', 'segDir']) # The output folder where all our model checkpoints will be saved during training.\n", + "\n", + "# update the config options with the config file\n", + "cfg.merge_from_file(config_file)\n", + "\n", + "cfg.merge_from_list(['INPUT.MIN_SIZE_TRAIN', \"(192, )\"])\n", + "cfg.merge_from_list(['INPUT.MAX_SIZE_TRAIN', 192])\n", + "\n", + "cfg.merge_from_list(['INPUT.MIN_SIZE_TEST', 192])\n", + "cfg.merge_from_list(['INPUT.MAX_SIZE_TEST', 192])\n", + "# cfg.merge_from_list([\"MODEL.DEVICE\", \"cpu\"])\n", + "\n", + "\n", + "vis_demo = COCODemo(\n", + " cfg, \n", + " min_image_size=192,\n", + " confidence_threshold=0.7)\n", + "\n", + "# Add these for printing class names over your predictions.\n", + "COCODemo.CATEGORIES = [\n", + " \"__background\",\n", + " \"square\",\n", + " \"circle\",\n", + " \"triangle\"\n", + "]\n", + "\n", + "# Load Dataset\n", + "dataset = ShapeDataset(50)" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "c8b6wHAXjyE5", + "colab_type": "text" + }, + "source": [ + "## Visualise" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "RSPq97dtWFrA", + "colab_type": "text" + }, + "source": [ + "### Input Image" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "Ir-cYCvKSbNI", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 486 + }, + "outputId": "03b774d2-f7f1-4e86-e56b-37a734cb9e72" + }, + "source": [ + "# Visualise Input Image\n", + "rows = 2\n", + "cols = 2\n", + "fig = plt.figure(figsize=(8, 8))\n", + "for i in range(1, rows*cols+1):\n", + " img = dataset.load_image(i)\n", + "# image = np.array(img)[:, :, [2, 1, 0]]\n", + "# result = vis_demo.run_on_opencv_image(image)\n", + " \n", + " fig.add_subplot(rows, cols, i)\n", + " plt.imshow(img)\n", + "plt.show()" + ], + "execution_count": 50, + "outputs": [ + { + "output_type": "display_data", + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAecAAAHVCAYAAADLvzPyAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzt3X2QZHV56PHvk13wBcFhWWrd7KJA\nQkw2VhSyIhZetcQkiIYlFa8XtJJVudlrXUw0JqWoRWmVpVfzotGqRFwDYc1FEVGLvdEYkQBWrLBh\nQeRtBVYU2a2FZV1XKLXExef+0We0GWZ2p/uc7vPr099P1dR0n+6efvo388wzz3NOn4nMRJIkleOX\n2g5AkiQ9lsVZkqTCWJwlSSqMxVmSpMJYnCVJKozFWZKkwoysOEfE6RFxZ0Rsj4jzR/U8kkbLXJbG\nL0bxPueIWALcBfwOsAO4ATgnM+9o/MkkjYy5LLVjVJ3zycD2zLwnMx8BLgPWjei5JI2OuSy1YOmI\nvu4q4L6+6zuA5y105yVHzOQhR68cUSjSZPrJPd/ck5lHtxzGQLkMMLN8aa58xiEjDUrjc0jsbzuE\nibfjO4+yd8/PYpDHjKo4H1REbAA2ACxd/jSe/n82tRWKVKS7/8fz7m07hsXqz+enPf0QPrHl+JYj\nUlNWxJ62Q5h4r3je3oEfM6qx9k7gmL7rq6ttP5eZGzNzbWauXXLEzIjCkFTTQXMZHpvPM8uXjC04\nqatGVZxvAE6IiOMi4lDgbGDziJ5L0uiYy1ILRjLWzsz9EfFG4N+AJcDFmXn7KJ5L0uiYy1I7RrbP\nOTO/CHxxVF9f0niYy9L4eYYwSZIKY3GWJKkwFmdJkgpjcZYkqTAWZ0mSCmNxlqQh7XnDc9oOQR3V\n2uk7JalEgxbcQe6//MKbBw1HU8rOWZKkwtg5S5pa4x5Lz/d8dtOaj8VZ0tQocR/x3Jgs1gLH2pIk\nFcfOWVKnldgtH4idtMDOWZKk4licJXXSnjc8Z+K65vl04TVocI61JXVGVwtZ/+tyzD0d7JwlSSqM\nnbOkidfVjnk+s6/VDrrb7JwlTbRpKsz9pvV1TwuLsyRJhXGsLWki2Tk64u4yO2dJE8fC/FiuR/dY\nnCVJKszQxTkijomIayLijoi4PSLeVG1fFhFXRcTd1ecjmwtX0ihMSj535cQio+DadEudznk/8BeZ\nuQY4BTgvItYA5wNXZ+YJwNXVdUllKz6fLTyL4zp1w9DFOTN3ZeZN1eWHgW3AKmAdsKm62ybgrLpB\nShot81kqSyNHa0fEscCJwBZgRWbuqm66H1jRxHNIGo/S8tlOcHAexT35ah8QFhFPAT4LvDkzH+q/\nLTMTyAUetyEitkbE1kcf2lc3DEkNaCKf9+15dAyRSt1WqzhHxCH0EvnSzPxctfmBiFhZ3b4S2D3f\nYzNzY2auzcy1S46YqROGpAY0lc8zy5eMJ2Cpw+ocrR3ARcC2zPxg302bgfXV5fXAlcOHJ2kcSs1n\nR9r1eAT35Kqzz/lU4I+AWyNidsfGO4D3A5dHxLnAvcCr6oUoaQzMZ6kgQxfnzPwPIBa4+bRhv66k\n8Sstn+32mrXnDc/x4LAJ4xnCJEkqjMVZkqTC+F+pJBXDcfbo+N7nyWLnLElSYSzOkiQVxuIsSVJh\nLM6SiuD+5vFwnSeDxVmSpMJYnCVJKozFWZKkwlicJUkqjCchkdQqD1AaP09IUj47Z0mSCmNxliSp\nMBZnSZIKY3GWJKkwFmdJkgpjcZYkqTAWZ0mSCmNxliSpMBZnSZIKY3GWJKkwtYtzRCyJiK9HxL9U\n14+LiC0RsT0iPh0Rh9YPU9I4jDufPXVnu1z/cjXROb8J2NZ3/QPAhzLzV4HvA+c28BySxmOs+ey5\nndvl+perVnGOiNXAy4F/rK4H8BLgiuoum4Cz6jyHpPEwn6Vy1O2c/w54K/Cz6vpRwL7M3F9d3wGs\nmu+BEbEhIrZGxNZHH9pXMwxJDWgkn/fteXT0kUodN3RxjohXALsz88ZhHp+ZGzNzbWauXXLEzLBh\nSGpAk/k8s3xJw9FJ06fO/3M+FTgzIs4AnggcAXwYmImIpdVf26uBnfXDlDRi5rNUkKE758x8e2au\nzsxjgbOBf8/M1wDXAK+s7rYeuLJ2lJJGynyWyjKK9zm/DXhLRGynt8/qohE8h6TxMJ+lFtQZa/9c\nZl4LXFtdvgc4uYmvK2n8zGepfZ4hTJKkwlicJUkqjMVZkqTCNLLPWZKGNXsKSc/zPD6etrN8ds6S\nJBXG4ixJUmEszpIkFcbiLElSYSzOA7hg6XVthyB1lgcpjYfrPBmm/mjtQQvuIPd/z/4XDRqOJEl2\nzpIklWaqOudxj6Xnez67aUnSwXS6OJe4j3huTBZr6Rc8IcnoDLuv+YFc3nAk0+en+dDAj3GsLUlS\nYTrXOZfYLR+InbQkaS47Z0mSCtOZ4nzB0usmrmueTxdeg1TX8gtv9v24DXItJ89Ej7W7Wsj6X5dj\nbk2z5Rfe7MFhNViUJ1dnOmdJkrpiIjvnrnbM85l9rXbQkjQ9Jq5znqbC3G9aX7fk/ufBuWaTb+KK\nsyRJXVerOEfETERcERHfjIhtEfH8iFgWEVdFxN3V5yObCLQrR2PX4RpolMaZz8OwE1wc16kb6nbO\nHwa+lJm/Djwb2AacD1ydmScAV1fXa7EgPZbroREZSz7X4bh2Ya5NtwxdnCPiqcALgYsAMvORzNwH\nrAM2VXfbBJxVN0hJo2U+S2Wp0zkfBzwI/FNEfD0i/jEiDgNWZOau6j73Ayvme3BEbIiIrRGx9dGH\n9s37BI5xF+baqGGN5fO+PY+OPFg7xMdyPbqnTnFeCpwEfDQzTwR+yJyRV2YmkPM9ODM3ZubazFy7\n5IiZx91u4Vkc10kNaSyfZ5YvGXmw4BgXXIMuq1OcdwA7MnNLdf0Kesn9QESsBKg+764XoqQxMJ+l\nggxdnDPzfuC+iHhmtek04A5gM7C+2rYeuHKQr+u4dnCumeoaVT6Pw7R2jtP6uqdF3TOE/SlwaUQc\nCtwDvI5ewb88Is4F7gVeVfM5JI3HxObzbKGahvNwW5SnQ63inJk3A2vnuem0Ol9X0viZz1I5ijq3\ntqPZejwPt6Zdf1fZpS7abnn6ePpOSZIKU0RxXhkP2zU3yLWUuvM2oy68Bg2uqLG2JDVt0kbdFmNB\nIZ2zJEn6BTvnjvLgMOnx5nalJXTSdsqaj8VZ0tSarzCOsmBbiLVYjrUlSSqMnbMk9Rmku93zhufY\nDWsk7Jw7zrdVSaNjYdaoWJwlSSqMxVmSpMJYnCVJKozFWZKkwlicp8AFS6/zwDBJmiAWZ0mSCmNx\nliSpMBZnSZIKY3GWJKkwFmdJkgpjcZYkqTAWZ0mSClPrv1JFxJ8D/xNI4FbgdcBK4DLgKOBG4I8y\n85GacWqCPHn1T9sOQUMwn6VyDF2cI2IV8GfAmsz8cURc
DpwNnAF8KDMvi4gLgXOBjzYSraSRMJ/L\n8Kz//s62QyjebZ95b9shjEXdsfZS4EkRsRR4MrALeAlwRXX7JuCsms8haTzMZ6kQQxfnzNwJ/A3w\nXXpJ/AN6Y699mbm/utsOYNV8j4+IDRGxNSK2PvyDHw4bhgbgKTy1kCbzed+eR8cRstRpQxfniDgS\nWAccB/wycBhw+mIfn5kbM3NtZq49/KmHDRuGBvCe/S9qOwQVqsl8nlm+ZERRStOjzlj7pcC3M/PB\nzPwp8DngVGCmGosBrAZ21oxR0uiZz1JB6hTn7wKnRMSTIyKA04A7gGuAV1b3WQ9cWS9ESWNgPksF\nqbPPeQu9A0Vuove2i18CNgJvA94SEdvpvf3iogbilDRC5rNUllrvc87MdwHvmrP5HuDkOl9X0viZ\nz1I5PEOYJEmFsThLklQYi7MkSYWxOEuSVBiLsyRJhbE4S5JUmFpvpdJk8LSdkjRZ7JwlSSqMxVmS\npMJYnCVJKozFWZKkwlicO86DwSRp8licJUkqjMVZkqTCWJwlSSqMJyHpKPc1S9LksnOWJKkwFmdJ\nkgpTRHHelYc7hm2QaylJk62I4ixJkn6hqOJsx1fPe/a/yDWUpA4oqjhLkqRFFOeIuDgidkfEbX3b\nlkXEVRFxd/X5yGp7RMRHImJ7RNwSESeNMnhJgzGfpcmwmM75EuD0OdvOB67OzBOAq6vrAC8DTqg+\nNgAfHTQgR7ODc800gEsYYz5LGs5Bi3NmfhXYO2fzOmBTdXkTcFbf9k9kz/XATESsbCpYSfWYz9Jk\nGHaf84rM3FVdvh9YUV1eBdzXd78d1bbHiYgNEbE1IrY++tC+x91uJ7g4rpMa0Gg+79vz6OgilaZE\n7dN3ZmZGRA7xuI3ARoAn/spvzPv42cJzwdLr6oTYSRZljUIT+fwbv/2kgR8v6bGG7ZwfmB1vVZ93\nV9t3Asf03W91tU1SucxnqTDDFufNwPrq8nrgyr7tf1wd5XkK8IO+cdnQ7BIfy/VQw8aaz5IO7qBj\n7Yj4FPBiYHlE7ADeBbwfuDwizgXuBV5V3f2LwBnAduBHwOuaCtQRt0VZ9ZWSz5IO7KDFOTPPWeCm\n0+a5bwLn1Q1K0miYz9JkmLgzhE1r9zitr1uSplHto7XbME0jbouyJE2fieucJUnquonsnGf1d5Vd\n6qLtliVputk5S5JUmM4U567884cuvAZJUj0TPdaez6SNui3GkqS5OtM5S5LUFZ3rnPvN7UpL6KTt\nlCVJB9Pp4jzXfIVxlAXbQixJGoZjbUmSCjNVnfN8BuluL1h6nd2wJGnk7JwHYGGWJI2DxVmSpMJY\nnCVJKozFWZKkwlicJUkqjMVZkqTCWJwlSSqMxVmSpMJYnCVJKozFWZKkwhy0OEfExRGxOyJu69v2\n1xHxzYi4JSI+HxEzfbe9PSK2R8SdEfF7owpc0uDMZ2kyLKZzvgQ4fc62q4BnZeZvAXcBbweIiDXA\n2cBvVo/5h4hY0li0kuq6BPNZKt5Bi3NmfhXYO2fblzNzf3X1emB1dXkdcFlm/iQzvw1sB05uMF5J\nNZjP0mRoYp/z64F/rS6vAu7ru21Hte1xImJDRGyNiK2PPrSvgTAkNaB2Pu/b8+iIQ5S6r1Zxjoh3\nAvuBSwd9bGZuzMy1mbl2yREzB3+ApJFqKp9nljv5luoa+v85R8RrgVcAp2VmVpt3Asf03W11tU1S\nwcxnqSxDdc4RcTrwVuDMzPxR302bgbMj4gkRcRxwAvBf9cOUNCrms1Seg3bOEfEp4MXA8ojYAbyL\n3tGcTwCuigiA6zPzDZl5e0RcDtxBbzx2Xma6A0oqhPksTYaDFufMPGeezRcd4P7vBd5bJyhJo2E+\nS5PBM4RJklQYi7MkSYWxOEuSVJih30olSWrWbZ9x97567JwlSSqMxVmSpMLEL04G1GIQEQ8CPwT2\ntB3LApZTZmylxgXlxlZqXPD42J6RmUe3FcywIuJh4M6241jAJH3/S1FqXDA5sQ2cy0UUZ4CI2JqZ\na9uOYz6lxlZqXFBubKXGBWXHNoiSX4exDa7UuKDbsTnWliSpMBZnSZIKU1Jx3th2AAdQamylxgXl\nxlZqXFB2bIMo+XUY2+BKjQs6HFsx+5wlSVJPSZ2zJEmigOIcEadHxJ0RsT0izm85lmMi4pqIuCMi\nbo+IN1Xb3x0ROyPi5urjjJbi+05E3FrFsLXatiwiroqIu6vPR445pmf2rcvNEfFQRLy5rTWLiIsj\nYndE3Na3bd41ip6PVD97t0TESS3E9tcR8c3q+T8fETPV9mMj4sd963fhKGNrSin5bC4PHZf5PHxc\nzeZyZrb2ASwBvgUcDxwKfANY02I8K4GTqsuHA3cBa4B3A3/Z5lpVMX0HWD5n218B51eXzwc+0PL3\n837gGW2tGfBC4CTgtoOtEXAG8K9AAKcAW1qI7XeBpdXlD/TFdmz//Sbho6R8Npcb+36az4uPq9Fc\nbrtzPhnYnpn3ZOYjwGXAuraCycxdmXlTdflhYBuwqq14FmkdsKm6vAk4q8VYTgO+lZn3thVAZn4V\n2Dtn80JrtA74RPZcD8xExMpxxpaZX87M/dXV64HVo3r+MSgmn83lRpjPA8TVdC63XZxXAff1Xd9B\nIQkUEccCJwJbqk1vrMYVF7cxbqok8OWIuDEiNlTbVmTmrury/cCKdkID4GzgU33XS1gzWHiNSvv5\nez29v/xnHRcRX4+I6yLiv7UV1ABKW0/AXK7BfB5e7VxuuzgXKSKeAnwWeHNmPgR8FPgV4DnALuBv\nWwrtBZl5EvAy4LyIeGH/jdmbobRy+H1EHAqcCXym2lTKmj1Gm2t0IBHxTmA/cGm1aRfw9Mw8EXgL\n8MmIOKKt+CaVuTwc83l4TeVy28V5J3BM3/XV1bbWRMQh9JL50sz8HEBmPpCZj2bmz4CP0xvfjV1m\n7qw+7wY+X8XxwOzopvq8u43Y6P2SuSkzH6hiLGLNKgutURE/fxHxWuAVwGuqXzZk5k8y83vV5Rvp\n7cv9tXHHNqAi1nOWuVyL+TyEJnO57eJ8A3BCRBxX/aV2NrC5rWAiIoCLgG2Z+cG+7f37Lf4AuG3u\nY8cQ22ERcfjsZXoHH9xGb73WV3dbD1w57tgq59A3AithzfostEabgT+ujvI8BfhB37hsLCLidOCt\nwJmZ+aO+7UdHxJLq8vHACcA944xtCMXks7lcm/k8oMZzeVRHsy32g94RdnfR+2vinS3H8gJ6I5Jb\ngJurjzOAfwZurbZvBla2ENvx9I5+/QZw++xaAUcBVwN3A18BlrUQ22HA94Cn9m1rZc3o/ULZBfyU\n3j6ncxdaI3pHdf599bN3K7C2hdi209tPNvvzdmF13z+svs83AzcBvz/u7+uQr7GIfDaXa8VnPg8X\nV6O57BnCJEkqTNtjbUmSNIfFWZKkwlicJUkqjMVZkqTCWJwlSSqMxVmSpMJYnCVJKozFWZKkwlic\nJUkqjMVZkqT
CWJwlSSqMxVmSpMJYnCVJKozFWZKkwlicJUkqjMVZkqTCWJwlSSqMxVmSpMJYnCVJ\nKozFWZKkwlicJUkqzMiKc0ScHhF3RsT2iDh/VM8jabTMZWn8IjOb/6IRS4C7gN8BdgA3AOdk5h2N\nP5mkkTGXpXYsHdHXPRnYnpn3AETEZcA6YN6EXn74TB579C+PKBRpMt347W17MvPolsMYKJcBjjo0\ncvWTxxSdHmfvkw5vO4TGLPvxw22H0IgdP4LvPZIxyGNGVZxXAff1Xd8BPK//DhGxAdgA8PTlT+OG\n9/3fEYUiTaZfOue37207BhaRy/DYfF79JPjyi0b1q0UH88k1j/v2TKxX33Ft2yE04nev2z/wY1rL\noMzcCGwEWHv8muZn65LGpj+fnz0T5vOYfXLNi9sOYST6X1dXCvVijao47wSO6bu+utomabKYywXr\nalGez+xrnZYiPaqjtW8AToiI4yLiUOBsYPOInkvS6JjLUgtGUpwzcz/wRuDfgG3A5Zl5+yieS9Lo\nmMvlmqauud+0vO6R7XPOzC8CXxzV15c0HuZyWaalOB3INIy4PUOYJEmFsThL0oSwa36sLq+Hb0aU\npMJ1uQjV1dURt52zJEmFsThLUsHsmhena+vkWFuSCtS1YjMOXRpx2zlLklQYi7MkSYWxOEtSQT65\n5sWOtGvqwvpZnCVJKozFWSPzha/tbTsEaaJ0oeMrxaRPICzOkiQVxrdSqXH9HfPs5ZefuqytcCRp\n4licJallkzx+Ld2kvvfZsbYkSYWxOKsxX/ja3gUPAvPgMElaPIuzJEmFsThLklQYi7MasZix9YHG\n3tK08mCw8Zi0dfZobdVisZWk5tk5S5JUGIuzxs5uW5IObOjiHBHHRMQ1EXFHRNweEW+qti+LiKsi\n4u7q85HNhauS1Cmy7n8ui/k8fpN+7udJNElrXqdz3g/8RWauAU4BzouINcD5wNWZeQJwdXVdUtnM\nZ6kgQxfnzNyVmTdVlx8GtgGrgHXApupum4Cz6gYpabTMZ6ksjexzjohjgROBLcCKzNxV3XQ/sGKB\nx2yIiK0RsfXBh7/fRBgakyZH0o62y1M3n/c+MpYwpU6rXZwj4inAZ4E3Z+ZD/bdlZgI53+Myc2Nm\nrs3MtUcf7m4sqQRN5POyQ8cQqNRxtYpzRBxCL5EvzczPVZsfiIiV1e0rgd31QlRJ7HS7y3yWylHn\naO0ALgK2ZeYH+27aDKyvLq8Hrhw+PEnjYD5LZalzhrBTgT8Cbo2Im6tt7wDeD1weEecC9wKvqhei\num62G3/5qctajmSqmc9SQYYuzpn5H0AscPNpw35dlclxdreZz1JZPEOYJEmFsTirGHbnktRjcdZB\njbNoelpPTYNJOYVkV03C+lucJUkqjMVZksbs1Xdc23YIU20S1r/OW6nUcW2Ol317laRpZucsSVJh\nLM6alwdlSVJ7LM4qmn8kSJpGFmdJkgrjAWF6DDtVSWqfnbOK54lJJE0bi7MkSYWxOAuYjO609Pgk\nqSkWZ0mSCuMBYbIjlVowewrJSfgnDF0xCaftnGXnrIkyCeN3SarL4ixJUmEca08xO1BJKpOdsyaS\nf1ioKyZpP+gkm7R1tjhLklSY2sU5IpZExNcj4l+q68dFxJaI2B4Rn46IQ+uHqabZeWo+5rNUhiY6\n5zcB2/qufwD4UGb+KvB94NwGnkMN6dLRzl16LQUxn6UC1CrOEbEaeDnwj9X1AF4CXFHdZRNwVp3n\nkDQe5rNUjrpHa/8d8Fbg8Or6UcC+zNxfXd8BrKr5HNIBfeFre3n5qcvaDqMLzOeWeEKS0Zm0A8Fm\nDd05R8QrgN2ZeeOQj98QEVsjYuuDD39/2DA0AEfAWkiT+bz3kYaDk6ZQnc75VODMiDgDeCJwBPBh\nYCYillZ/ba8Gds734MzcCGwEWHv8mqwRh6T6GsvnZ8+E+SzVNHTnnJlvz8zVmXkscDbw75n5GuAa\n4JXV3dYDV9aOUjoIDw6rx3wuw6SOYEv06juunej1HMX7nN8GvCUittPbZ3XRCJ5D0niYz1ILGjl9\nZ2ZeC1xbXb4HOLmJr6tm2FFqEOZzuzw4rL5J7phneYawjpu2wjxtr1dSN1mcJUkqjP+VSp0z2z37\n3mdNMsfbg+vCOHuWnXNHefSy1A1dKjij1LV1sjhLklQYx9rqLMfb6gpH3AvrWsc8y865gxxnS93U\n1UI0rC6vh8VZkqTCONbuEDvm+flfq9Qljri73THPsnOWpAk0DQVqPtPyui3OkiQVxrF2BzjOPjiP\n3FYXTdOIe1o65lkWZ00V9z+ri/oLV5cK9bQV5H6OtSVJKozFecI50pbUrwvd5qvvuLYTr6MOi7Mk\nSYVxn7OmjgeHqevmdp2TsB962jvluSzOE8pxtqRZF3z8fbznT96x4O0lFmuL8YE51pYkqTB2zhPI\nrrkZvq1Kk+6Cj7/vcZcP1EHPmq9rHaSbvuHf/2rR913wa9T+Cu340NNOHsvzWJwlSQONmSe1sE4S\nx9qSJBWmVnGOiJmIuCIivhkR2yLi+RGxLCKuioi7q89HNhXstPvC1/Y60m6Ya/oL5vNk6R9pL2a7\nJkvdzvnDwJcy89eBZwPbgPOBqzPzBODq6rpUNAs0YD5LxRi6OEfEU4EXAhcBZOYjmbkPWAdsqu62\nCTirbpCSRst8lspS54Cw44AHgX+KiGcDNwJvAlZk5q7qPvcDK+qFKLCz08iZzxNiMWPrQY7cVpnq\njLWXAicBH83ME4EfMmfklZkJ5HwPjogNEbE1IrY++PD3a4TRbe4THZ8pX+vG8nnvIyOPdWq5P3l6\n1CnOO4Admbmlun4FveR+ICJWAlSfd8/34MzcmJlrM3Pt0Yd7jInUssbyedmhY4lX6rShi3Nm3g/c\nFxHPrDadBtwBbAbWV9vWA1fWilDSyJnP3XTBx99ntz2h6p6E5E+BSyPiUOAe4HX0Cv7lEXEucC/w\nqprPMbWmeMTaqin+xxjmc6HqFtiDnXtb5alVnDPzZmDtPDedVufrSho/81kqh2cIkySpMBZnSZIK\nY3Eu0JS/pacYfg9UgqYO6PLgsMnif6Uq0DQdiPTD533lMdcP2/LSliKRpHLYOUuSVBg7Z7Vibsc8\nd7sdtKbdqEbQvq1qMlicJakg7hcWONaWJKk4FmeN3UIj7bn3Wcz9JA3OI7fLZ3HW2FhwpQMbd8G0\nQJfL4ixJUmEsziqanbakaeTR2hqLOkXWt1ep69ocL88+t2+vKoudsyRJhbE4S1KLPChL87E4a6Sa\nPELb/c/S6PhHQlkszpIkFcYDwiSpBXaqOhA7Z0mSCmNx1siMYh+xZxmTRsfTepbD4qzGWUClAyu9\nAJYe3zSwOEuSVJhaxTki/jwibo+I2yLiUxHxxIg4LiK2RMT2iPh0RBzaVLDSLDvz5pnPUjmGLs4R\nsQr4M2BtZj4LWAKcDXwA+FBm/irwfeDcJgJV+cY9znZ83hzzeTwmaZ/u
JMXaRXXH2kuBJ0XEUuDJ\nwC7gJcAV1e2bgLNqPoek8TCfpUIMXZwzcyfwN8B36SXxD4AbgX2Zub+62w5g1XyPj4gNEbE1IrY+\n+PD3hw1DUgOazOe9j4wj4sljF6pB1BlrHwmsA44Dfhk4DDh9sY/PzI2ZuTYz1x59+JHDhqFCtDle\ndrRdX5P5vMy90p3ieLsddcbaLwW+nZkPZuZPgc8BpwIz1VgMYDWws2aMkkbPfJYKUuf0nd8FTomI\nJwM/Bk4DtgLXAK8ELgPWA1fWDVLlsmvtDPN5ROw6NYw6+5y30DtQ5Cbg1uprbQTeBrwlIrYDRwEX\nNRCndEAeuV2P+ayD8Y+M8ar1jy8y813Au+Zsvgc4uc7XlTR+5rNUDs8QpqHZqUoHZrepYfkvI9Up\nP3zeVzhsy0vbDkNTrqtF+eev6/fbjWMa2DlLklQYO2cNrPRx9mx8dtCSJpWdsyRJhbFz1kBK75ql\nNnV1X7PGz85ZneV7nyVNKouzJEmFcaytRbEDlQ7MkbaaZHFW5/neZ42SRVmj4FhbkqTCWJx1UI60\nJWm8HGtrQV0qyp6YRKPgSFujYucsSVJhLM6aKl2aBkjqLsfampdFTFqY42yNmp2zJEmFsThr6nha\nT0mlc6ytx7BoSQfmSFvjYOcsSVJh7Jw1tTytpwZhx/wLT/l/i5uwvedP3vG4bc99/seaDqe2G/7z\nf7UdwuPYOUuSVJiDFueIuDjUWthgAAAJCklEQVQidkfEbX3blkXEVRFxd/X5yGp7RMRHImJ7RNwS\nESeNMng1x4OkpoP5rDaV2DVDL67SYltM53wJcPqcbecDV2fmCcDV1XWAlwEnVB8bgI82E6ZGaZqL\n8hT+UXIJ5vPAHGkP54KPv2+i1q6kAn3Q4pyZXwX2ztm8DthUXd4EnNW3/RPZcz0wExErmwpWUj3m\nszQZhj0gbEVm7qou3w+sqC6vAu7ru9+Oatsu5oiIDfT+Gufpy582ZBhSM6b84LBG83n1k0YXqCZT\nSR3ppKh9tHZmZkTkEI/bCGwEWHv8moEfr/qmbJyrRWgin589M/jjSzVJI1k1Y/YPibaP4B72aO0H\nZsdb1efd1fadwDF991tdbZNULvNZKsywnfNmYD3w/urzlX3b3xgRlwHPA37QNy6TijbF//PZfJ7D\njrkZX7r4qLZDmFiLeSvVp4D/BJ4ZETsi4lx6Sfw7EXE38NLqOsAXgXuA7cDHgf89kqhVmyPt6WQ+\na1wmvTC3vZ/8oJ1zZp6zwE2nzXPfBM6rG5Sk0TCfpcng6TunjB3zwU3xeFs40tYvtHlwmMVZktSY\nSR9nl8Jza0uSVBg75yniSHswU35ikqnjOFsLaWO8becsSVJhLM7SAUzhP8aQhub+5uY41p4CFhfp\nwBxp1zMtRfm5z//Y2Ebbds6SJBXGzrnj7Jqb4cFh3WTHrFLZOUuShjYtI+1Zz33+x8Zyak+LsyRJ\nhbE4d5RHGTfPNe0WR9qqY9Tds/ucJUkDm7Zx9rjZOUuSVBg75w5y9DpaHrk92Rxn1/eli4/i9Nd/\n7+eXp9UoT+tpcZaG4L+VnFzv+ZN3tB3CxPvS8z821UV5HBxrS5JUGDvnDnGcLWmUxvH+3kk0itN6\n2jl3gG/xaY/rLmkULM6SJBXG4ixJOihH2gfW9Gk9Lc6SJBXmoMU5Ii6OiN0RcVvftr+OiG9GxC0R\n8fmImOm77e0RsT0i7oyI3xtV4Opxn2f7Jmmfv/msQc12hOP6P8aTrqnueTGd8yXA6XO2XQU8KzN/\nC7gLeDtARKwBzgZ+s3rMP0TEkkYildSESzCfNQTH2uN10OKcmV8F9s7Z9uXM3F9dvR5YXV1eB1yW\nmT/JzG8D24GTG4xXUg3mszQZmnif8+uBT1eXV9FL7lk7qm2PExEbgA0AT1/+tAbCmC6TMkadJh05\nrWftfF79pFGGp3GxUx5eE6f1rFWcI+KdwH7g0kEfm5kbgY0Aa49fk3XimEYdKAIqTFP5/OyZMJ+l\nmoYuzhHxWuAVwGmZOZuMO4Fj+u62utomqWDms1SWod5KFRGnA28FzszMH/XdtBk4OyKeEBHHAScA\n/1U/TEmjYj5rLkfazaizjgftnCPiU8CLgeURsQN4F72jOZ8AXBURANdn5hsy8/aIuBy4g9547LzM\nfHTo6CQ1ynzWgfTvK7VAt+ugxTkzz5ln80UHuP97gffWCUrSaJjP0mTwDGGSpMewa27Oc5//MbY9\n5RkDP87iLEn6+VnAPBNYGSzOkiQVpomTkEiSOsBxdjkszpI0xSzIZXKsLUlSYeIXJwNqMYiIB4Ef\nAnvajmUByykztlLjgnJjKzUueHxsz8jMo9sKZlgR8TBwZ9txLGCSvv+lKDUumJzYBs7lIoozQERs\nzcy1bccxn1JjKzUuKDe2UuOCsmMbRMmvw9gGV2pc0O3YHGtLklQYi7MkSYUpqThvbDuAAyg1tlLj\ngnJjKzUuKDu2QZT8OoxtcKXGBR2OrZh9zpIkqaekzlmSJGFxliSpOK0X54g4PSLujIjtEXF+y7Ec\nExHXRMQdEXF7RLyp2v7uiNgZETdXH2e0FN93IuLWKoat1bZlEXFVRNxdfT5yzDE9s29dbo6IhyLi\nzW2tWURcHBG7I+K2vm3zrlH0fKT62bslIk5qIba/johvVs//+YiYqbYfGxE/7lu/C0cZW1NKyWdz\neei4zOfh42o2lzOztQ9gCfAt4HjgUOAbwJoW41kJnFRdPhy4C1gDvBv4yzbXqorpO8DyOdv+Cji/\nunw+8IGWv5/3A89oa82AFwInAbcdbI2AM4B/BQI4BdjSQmy/CyytLn+gL7Zj++83CR8l5bO53Nj3\n03xefFyN5nLbnfPJwPbMvCczHwEuA9a1FUxm7srMm6rLDwPbgFVtxbNI64BN1eVNwFktxnIa8K3M\nvLetADLzq8DeOZsXWqN1wCey53pgJiJWjjO2zPxyZu6vrl4PrB7V849BMflsLjfCfB4grqZzue3i\nvAq4r+/6DgpJoIg4FjgR2FJtemM1rri4jXFTJYEvR8SNEbGh2rYiM3dVl+8HVrQTGgBnA5/qu17C\nmsHCa1Taz9/r6f3lP+u4iPh6RFwXEf+traAGUNp6AuZyDebz8GrnctvFuUgR8RTgs8CbM/Mh4KPA\nrwDPAXYBf9tSaC/IzJOAlwHnRcQL+2/M3gyllffGRcShwJnAZ6pNpazZY7S5RgcSEe8E9gOXVpt2\nAU/PzBOBtwCfjIgj2opvUpnLwzGfh9dULrddnHcCx/RdX11ta01EHEIvmS/NzM8BZOYDmfloZv4M\n+Di98d3YZebO6vNu4PNVHA/Mjm6qz7vbiI3eL5mbMvOBKsYi1qyy0BoV8fMXEa8FXgG8pvplQ2b+\nJDO/V12+kd6+3F8bd2wDKmI9Z5nLtZjPQ2gyl9suzjcAJ0TEcdVfamcDm9sKJiICuAjYlpkf7Nve\nv9/iD4Db5j52DLEdFhGHz16
md/DBbfTWa311t/XAleOOrXIOfSOwEtasz0JrtBn44+ooz1OAH/SN\ny8YiIk4H3gqcmZk/6tt+dEQsqS4fD5wA3DPO2IZQTD6by7WZzwNqPJdHdTTbYj/oHWF3F72/Jt7Z\nciwvoDciuQW4ufo4A/hn4NZq+2ZgZQuxHU/v6NdvALfPrhVwFHA1cDfwFWBZC7EdBnwPeGrftlbW\njN4vlF3AT+ntczp3oTWid1Tn31c/e7cCa1uIbTu9/WSzP28XVvf9w+r7fDNwE/D74/6+Dvkai8hn\nc7lWfObzcHE1msuevlOSpMK0PdaWJElzWJwlSSqMxVmSpMJYnCVJKozFWZKkwlicJUkqjMVZkqTC\n/H9u0hIv+VW9TwAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "tags": [] + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "zWKmRev3WKK4", + "colab_type": "text" + }, + "source": [ + "### Visualise Panoptic Results" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "StOBbFmujxIw", + "colab_type": "code", + "outputId": "28e3f307-1c2b-4343-e759-23dd1e965d79", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 486 + } + }, + "source": [ + "\n", + "# Visualise Results\n", + "rows = 2\n", + "cols = 2\n", + "fig = plt.figure(figsize=(8, 8))\n", + "for i in range(1, rows*cols+1):\n", + " img = dataset.load_image(i)\n", + " image = np.array(img)[:, :, [2, 1, 0]]\n", + " result = vis_demo.run_on_opencv_image(image, panoptic=\"True\")\n", + " \n", + " fig.add_subplot(rows, cols, i)\n", + " plt.imshow(result)\n", + "plt.show()\n" + ], + "execution_count": 51, + "outputs": [ + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAecAAAHVCAYAAADLvzPyAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzt3XvQXXV97/HP90lEpdIi6qQ0QQIt\ndUqdo5AccMQGR7QnUDQ4dShoERXJdArWy6kQcMYETltDbbH2phOFEjhgRKtCL1jxmtNaKISLoBSN\nFEoygaiI0kq1yfM9f+y1wsp+1v2y12/t/X7NPPPsvfa6/PZ6nt/+7u93/dZa5u4CAADhmOu7AQAA\nYH8EZwAAAkNwBgAgMARnAAACQ3AGACAwBGcAAALTWXA2s9Vmdr+ZbTezdV1tB0C36MvA5FkX5zmb\n2SJJ35T0Kkk7JN0m6Ux3/0brGwPQGfoy0I+uMufjJG139wfc/SeStkha09G2AHSHvgz0YHFH610q\n6eHE8x2Sjs+a2cy4TBmw0Hfd/Xk9t6FSX5aq9+cVK1ZIkrZt25b6WtXpsbTXy6yj7TY1Xa6svDY3\nmTe5TJl9GkvOW2Z7Vf52Q+TuVmX+rsrar5O02t3fGj0/S9Lx7n5+Yp61ktZGT1csXAsw87a5+8o+\nG1CmL0fTa/fn+DPIbOFnl7tXnp5oU+F2s+Zps01Nlysrr81N5k0uU2afxpLzltlelb/dEFUNzl1l\nzjslHZZ4viyato+7b5K0SSJzBgJW2Jel+v256APfzGoFkiEZyvtq8reI52/ri8gs6OqY822SjjKz\nI8zsAElnSLqxo20B6E7vfdnMKn2gF83v7qW+FJTdZry+5OO0n2mS9R7LZu1ZyxK4n9JJ5uzue8zs\nfEn/IGmRpCvd/etdbAtAd+jLQD86OeZcuRGUtYE0vR9zrqPNsnbV5aocF62bpY1vI5Sye9fHnMu2\nocl+lfrfj12pesyZK4QBwBQJIeFCcwRnAAACQ3AGELS2B1TFA4/y1lllm2XWF6+zyvSqyg6oyht8\nVfS+s14vOyAsbx8wIGx/XZ1KBQCp5uZX7Xu8yE8sDE6L/MQFy6U9L5qeJmvbWdss2kbee1nkJxYu\nF2+3iTL7NK0d83Nb92tP2XOSq6LsXg6ZMwAAgSFzBjBxySzNVL2UmbVMlXWNz1s2487KbqtmvfE+\nqPP+i1RZZ/J9F5WVm5adKVuXR3AGMDOqlLy7ltWW5BcXzC7K2gAABIbMGcDUCyljLhK3lQx6thGc\nAUy1IQXmpGS7CdSzh7I2AACBIXMGMJXqZsxtZqnj2W/dNlHqnj1kzgAABIbMGcDUaHJ8uUpW+vpz\n/r7cjOdK111xSuo26rSV49Czg1tGAuGayltGzs2vaj2wlA10edstHXBblgzeUrWg3cZ+pGQ+Gdwy\nEgCAgSNzBsJF5lywnrKyttdXtpwlmUVPKoMmc56Mqpkzx5wBDE7dwBVaMB6XbN91c+UDdReHCtAv\nytoAAASGzBnAYNTJmEPPlrOkZdF575/y9HThmDMQLo45jy1XZH5u62CDcVnxcem8/VFl/xLUJ4PR\n2gAADFztsraZHSbpaklLJLmkTe7+QTM7RNLHJS2X9KCk0939+82bCqArIffnshnzrIgrA9fNnZK5\nb8iGh692WdvMDpV0qLvfYWYHSdom6TRJb5L0mLtvNLN1kp7t7hcWrIuyNrDQxMrak+zPVcvaRcF5\nFkrZWcqUuKX8IE0gn4yJlbXdfZe73xE9fkLSfZKWSlojaXM022aNOjiAgNGfgbC0MlrbzJZLOkbS\nrZKWuPuu6KVHNCqTARiIkPpzW4OeplWZErfEedBD1HhAmJk9S9JfS3qHu/8w+ZqPauapJS4zW2tm\nt5vZ7U3bAKAdofTnuflVpQPzrJa0k9gH06dRcDazp2nUka91909Fkx+Njl/Fx7F2py3r7pvcfeUQ\nTxUBphH9GQhHk9HaJukKSfe5++WJl26UdLakjdHvGxq1EEDnQunPVQY2kS3u74xz10mStnxkY+rr\nDPwalibHnE+QdJake8zsrmjaxRp14uvN7BxJD0k6vVkTAUwA/RkISO3g7O7/KClraPhJddcLYPLo\nz9PjjHPXZWbPEoPDhoIrhAEYHEramHYEZwAAAsNdqQAEgSuBAU8hOAPoVZNLTwLTirI2AACBITgD\nGARK2pglBGcAmDJnnLtu30VJ0hRdHhX9IzgDABAYgjMATKkyGTTCRHAGACAwBGcAAAJDcAYAIDAE\nZwC94QIkQDqCMwAAgSE4AwAQGIIzAACB4cYXACaOY81APjJnAAACQ3AGACAwBGcAAAJDcAYAIDAE\nZwAAAtM4OJvZIjO708z+Nnp+hJndambbzezjZnZA82YWc3e5+yQ2BUytUPozMOvayJzfLum+xPPL\nJH3A3X9B0vclndPCNkrrI0AP5UtB/AUm7aerdffdLlQWVH8GZlWj4GxmyyT9mqSPRs9N0iskfTKa\nZbOk05psA8Bk0J+BcDS9CMmfSL
pA0kHR8+dIetzd90TPd0hamragma2VtLbh9pPra2tVUyfOQLP2\nUZMMtey6817P+9sVvY5WBdOfk15/zt93sVogaLUzZzM7VdJud99WZ3l33+TuK919Zd02oB1mVquM\nHAfOOsEz3h6BNwz0ZyAsTTLnEyS9xsxOkfQMST8t6YOSDjazxdG37WWSdjZvJoCO0Z+BgNTOnN39\nIndf5u7LJZ0h6Yvu/gZJX5L0umi2syXd0GAbpQcJZb2WNr3MgKO2BydVWV+bg6GaZLdtalo6Z3BY\ntybRnwGU18V5zhdKepeZbdfomNUVHWwDwGTQn4EetHJXKnf/sqQvR48fkHRcC+tMzfa6HLyUte02\nsua09TEYCiHqoj+Pi+86lXV3qng6d6fCrArulpFlAmjZIDu+TJmgnLbe+HnVYFlmfWmGEpDjgWTx\n46rLSnwBAYA0XL4TAIDADC44x4Ob6mZqaZKn9eTNVydrTq5/mq9+lfUey/yt8q4QRlY93YrK1lll\nb2DaBVfWniazEFjqHiqosvws7EcASBpc5gwAwLQjc54BdQdtASG47opTJHEZzzq2fGRj7uvzc1s5\ndBCoQQbntoPN+OjputeBHl9n3fW1+f6S7cjS5JSuvLaWHSHf9T4AZk2ZoIywUdYGACAwYWTOK56l\nuduOlSQt8hMl5V/4I54nrRxTdFGDMrK2vchPrLX+OuuLl4vfaxNl9mmRSdzZCgAwEkZwjiRLLaaK\np0plzJ81PS0o1g2EWculTS9TTqr63staNH9irXJWW6eXdbE8Zsd1V5zCcWfMDMraAAAEJqjMuUuh\njEgcbwcDMzDruM42sNBUBudQAnEZfPAAI0Wn9czNr5LOnWCDBoqR2tOBsjYAAIGZusx5SFlzEhk0\nUIwLkmQjY54uZM4AAARmajLnJhnzxk+/tcWWjKx77UdrLZd8H3zTxawpOzjsujlOq8J0G3xwrhuU\nuwjIZdZfJWjPza8iQAMpGBy2P0ra04eyNgAAgRl05lwla+46Uy5rvB1FmTRlbsyiovK29FS2eMa5\n6ybSphCRMU+vwQXnMgE5lEBcRtzWMuVuRnRj1pS5peGWj2ycyQCdF5j5jBg+ytoAAASmUeZsZgdL\n+qikF0pySW+RdL+kj0taLulBSae7+/cbtVLlS9hDypqTqmbQfDNG2ybZn6ugxL0/StmzwZrcrs/M\nNkv6f+7+UTM7QNKBki6W9Ji7bzSzdZKe7e4X5q5n5UE+d9uxmf9U01bKLqtMoK7SEQnqg7PN3VdO\namOt9Wez3A+VJv+HZT4L4nVP46lWXZSyOVw2Ge5e6RZ8tcvaZvYzklZJuiLa8E/c/XFJayRtjmbb\nLOm0utsAMBn0ZyAsTcraR0j6jqS/MrMXSdom6e2Slrj7rmieRyQtSVvYzNZKWitJev7TUzcwqxlz\nbOOn31pqNDffeNGC9vpzh8qUuOPXtmg6St1FZWyJrHca1S5rm9lKSbdIOsHdbzWzD0r6oaS3ufvB\nifm+7+7Pzl1XSll71gPzuLwgXbZjEsgHZ2Jl7Vb7c4dl7fH1lDWkAF0mGMfa3I98NnRrYmVtSTsk\n7XD3W6Pnn5R0rKRHzexQSYp+726wDQCTQX8GAlK7rO3uj5jZw2b2Ane/X9JJkr4R/ZwtaWP0+4Yq\n6y36NjxL2XJSXombb75oqqv+3KUqlbasbDSEjLpKppxEf59uTUdrv1ijUy8OkPSApDdrlI1fL+n5\nkh7S6NSLx3LXE5W188xqUE5TdBw6b9Q7HXpQJj1au53+PKGydta665pkoK4bkKX2gzJf7iejalm7\n0XnO7n6XpLQPj5OarBfA5NGfgXA0ypxba0RB5kzWvFCd7JnMeXAmmjm3pc/MOV5/W9rMpptky7Eu\n9huZ82RMckAYAADowOBufAEAecYzwCaZdBvZblNktLOJ4DxQRdfiplQFjJS5s1VI6LOQKGsDABAc\nMueBK7rEJ4PAgPzTC/tG/0QagjOAmVX1ksFdbBdIQ1kbAIDAkDmjU1nn0ZuVO+Wv7PLxfEXrdfdS\n85TZZtr6xpfN2lbZ+aZVCOXkPs36+58GedcIaaM/E5zRmbxAWBRM017v+oI5TbdZJfCPb6Psl4tp\nQEkXQ1b2s6spytoAAASGzHkKlD3neQjyvpWaWWfZc1bWG0/L227TknqX7wvAZCX7c5NKWNDBmWtq\nV1MUpPuQFnTMrNTx20looxNVOYZNEEZVTcZtNBk/kbe+vPmqHIstOpRUpXRc9xBaFZP8jKKsDQBA\nYILOnDFsZb71zsIAqNgsvVc0V2bgUUgDKtPaVHVApVT98FC8XFFW20dlrgmCMyaOY6xAd5qMn2iy\nzeQ2qm6zSUm9aBtDCshJlLUBAAgMmTM6U7WM1NYox6y2ZG0zfr2rb9hzG5YXbmOWznNGNVkXrKl7\nIZ62tdF3qgwwm5WqG5kzAACBIXNGa2z94QumFX3LTVumznLJ7DRN/Pr8hgdb22bR9CrbiNuXXJdf\n8lDp9WL6MKByf7P0XiWCMzoQB5U5La+8bBykssxveLDR8lmvFy1Xd3ttbANI6nJgF4pN6vABZW0A\nAALTKHM2s3dKeqskl3SPpDdLOlTSFknPkbRN0lnu/pOG7cSAVCnzIhz05zDE/SeuEuVVXJJ9LXlo\np6hKU+cQTbJqNb7dOJvsYrvjh6yqnvfdZqZb5jTQtgaX1g7OZrZU0u9IOtrdnzSz6yWdIekUSR9w\n9y1m9mFJ50j6UOOWorSNn35r4XW2uTMQkujPYUgLUkWHctLUWWZuw/Lc5Yper7vdquqez9xW0JzU\nYYWmZe3Fkp5pZoslHShpl6RXSPpk9PpmSac13AaAyaA/A4GonTm7+04z+yNJ/y7pSUmf06js9bi7\n74lm2yFpadryZrZW0lpJ0vOfXrcZCASl7GzxALmQ91Gr/RmVpf1vdDVIsSg7rrPuSQ+orHImQ1cD\nt7oeENakrP1sSWskHSHpcUmfkLS67PLuvknSJkmylQcx7HCg6gScZMey9YdX62iX5HeIf/ynF5Ve\n18tOuLv0vG3Ie599B+5W+7MZ/RloqElZ+5WS/s3dv+Pu/y3pU5JOkHRwVBaTpGWSdjZsI4Du0Z+B\ngDQZrf3vkl5iZgdqVAY7SdLtkr4k6XUajfA8W9INdVbOvZyHoWzJNitrrJI1V8mK21jfpDPrnnXa\nnwFU0+SY861m9klJd0jaI+lOjcpafydpi5n9XjTtijYaitnUdkBue9vTEsDpz2Er8yX22tN+VHp9\n//fu+5o0p3Vv+MyB+z3v+zBPCBqd5+zu6yWtH5v8gKTjmqwXwOTRn4FwcPlONFK3nF2kz4y5imQ7\npyWLxvBUyZqve+2THbaknmtP+9GC7HnWEZwRlKEE5TRZbSdooytVgnLoku/lN/VLPbYkDFxbGwCA\nwJA5T6GsS3dK7V+2s2i0djy9THl7yFlznvh9kUGjTdOUNWMhMmcAAAJD5ozeTWvGPO4f/+l
FZM9o\njIx5NhCc0UhXo7UBLERgnh2UtQEACAyZM3o1KyXtGIPDUAcZ8+whOKORuqO1Zy0oA0AVlLUBAAgM\nmfMUyTu/GWFh5DaAPMEG53Wv/Si3jRwARmsDQPsoawMAEJhgM2eUV1TOji/ZOTe/qvVtt3n5TgDA\nCJkzAACBITgDABCYoIPzutd+lBHIBcqWtEPCOc4AkI9jzpgYgjIAlBN05gwAwCwaRObMOc8LFZWz\n//kTuyRJx0+iMQCAVpE5AwAQmMLgbGZXmtluM7s3Me0QM7vZzL4V/X52NN3M7E/NbLuZfc3Mjq3S\nmDjbS8PgsKeUzZqBcZPszwDqK5M5XyVp9di0dZK+4O5HSfpC9FySTpZ0VPSzVtKHqjbonz+xqzBI\nz6qiLyhF+w7QhPszgHoKg7O7b5X02NjkNZI2R483SzotMf1qH7lF0sFmdmhbjQXQDP0ZGIa6A8KW\nuHucoj0iaUn0eKmkhxPz7YimLUjnzGytRt/Gpec/fd/043/jW6MHv/GtzMtNxtnjLA0SK8qYx+3b\nj0CxdvszgMYaDwhzd5fkNZbb5O4r3X2lnvc0SQsDyvzc1tyLaMxCibvOsXYCM+pqpT8DaKxucH40\nLm9Fv3dH03dKOiwx37JoGoBw0Z+BwNQNzjdKOjt6fLakGxLT3xiN8nyJpB8kymWdmOZR3GXeV7Kk\nffxvfCvYrJmrgwUtmP4MYKTwmLOZfUzSyyU918x2SFovaaOk683sHEkPSTo9mv3vJZ0iabukH0l6\ncxuNnJ/bWni7w2QgG/Kx6LJfNMaDcl+ybhUZ23eryF89eAKtQZEQ+jOAYoXB2d3PzHjppJR5XdJ5\nTRsFoBv0Z2AYBnH5TumpuysVZdDSMEdzVy1hx/ouY8eZcVYGXZRZAwAWGkxwjiVHb1cpdY/rK3DX\nPT4+P7eV62QDwIzg2toAAARmcJlzUpmBYlkmMYCsjVHkeed5D8nLTribEdsJLzvh7r6bACBgZM4A\nAARm0JmzVG2gWJY2jk13ca71tGTNeAoZM4AyBh+cY1UGilUx6QucEJABAJS1AQAIzNRkzkltlLon\njYwZABCbyuAcywt4fQVugvDs4ngzgLIoawMAEJipzpzzdDWALGsbAACUFVRwHtIx4jKm7f2gHsrZ\nAKqirA0AQGDCyJy3/Qcl4BkQZ5BcxhMA8pE5AwAQGIIzAACBITgDHWIwGIA6CM6YuFkIWC874e6Z\neJ8AukFwBgAgMGGM1sZg2frDc1/3Sx6aUEv6R6aMrrzhMwdKkq497Uc9twSTQuYMAEBgCoOzmV1p\nZrvN7N7EtPeb2b+a2dfM7NNmdnDitYvMbLuZ3W9m/6urhgOhGNLxZfrzsMUZNKZfmcz5Kkmrx6bd\nLOmF7v4/JH1T0kWSZGZHSzpD0i9Hy/ylmS1qrbUIjl/yUG7p2tYfnlr6jgPaUILauAG3/yrRnwft\nDZ85kCA9AwqDs7tvlfTY2LTPufue6OktkpZFj9dI2uLuP3b3f5O0XdJxLbYXQAP0Z2AY2jjm/BZJ\nN0WPl0p6OPHajmjaAma21sxuN7PbW2gDepKVGceKMushGmC2XAX9eSCmNXue1vdVVaPR2mb2Hkl7\nJF1bdVl33yRpU7Qeb9IODFsy2IV63e0pD8iS6M9DlBbIhjiim4C8UO3gbGZvknSqpJPcPe6MOyUd\nlphtWTQNQMDoz0BYagVnM1st6QJJJ7p78mvajZKuM7PLJf2cpKMk/UvjVmJmjGeofWbSs5AtS/Tn\nISi6nkDSb+qXOmxJN+xF5d/frCgMzmb2MUkvl/RcM9shab1GozmfLulmM5OkW9z9t9z962Z2vaRv\naFQeO8/d93bVePQvPp6c9eERT6973DkvQDYN3LMSfJPoz8AwFAZndz8zZfIVOfP/vqTfb9IoAN2g\nPwPDwOU70Uifl+8synyTmfUsZskAhsueGvvRYyMY3TlYVY6FoRq/5KFt7r6y73ZURX+uj/5UbKin\nZrq7VZmfa2sDABAYMme0gm/87SNznk30pXRDzZhjVTNngjNaw4dKuwjOwPSgrA0AwMCFkjl/R9J/\nSvpu323J8FyF2bZQ2yWF27ZQ2yUtbNvh7v68vhpTl5k9Ien+vtuRYUh//1CE2i5pOG2r3JeDCM6S\nZGa3h1rCC7VtobZLCrdtobZLCrttVYT8PmhbdaG2S5rutlHWBgAgMARnAAACE1Jw3tR3A3KE2rZQ\n2yWF27ZQ2yWF3bYqQn4ftK26UNslTXHbgjnmDAAARkLKnAEAgAIIzma22szuN7PtZrau57YcZmZf\nMrNvmNnXzezt0fQNZrbTzO6Kfk7pqX0Pmtk9URtuj6YdYmY3m9m3ot/PnnCbXpDYL3eZ2Q/N7B19\n7TMzu9LMdpvZvYlpqfvIRv40+t/7mpkd20Pb3m9m/xpt/9NmdnA0fbmZPZnYfx/usm1tCaU/05dr\nt4v+XL9d7fZld+/tR9IiSd+WdKSkAyTdLenoHttzqKRjo8cHSfqmpKMlbZD0u33uq6hND0p67ti0\nP5S0Lnq8TtJlPf89H5F0eF/7TNIqScdKurdoH0k6RdJNkkzSSyTd2kPbflXS4ujxZYm2LU/ON4Sf\nkPozfbm1vyf9uXy7Wu3LfWfOx0na7u4PuPtPJG2RtKavxrj7Lne/I3r8hKT7JC3tqz0lrZG0OXq8\nWdJpPbblJEnfdvfeLoLr7lslPTY2OWsfrZF0tY/cIulgMzt0km1z98+5+57o6S2SlnW1/QkIpj/T\nl1tBf67Qrrb7ct/BeamkhxPPdyiQDmRmyyUdI+nWaNL5Ubniyj7KTRGX9Dkz22Zma6NpS9x9V/T4\nEUlL+mmaJOkMSR9LPA9hn0nZ+yi0/7+3aPTNP3aEmd1pZl8xs1/pq1EVhLY/JdGXG6A/19e4L/cd\nnINkZs+S9NeS3uHuP5T0IUk/L+nFknZJ+uOemvYydz9W0smSzjOzVckXfVRD6WX4vZkdIOk1kj4R\nTQpln+2nz32Ux8zeI2mPpGujSbskPd/dj5H0LknXmdlP99W+oaIv10N/rq+tvtx3cN4p6bDE82XR\ntN6Y2dM06szXuvunJMndH3X3ve4+L+kjGpXvJs7dd0a/d0v6dNSOR+PSTfR7dx9t0+hD5g53fzRq\nYxD7LJK1j4L4/zOzN0k6VdIbog8bufuP3f170eNtGh3L/cVJt62iIPZnjL7cCP25hjb7ct/B+TZJ\nR5nZEdE3tTMk3dhXY8zMJF0h6T53vzwxPXnc4rWS7h1fdgJt+ykzOyh+rNHgg3s12l9nR7OdLemG\nSbctcqYSJbAQ9llC1j66UdIbo1GeL5H0g0S5bCLMbLWkCyS9xt1/lJj+PDNbFD0+UtJRkh6YZNtq\nCKY/05cboz9X1Hpf7mo0W9kfjUbYfVOjbxPv6bktL9OoRPI1SXdFP6dIukbSPdH0GyUd2kPbjtRo\n9Ovdkr4e7ytJz5H0BUnfkvR5SYf00LafkvQ9ST
+TmNbLPtPoA2WXpP/W6JjTOVn7SKNRnX8R/e/d\nI2llD23brtFxsvj/7cPRvL8e/Z3vknSHpFdP+u9a8z0G0Z/py43aR3+u165W+zJXCAMAIDB9l7UB\nAMAYgjMAAIEhOAMAEBiCMwAAgSE4AwAQGIIzAACBITgDABAYgjMAAIEhOAMAEBiCMwAAgSE4AwAQ\nGIIzAACBITgDABAYgjMAAIEhOAMAEBiCMwAAgSE4AwAQGIIzAACBITgDABAYgjMAAIEhOAMAEJjO\ngrOZrTaz+81su5mt62o7ALpFXwYmz9y9/ZWaLZL0TUmvkrRD0m2SznT3b7S+MQCdoS8D/Vjc0XqP\nk7Td3R+QJDPbImmNpNQObWbtf0OYIStWrJAkbdu2reeWhGEI+2PFihVl2vddd3/eJNqTo1Jfjuah\nP2dZ8az0yXrBgmnbdH/uvOOvZ00vbFJi20XLltlG2+ura3yfLtjGtv9YuEy5frlv3n2rKrGMu1up\nFUe6ypxfJ2m1u781en6WpOPd/fzEPGslrY2erli4FpQV/w3NKv3tp9YQ9oe7l2nfNndfOYn2ZCnT\nl6Pp9OcS5uZXpU7fa19ZMG2Rn5g77/jrWdOLJLddtGyZbbS9vrrG9+n4Nubnti5YpmS/3DdvrMwy\nVYNzV5lzIXffJGmTxDdtYOjoz9WMBwZT+c/trHkXzdcLcFUCY5l5215fWhAtI2+fZn1JqvKFvusv\n/10F552SDks8XxZNQ0vSKh5Z3+TSMskq85bZdtYyRdsu2lbd7XaxviFk5B2gLwcsK8hMk/H3WDdY\nD01Xo7Vvk3SUmR1hZgdIOkPSjR1tC0B36MtADzrJnN19j5mdL+kfJC2SdKW7f72Lbc2qqtlunXmz\nls/Lgqtuu8zydZersr6idXYxNmMI6MvhaitrfsXFp7WynnFf/IPPdLLeuflVM5E9dzIgrHIjOEbV\nSFHArTJwoShw5W0jr6ydtWyZ16uWmfPaWmZ9ee1py1AGhNVBf84WB9QmwaVuUO4qCNfVRvCuux/7\nCPBVB4RxhTAAAALT22htTF5bpew2t21muYPbusheQ6gWAXWUyZpDy5CzlGlnUXY9zSVugjNmzoyN\ntsYUmKagXMUrLj6tVICWpm8UN2VtAAACQ3AGgIDlZc3zc1s1P7d1KrPm2CsuPq3U+5u2c74pa6NQ\n8rjwpErC8XbaON5dVd0R61Xk7dMZvdgJxhQFm/m5rXrlussn1Jr+xQE6r8w9TSVuMmcAAAJD5jxA\ni957Vur05CjkxevfWHq5utuosv4y266zzbzl0qYvXv/GwvUVnY/dNkaPI6lMxixpprLmpLKDxIae\nPXMRkgFa9N6ztPfSa/puRu+aln8HsB+5CMmUKVN2JTiXlxeki/YxFyEBAACVUNYGgAEhY35KXol7\n6IPDyJwBAAgMmTOCl3ZzjBDGSgBtKzrePM3nM9dVdIrVUDNogjNalzZKusnAq2RA5hxgTCOCMsZR\n1gYAIDBkzmhN0XnOTU9bIlsGMCvInNGKshcZqXIhFGAWUNJGGoIzAACBoayNRupkwgO4MhfQOTJm\n5CFzBgAMXtGXmaHdUpLgDABAYAjOqK2opJ1XumZwGABkq33M2cwOk3S1pCWSXNImd/+gmR0i6eOS\nlkt6UNLp7v795k1FKJoEZYTekKh3AAAbP0lEQVSJ/oxpUPZqYUPQJHPeI+l/u/vRkl4i6TwzO1rS\nOklfcPejJH0heg4gbPRnICC1g7O773L3O6LHT0i6T9JSSWskbY5m2yyJIYdA4OjPQFhaOZXKzJZL\nOkbSrZKWuPuu6KVHNCqTpS2zVtLaNraPyckraaeVs+NpWctxWlV46M/dyyuvcgoVpBYGhJnZsyT9\ntaR3uPsPk6/56C4FqbcPcvdN7r7S3Vc2bQOAdtCfgTA0Cs5m9jSNOvK17v6paPKjZnZo9PqhknY3\nayKG5ubz/2bfT4yR2+GjPwPhqB2cbXQXgisk3efulydeulHS2dHjsyXdUL95ACaB/gyEpckx5xMk\nnSXpHjO7K5p2saSNkq43s3MkPSTp9GZNRAiKMtvPPna6dP7CP3Uye9Zjo1+rD7k+cxscf+4N/RkI\nSO3g7O7/KCnrHn4n1V0vgMmjPwNh4QphAAAEhuCMQnkl7c8+dvqopF1B3vwMDgMAbhmJHKWOMwMA\nWkfmDABAYAjO6EVROZzyNoC6puEqawRnpGr7OHPeuuq2AwDSZN2VakgIzgAABIYBYdgPg8CA/kxD\nORbtIHNG7zj+DIxMQzkW7SA4AwAQGMrakEQ5GwBCQuaMYDByGwBGCM4AAASG4IygStoMDgMAgjMA\nAMEhOAMAEBhGa8+wkMrZWdtefcj1qa8veu9Z2nvpNZNsEtCa+bmtmptflfpafK4zFySZbWTOAAAE\nhswZADAViq6wNj+3VZIyqxYhIXOeUSGXtJMYuQ2gjLKBeSgIzgAABKZxcDazRWZ2p5n9bfT8CDO7\n1cy2m9nHzeyA5s0EMAn0ZyAMbWTOb5d0X+L5ZZI+4O6/IOn7ks5pYRtoSVEpuOgiIH0oc2EStIb+\nHIjPb3yXPr/xXX03Az1pFJzNbJmkX5P00ei5SXqFpE9Gs2yWxPkAwADQn4FwNB2t/SeSLpB0UPT8\nOZIed/c90fMdkpY23AZaUpQxY+bRnzu2174iSTJZ4cjhePrn596lV667fDINHKBpGwgWq505m9mp\nkna7+7aay681s9vN7Pa6bUA5ZUrZQ8DI7e7Qn/O5e2frKAoeIZ72c/P73tl3Ewrtta9or31F7r7g\nJ/6S1ETaeuOfNjTJnE+Q9BozO0XSMyT9tKQPSjrYzBZH37aXSdqZtrC7b5K0SZLMrJ13A6Au+jMQ\nkNqZs7tf5O7L3H25pDMkfdHd3yDpS5JeF812tqQbGrcSiDA4rBv05+6ZmRb5iVrkJy54bX5ua24G\n/cU/+Exh+XYWZe2TvfYVmVnmzyI/sXaWGy+Xte7kPE10cZ7zhZLeZWbbNTpmdUUH2wAwGfRnoAet\nXL7T3b8s6cvR4wckHdfGetHcNBxrTvPZx07PvSmGJG6MURP9uT95N8SQRpkiN8QoHgTWJzNr5bgz\n19YGgDFpH67JaXH5Mjk9bdr49ORo7Sx5g5UW+Yn7zn1+5brL9w3MetVFH9g3T9pgreTrWbIGeZVZ\nto31pb2XNHmBed+hgSkY9cDlOwEACAyZ85Qayo0tgBAls11J+wYAFUnLoqtIbicub49n0snzn5XI\nTstm0ePqLldlfUXrLbO9rkrZZf+2k0ZwnkLTepx5XPxeOPaMEDT5kM8L6ov8xNRSd/LY9M3ve2du\nOTjr9azpr7roA7UCdFE74nnibSS3l6VsUK56sZHkyOoQUdYGACAwZM5TZFZL2Xkjt6XRfiF7Rtfa\nKGUnFV3ec3zbaZf4jDPS8Sy47OCrMrLWXWfZWNyuZNYcVw/GzxNPZsxls+C99pX9Bo1lVS7yKhpl\nthXPs
3LlylLtSiI4YypQ4sa0mp/bWjj6eG5+1egYdGQS1+LOOlad/EJQpdSeDNRdlbLjYJl1qKDM\n8kVfwsZH6tctm1PWBgAgMGTOADAFkuXvOIsO8W5WWSX1uFxdJqOtc6eptJHwUrnstswo/OQ8Wee5\nV0FwngKzeqw5DcefMTTJD/OiD/KiK4jF4nm+qLg8vP+x3bKl5zKqXGwkr3RdtsxcJjCnBdzxi8FU\nLU+PT8+66Eza4zrHnClrAwAQGDLngSNrBtqV1aeSGdHi9W8svVzdbWQutyEx4ZLSm8wYZPXOwtHV\nXzrgztTpaftAkva87+rc9aXdlSsva27rkpxlB2a1MV8bFzWxEE7A5v6v1ZT5EIhLtzef/zetbvuV\nf3aqPv+2v211nV3JK2/HAi9xb3P36vWwng29P3d56CMuN9c5ZlpmvW3KOnWpyvLjy6ZNy1su7TSp\nssd986bNza8qte6yhxtKjuKuFLEpawMAEBjK2lMo+a3/VX/+6n3f/ppkvK/8s1Mbt2vSss593nPJ\nqOyWVZYDhqbsQLE21cmsk+cX5y1XdzR2kq0/fPQ7cZnOuQ3LFywXzxc/9kseWjC9SJVBfWWROQMA\nEBgyZ5QylOPM06Dtb+CYDclss0oWnRyMVeVc47Tlx8VZ6H42FC+XlrXGWW/WOKm5Dcs1v+HBBcsn\nH8evj1ZUftvJaanvKSGtfVy+E0iRd+4zl/XENEgOEt176TWpZeGsgJ0MyFVK1Wkl4jKKltsvgFZc\nvu5rbSzT9pdpytoAAASGzHlAik6hSmZ/aaWV5KCuZJk67fSoKvPmbScpbbnx9aUtW6aknjdgrW5J\nPqt8lvUNOdSbtiM8bQ3esg0LS7CL3nuWfEN+6TVNlYFdRaVdNEdwHoi8wJxWks06z6/ovOc4yNUJ\naGnLVhnlXfQloexyacvE+yhrPybPbS0653FcCNcKwDC0eX5z3mdCHLTTPhsWv+aRwnXvufFn09db\nYQTztIr3QddfUChrAwAQmEaZs5kdLOmjkl6o0di3t0i6X9LHJS2X9KCk0939+41aic41yZjj5dOW\njaflZcBF2856PWt6mW2mqXs1oGkpZdOfh6HOZUKrDnhc/JpHFmTPoWfN2256qVac/NV9z9POV86b\nFgvlfTYta39Q0mfd/XVmdoCkAyVdLOkL7r7RzNZJWifpwobbQUte9eev3vc4WeJuEpSbLF+07Off\n9repQbbLy4jOcJma/hy4KoG5qbj8nVXiTooD3PilT+847szK273zptdrm15aeTlpFKD3OS56fJNS\npx37Lx9LXUedi5B0oXZZ28x+RtIqSVdIkrv/xN0fl7RG0uZots2STmvaSADdoj8DYWmSOR8h6TuS\n/srMXiRpm6S3S1ri7ruieR6RtKRZE9GVOIv2P5vuTDFZLShjWsrUFdGfA1bmTI28eRa99yzZXe9v\nu1mDFmf1WRl035oE58WSjpX0Nne/1cw+qFHJax9396w71JjZWklrG2wfLSobwEIK5EVtDqmtA0B/\nRqvqlLTxlCajtXdI2uHut0bPP6lR537UzA6VpOj37rSF3X2Tu68c4i3xgClEfwYCUjtzdvdHzOxh\nM3uBu98v6SRJ34h+zpa0Mfp9QystRaayg0SKyl5l7XcpzPWjx3v+7Orcuzzt0ampl9DMmj4+T9p2\n44FbRXeXansQTdbFRtq6CEkfZXX68/AVncvvL363JFUub8cDw/bq+FLzkzG3o+lo7bdJujYa2fmA\npDdrlI1fb2bnSHpI0ukNt4GWxLdKlLq5XWJy/ZPU1nbjfVI0WrvsxUkGiP4coKoXICo6/uwvfjfH\nnxPuOO7MII87NwrO7n6XpLQy1klN1gtg8ujPQDi4fOcMKMqSq2TRWfPWzcTLLFd3m1kZddZycRZi\nGRdsyMpGZnR0NzpW5Vr6aCbEkdtcvhMAgMAQnAeCb8ndYv9imuy99Jrc/2l/8bv3DRBrwx3HnclA\nsJZR1h6QotGYs2p8oFuVAWIEZYSmjXL2Hz56riTpor/4r1bahMkjcwYAIDBkzgNU9M15/OLzIcrK\nDuq02y69Zt/pTHsuuZoBWphqcVZcxvvOe4ak7Ay6zGlV8Y0v7Jgw7tY0K8icMXFFF0OpU7Y3s30/\nwFB1NUI7DtJp2j7+PGQhHTcnOAMAEBjK2piYSdwkHphmVUrabal6+U60g+CMoA3h+DnQVNEX17yy\ndFltHH/G5FDWBgAgMGTOmAjOzQYwBPGgsBX6aq/tIDijU1VGn2bNy/FnTLNJlLQxPJS1AQAIDJkz\nglF0H1oGh2Ga9JUxv++8Z+QOCpPSq1RchGSyCM7oTNWbxAOYHcfcep3uPP71fTcjWJS1AQAIDJkz\nglJ05y0Gh2Ea5FWVJjEArOic56xDSPMbHpQkzW1YXnvbx9x6Xe1lQxNf07+LywaTOQMAEBgyZ7Su\njYv3l8mgyZ6B4eE4czkEZwCYEM5pRlmUtQEACEyjzNnM3inprZJc0j2S3izpUElbJD1H0jZJZ7n7\nTxq2EwPQxb1o8859ZnBYu+jPsyfvnOe4f+255OoFr8UDw6T9B4ftueRq3fnZp8rW44O/kiXtMqdS\nZQ0eS1tufH1py5YpqcfLzae8VmcgnLtr5cqVlZerHZzNbKmk35F0tLs/aWbXSzpD0imSPuDuW8zs\nw5LOkfShutsB0D36c/f6HqFdtO2sIL14/RslSfO2tdJo7SbnMccBsijYVtl2meWTy604+av7fQmp\nw91rj+RuWtZeLOmZZrZY0oGSdkl6haRPRq9vlnRaw20AmAz6MxCI2pmzu+80sz+S9O+SnpT0OY3K\nXo+7+55oth2SlqYtb2ZrJa2tu32EpYuS9viyjNzuDv25O7M2CCwt6626fNqy8bS8DLho21mvZ02P\nKwRVM+g2zn9uUtZ+tqQ1ko6Q9LikT0haXXZ5d98kaVO0Lq/bDvSry6Ccti6OP3eD/oy8489VtFnK\nbnPbdx7/+tTA3uVlROMgXeeYc5Oy9isl/Zu7f8fd/1vSpySdIOngqCwmScsk7WywDQCTQX8GAtJk\ntPa/S3qJmR2oURnsJEm3S/qSpNdpNMLzbEk3NG0kpttlDzxRar4Ljzyo45bMNPpzB0IdBJalaHAY\nqumlrO3ut5rZJyXdIWmPpDs1Kmv9naQtZvZ70bQrarcOQatS0i4bgPNc9sATuviq3268HixEf27X\nrB1nRvsanefs7uslrR+b/ICk45qsF8Dk0Z+BcHD5TlQ26Yw56Q/e9JeSlJlBM3IbaK6twWFVJEdj\nT/L628mBYiFd95vgjNa1HZDT/MGb/jI3QEuM3Aa6Mn6FsP+5ehRK9kj7HhfJmnePii8YkrVc0bbj\ncwLH54unJ7ebdoWwpGSSMp6wxMea+xqtDQAAOkDmjEryStpxyRmYVdM+ECzrsp23fXaUey7WG/c9\nLnLnZ1+v2zKmF8laLm162rrH50vb5oqTv7rvcdpFSPZeeo0spT
qX/B9oMlqbzBkAgMAQnDG1Fr33\nrMJMBmjLtGfNmCzK2iil6IOnj5J20cjtGCO4AZSx30C0Dcsb35WqCTJnAAACQ+aMwcs7rQroGuXs\n6ZE853l+w4Ol7lvdFYIzCg1hhHaZi5NInPuMdhGYp098IZLkaO0+UNYGACAwBGdkKhrtHErWnFTU\nJkZvAxgCytoAUNEslbPnbWvu69tueumEWjJbyJwBAAgMmTMAVJCXNU9Txhyb81WSsjPoFSd/tbPs\nuc1BWdtuemnvg7yqIDgj1dCONScxchuYrGTQW3Fyjw3JMaTALFHWBgAgOGTO2E+Il+msq0wGTfaM\nsmZpEFiaOV9VODhsFvglD01kOwRnAEApRcefm663LyF+6aCsDQBAYMicIWm6ytnj8q69zeAwlDFr\nI7ST4vd30V/8175pbZS4+86Wk7qqCDRB5gwAQGAKM2czu1LSqZJ2u/sLo2mHSPq4pOWSHpR0urt/\n38xM0gclnSLpR5Le5O53dNN0oDwGh43Qn9GWutlmSBnzuDlfJbvr/ZKkPTf+bK9tKVPWvkrSn0u6\nOjFtnaQvuPtGM1sXPb9Q0smSjop+jpf0oeg3AjbNJW0scJXoz6UV9Y29l14jnXfuhFoTpjlftV/p\nO63Mf/G5/0eS5C9+90TbVkfcxsV6pNcAXVjWdvetkh4bm7xG0ubo8WZJpyWmX+0jt0g62MwObaux\nAJqhPwPDUHdA2BJ33xU9fkTSkujxUkkPJ+bbEU3bpTFmtlbS2prbB9Ae+jMKve+8Z+w3KCwpa3ps\nCBlzaBqP1nZ3NzOvsdwmSZskqc7yaKbMrROnsZzNyO189OeRUuXsGZQ2crsNm5755VbXV8baJ18+\n8W1WUXe09qNxeSv6vTuavlPSYYn5lkXTAISL/gwEpm7mfKOksyVtjH7fkJh+vplt0WjgyA8S5TIE\noEzGLE1n1hzjxhgL0J8rmKH/i8aKMuw+MubxbWdl0P7id496QE/KnEr1MUkvl/RcM9shab1Gnfh6\nMztH0kOSTo9m/3uNTrvYrtGpF2/uoM2oaVZL2XgK/RmT8pwrP6vRv1rYNj3zy5kBus8v6oXB2d3P\nzHjppJR5XdJ5TRsFoBv0Z2AYuHwnJM1mxpw3OEyanQuTYIRBYO0KfcBVUlGJu48MmuCMmcbxZ0jN\nAvMFSz4iSfrDR2f7YiSxsiO5+zzePARcWxsAgMAQnGfAoveelZsZzGJJG2hbnEEj36ZnfjnYrLmo\nXWXPdmkDwRkAgMBwzHnKkTEDCEWoGXNSKIPDCM6ARh0t74vMJMtZGK5pHByWVq6v2h+GEJRDQ1kb\nAIDAEJyByN5Lr+GUKbTigiUfGfQAsay2Fw0unSZ9Dw6jrD2lSp23yTFnoFMXLPnIYErcXXyZGHo5\nu8/jz2TOAAAEhsx5xlC2TXfZA0/owiMPkvTUPsqqPnBZz9nTpITZ9r2Pu5L3/17H0LPmvhGcAQCt\nISi3g7I2AACBIThPkaKRlJRiq8nbX7M0anUW0DcQGsraU6DuHXXiY6zS6JjrLEvuC8ymorEGKDat\nJe0yo7bb/oJH5gwAQGDInCFpYeY4K5k0GTPGFV3KFbNr0zO/XHjOc1sIzkh14ZEHTWWAJhijDI5B\np19ggy8tk0NZGwCAwJA5z6jkN+AyA8bKrCcW34ry4qt+u2bryiG7ASYnL2ue1oFgaSY1cJDMGQCA\nwJi7589gdqWkUyXtdvcXRtPeL+nVkn4i6duS3uzuj0evXSTpHEl7Jf2Ou/9DYSPM8huBXBwH6k/H\n2fs2d1/Z5grpzyir7OfK3kuv0RXvO6zj1oTjnIse3ve4ymms7m5VtlOmrH2VpD+XdHVi2s2SLnL3\nPWZ2maSLJF1oZkdLOkPSL0v6OUmfN7NfdPe9VRqFajg/c/IGXFK/SvRnoJJkQJ6UwrK2u2+V9NjY\ntM+5+57o6S2SlkWP10ja4u4/dvd/k7Rd0nEtthdAA/RnYBjaOOb8Fkk3RY+XSkp+xdgRTVvAzNaa\n2e1mdnsLbYBG2dyAM7rBmPJ9TH9GKcnPm3MueriX7LJvRZ+5TaqZjUZrm9l7JO2RdG3VZd19k6RN\n0Xo4RtUiLqLQjSkPyvRnNDYeoId8LLrvLxu1g7OZvUmjgSUn+VOjynZKSv41lkXTAASM/gyEpVZw\nNrPVki6QdKK7/yjx0o2SrjOzyzUaQHKUpH9p3EpUVnRVn7au+pOWTaZdWagLk6wOTHPWTH9GV9Ky\nz9Cy6b4z5CxlTqX6mKSXS3qupEclrddoNOfTJX0vmu0Wd/+taP73aHTcao+kd7j7TePrTNkGZTBg\noS5OpaI/o1DdO9010UbQ7ivQlrlVb+unUrn7mSmTr8iZ//cl/X6VRgCYDPozMAxcvhMAUEkXh5TW\nPtl8HYve23wdoeDynQAABIbgDABAYAjOAAAEhuAMANjPNJ86OElN9iPBGQCAwBSe5zyRRph9R9J/\nSvpu323J8FyF2bZQ2yWF27ZQ2yUtbNvh7v68vhpTl5k9Ien+vtuRYUh//1CE2i5pOG2r3JeDCM6S\nZGa3t33BhbaE2rZQ2yWF27ZQ2yWF3bYqQn4ftK26UNslTXfbKGsDABAYgjMAAIEJKThv6rsBOUJt\nW6jtksJtW6jtksJuWxUhvw/aVl2o7ZKmuG3BHHMGAAAjIWXOAABABGcAAILTe3A2s9Vmdr+ZbTez\ndT235TAz+5KZfcPMvm5mb4+mbzCznWZ2V/RzSk/te9DM7onacHs07RAzu9nMvhX9fvaE2/SCxH65\ny8x+aGbv6GufmdmVZrbbzO5NTEvdRzbyp9H/3tfM7Nge2vZ+M/vXaPufNrODo+nLzezJxP77cJdt\na0so/Zm+XLtd9Of67Wq3L7t7bz+SFkn6tqQjJR0g6W5JR/fYnkMlHRs9PkjSNyUdLWmDpN/tc19F\nbXpQ0nPHpv2hpHXR43WSLuv57/mIpMP72meSVkk6VtK9RftI0imSbpJkkl4i6dYe2varkhZHjy9L\ntG15cr4h/ITUn+nLrf096c/l29VqX+47cz5O0nZ3f8DdfyJpi6Q1fTXG3Xe5+x3R4yck3SdpaV/t\nKWmNpM3R482STuuxLSdJ+ra7P9RXA9x9q6THxiZn7aM1kq72kVskHWxmh06ybe7+OXffEz29RdKy\nrrY/AcH0Z/pyK+jPFdrVdl/uOzgvlfRw4vkOBdKBzGy5pGMk3RpNOj8qV1zZR7kp4pI+Z2bbzGxt\nNG2Ju++KHj8iaUk/TZMknSHpY4nnIewzKXsfhfb/9xaNvvnHjjCzO83sK2b2K301qoLQ9qck+nID\n9Of6GvflvoNzkMzsWZL+WtI73P2Hkj4k6eclvVjSLkl/3FPTXubux0o6WdJ5ZrYq+aKPaii9nBtn\nZgdIeo2kT0STQtln++lzH+Uxs/dI2iPp2mjSLknPd/djJL1L0nVm9tN9tW+o6Mv10J/ra6sv9x2c\nd0o6LPF8WTStN2b2N
I0687Xu/ilJcvdH3X2vu89L+ohG5buJc/ed0e/dkj4dtePRuHQT/d7dR9s0\n+pC5w90fjdoYxD6LZO2jIP7/zOxNkk6V9Ibow0bu/mN3/170eJtGx3J/cdJtqyiI/RmjLzdCf66h\nzb7cd3C+TdJRZnZE9E3tDEk39tUYMzNJV0i6z90vT0xPHrd4raR7x5edQNt+yswOih9rNPjgXo32\n19nRbGdLumHSbYucqUQJLIR9lpC1j26U9MZolOdLJP0gUS6bCDNbLekCSa9x9x8lpj/PzBZFj4+U\ndJSkBybZthqC6c/05cbozxW13pe7Gs1W9kejEXbf1OjbxHt6bsvLNCqRfE3SXdHPKZKukXRPNP1G\nSYf20LYjNRr9erekr8f7StJzJH1B0rckfV7SIT207ackfU/SzySm9bLPNPpA2SXpvzU65nRO1j7S\naFTnX0T/e/dIWtlD27ZrdJws/n/7cDTvr0d/57sk3SHp1ZP+u9Z8j0H0Z/pyo/bRn+u1q9W+zOU7\nAQAITN9lbQAAMIbgDABAYAjOAAAEhuAMAEBgCM4AAASG4AwAQGAIzgAABOb/Axcbmm6Kh2yMAAAA\nAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "tags": [] + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "8qYiiUqGWR1r", + "colab_type": "text" + }, + "source": [ + "### Visualise Instance Seg + Obj Det Results" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "PWwT_DbRT2ID", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 486 + }, + "outputId": "21794a6f-800b-4727-e899-41df4cb44ca3" + }, + "source": [ + "\n", + "# Visualise Results\n", + "rows = 2\n", + "cols = 2\n", + "fig = plt.figure(figsize=(8, 8))\n", + "for i in range(1, rows*cols+1):\n", + " img = dataset.load_image(i)\n", + " image = np.array(img)[:, :, [2, 1, 0]]\n", + " result = vis_demo.run_on_opencv_image(image, objDet=\"True\")\n", + " \n", + " fig.add_subplot(rows, cols, i)\n", + " plt.imshow(result)\n", + "plt.show()" + ], + "execution_count": 52, + "outputs": [ + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAecAAAHVCAYAAADLvzPyAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzt3X+0XXV55/H3k5ukoMUGDCukCTFg\nUYmu4VcIUCuwxDqBUoOrmoKdGhXNzBq0akdt0HYRZ1rB4ujYjtVGYQxTMAYrhlWVgoxgp0qAQJQf\nEYwYJFkhQSPVpY5wk2f+OPsk+56799n77B9nf/c5n9daWffcffaP5+7km+c+z/7ufczdERERkXDM\naDoAERERmUrJWUREJDBKziIiIoFRchYREQmMkrOIiEhglJxFREQCU1tyNrPlZvaImW03szV1HUdE\n6qWxLDJ8Vsd9zmY2ATwK/C6wE7gHuMTdH678YCJSG41lkWbUVTkvA7a7+2Pu/gywAVhR07FEpD4a\nyyINmFnTfhcAT8S+3wmckbbynOdN+PyjZ9UUikg7ffexX/3I3Y9uOIyBxjLAzLlzfNYL5uc+wEvt\nuQA85D9PfG/Q5V1J7+fZR9Uxld0ur34xl1l30jr/N5/EbL7NM33XPYnZB1/H1+0u77d92rajYP+O\nnRz40T4bZJu6knMmM1sNrAY4Zu5M1l+5qKlQRIJ0xh9+7/GmY8grPp5nLTqG4zdfl3vbe2eeDsBL\nJ+9JfG/Q5V1J7+fZR9Uxld0ur34xl1n3Rzavs83EIubt/2H//U4c+n88vm53eb/t07YdBfvOuHDg\nbepKzruAY2PfL4yWHeTu64B1ACe+8DA94FskTJljGaaO58NPOzH3eH4oIzG9dPIeHhogkbRRW36u\neft/yJ4cSTZtW4A9ORK8dNSVnO8BTjCz4+gM5IuBN9R0LBGpT+NjedDklbV+nmQ/yDEfilXr8dej\nbM9EcqcznsCLbNvv/XFTS3J290kzezvwz8AEcK27P1THsUSkPhrLIs2o7Zqzu38F+Epd+xeR4dBY\nzqct7ekyslrSWW3rfu8VbZmPKj0hTERkhIxLa33UKTmLiIgERslZRIL20MzTK60GXzp5z5RZ4GWP\nmWd/3X0OsnxQ3TiydGfIJ62b9XPvmViUOGGruzxP2ztt+bz9P1RLO6ax+5xFZDx9d+I9B19POOyf\neWff9Sf8nOhFfOmdU/aTvTzJnamJKPmY2cful9gm/JzU/XW3O3jcEvKc06T4X7L/IwdfZ93iVmZG\ntWZj56PKWUREJDC1fPDFoE584WGuJ4SJTHXGH35vi7svbTqOQR1+2one7wlh3514z5QqLRT5K+5q\nhHIOuj93WjzdJ4RJcfvOuJBn7/1OOx7fKSIybMNOwP2kxRJK0pZmqa0tIiISGFXOIjLyQqqYs2S1\nmWU8KDmLyEhrU2KOi8etRD1+1NYWEREJjCpnERlJRSvmi9+2prIYNnz6qin7jX8/CLW6x48qZxER\nkcCochaRkVHm+nKVFXPaPuPfF6midR16fCg5i0jr5U3KdSTgotJiyZu0Q32Yi1RDbW0REZHAqHIW\nkVYapIUdUsWcpRtrngpaE8VGl5KziLTOqCbmuEGuT6vFPXrU1hYREQmMKmcRaY1xqJiT5Gl1q8U9\nWpScRaQV8iTmUUrISfK0utXiHg1qa4uIiASmcHI2s2PN7Otm9rCZPWRm74yWH2Vmt5nZ96KvR1YX\nrojUIeTx/N2J92RWzRe/bc3IV829+v28ec6ZhK1M5TwJ/Bd3XwKcCVxmZkuANcDt7n4CcHv0vYiE\nrbXjedySclzWLyVK0u1VODm7+253vy96/TNgG7AAWAGsj1ZbD1xUNkgRqZfGs0hYKrnmbGaLgVOA\nzcA8d98dvfUkMK+KY4jIcIQ0nvtVfePYyk6TdR5UPbdP6dnaZvbrwD8C73L3n5rZwffc3c3MU7Zb\nDawGOGauJo2LhKCK8Txr0TGl48hzjVlklJWqnM1sFp2BfL27fzFavMfM5kfvzwf2Jm3r7uvcfam7\nL53zvIkyYYhIBaoazxNz5wwnYJERVrhktc6v1NcA29z9o7G3bgZWAVdFXzeVilBEahfKeFbFXFzW\ng0r0kJJ2KdNPfjnwx8ADZrY1WvZ+OoN4o5ldCjwOrCwXoogMgcazSEAKJ2d3/7+Apbx9XtH9isjw\naTyPjovftibzMZ+qnsOnJ4SJiIgERslZREQkMErOIhIETQYTOUQ3GItIo5SURaZT5SwiIhIYJWcR\nEZHAKDmLiIwYfVpV+yk5i4iIBEbJWURkROWpoCVMSs4iIiKBUXIWEREJjJKziIhIYJScRaQxegCJ\nSDIlZxERkcAoOYuIiARGyVlERCQw+uALERk6XWsW6U+Vs4iISGCUnEVERAKj5CwiIhIYJWcREZHA\nKDmLiIgEpnRyNrMJM7vfzP4p+v44M9tsZtvN7PNmNrt8mNmWrXyUZSsfHcahREZWKONZZNxVcSvV\nO4FtwPOi7z8MfMzd
N5jZp4BLgU9WcJxclq18lLs3vmhYh2vsmEX0++WlbPxp+86z3zrjkoEFNZ5F\nxlWpytnMFgK/B3wm+t6AVwJfiFZZD1xU5hgiMhwazyLhKFs5/w/gfcAR0ffPB55298no+53AgqQN\nzWw1sBrgmLnlC3hVWOm6lWnaOSpzOSDvvvu93+/vri1diRFRyXieteiYmsMUGX2Fs6KZXQjsdfct\nZnbuoNu7+zpgHcCJLzzMi8Yh5d298UWZSTRJmcRZ5HhSnyrH8+GnnajxLFJSmZL15cBrzOwC4DA6\n16g+Dswxs5nRb9sLgV3lwxSRmmk8iwSkcHJ298uBywGi37Tf4+5/ZGY3Aq8DNgCrgE1FjzHIJKO0\nSixpeXy/g7R6y1R5g+yvyqoylLawquywDWM8i0h+ddzn/GfAn5rZdjrXrK6p4RgiMhwazyINqORT\nqdz9DuCO6PVjwLKy+0yrtOqcvJR27LL3T6ftT5OhJER1jOdeL9n/ESD906k2fPoqQJ9OJeMruI+M\nzJNA65q8lLbf7veDJss8+0vSloRcdCJZfH39AiIiMp0e3ykiIhKY4CrnLEWrrCoq8SJVc+/rUZXV\nBeh3DrKeEDYO529cvWT/R1Jb29Bpb6u1LeOodcm5TcahXVv0UsEg24/DeRQRiVNbW0REJDCqnMeA\n7hMWGU/dWe9psi4rSHNamZyrTja910WLPge6d59F91flz5fnmm2ZW7r6xZp3hnzd50Bk3ORJyhI2\ntbVFREQCE0Tl/N0jZ3HW6+d3vvFzANjfp9qbiNahuw2wP/p6VmxZv+VJuuumVZoTfs6UY8a3S9p/\nmf11tzv4s5aR45xmGcYnW4mISEcQybnrWzfuPvj6btLbmd9KWNZdv/e9tOVJybRoIkzbLml5/GdM\n+jlg6s+etk4RE68/Z8rx8+rXWi7bdlbbWkRkOrW1RUREAhNU5VynPG3tYeiNo0glKzJK9JxtkelG\nMjmHkojz6MaqJC3jTk8Lq0bVM7Xn+p4y4QjwU3924G3U1hYREQnMyFXObaqa41RBi0gZurd5tKhy\nFhERCczIVM5lKuarbnprhZF0rHntZwptF/85VEXLuNHkMJGOVifnO2f+ReEkWEdCzrP/QeI96/Xz\nlaBFEmhy2FRqaY8etbVFREQC08rK+c6ZfwEMVoXWXSnn1RtH1s+gNreMo6z2NqjFDaqYR1nrknMn\nWfVPaKEk4jy6seb5RUMzumXc5PlIw3FtcfdLzErK7ae2toiISGBKVc5mNodOGfsywIG3AI8AnwcW\nAzuAle7+k1JRkn82dpuq5rhBK2hVz1K1YY7nQajFPZVa2eOhbOX8ceAWd38JcBKwDVgD3O7uJwC3\nR9+XkicxX3XTW1ubmOO6P0fWz3LW6+e39oErEqyhjOei8iSdrMTVdlmtbCXm0VE4OZvZbwBnA9cA\nuPsz7v40sAJYH622HriobJAiUi+NZ5GwlGlrHwc8BfwvMzsJ2AK8E5jn7t2e65PAvKSNzWw1sBqA\nRb+WeIC8FfOouuqmt+aaza0Wt1SgsvE8a9ExtQU5SIu7q+2t7jzdAFXMo6dMcp4JnAq8w903m9nH\n6Wl5ububmSdt7O7rgHUAtvSIaeuMe2LuGuRatEgJlY3nw087MXGdKuWZxd3Vttncg7TmlZRHV5lr\nzjuBne6+Ofr+C3QG9x4zmw8Qfd1bLkQRGQKNZ5GAFK6c3f1JM3vCzF7s7o8A5wEPR39WAVdFXzcN\nst+sinkcquUk/Vrcuv9ZyqprPNcpXjXmuRc6SQgVddFJbKqaR1vZh5C8A7jezGYDjwFvplONbzSz\nS4HHgZV5d9YvMY9rUo7LanErSUtJlY7nYcpzLTpJPDEOM1GXmVWupDweSiVnd98KLE1467wy+xWR\n4dN4FglHKx7fqap5qqxZ3JrBLeNqkIlivZKq2Sqr6SruwVbVPD70+E4REZHAtKJyFhHJq7e6LFpJ\nQxhPHFO1PJ6UnFtKk8NE8inT6m6CkrGA2toiIiLBUeXccpocJpItrRoNoaJWpSxJlJxFZGwN8iCT\nuo4rkkRtbRERkcCocpZaLVv5aOLyuze+qNLtu+tl7XfZykdzrZPnmEn769027Vh51xtVIbSTmzTu\nP/8oeGjm6anvvXTyntL7V3KW2vRLhFnJNOn9tKRZlbLHHCTx9x4j7y8Xo0AtXWmzblJOS8D9kvYg\n1NYWEREJjCrnEZD3nuc26FdB3r3xRbVVz2lVb3dZv+OWbanX+XOJyHC9dPKezOo6j6CTs56pPZis\nJN2EpKRz98YX5bp+OwxVtJMHuYatJCyDSmuT5vmPf5BtH5p5euY+8ySdQa7FJu0vvv0greOsdau4\nDpznHFVFbW0REZHABF05S7vlmak8DhOgusbpZ5Xy8kw8GqRarGqiUj+9MQ1yzLwVbtoxsqraYVa9\nVVBylqHTNVaR+qQloe6yOpJ0WmLNe8wyLfWsY7QpIceprS0iIhIYVc5Sm0EneMUr6qpbwFkPFqlz\nMtqMtYsBOLB2R+H7vmV89VaE3Uowz+SqYaiiXTzIBLNhtOdDoMpZREQkMKqcpTJnPvzMlO8PkH3r\nUNI2FNiOWHWaJF69Ttu2YKzd7ZKWp0k7Rjc+ka48txG19XpqEeP0s4KSs9TgriWzgWIJJ2ubtOSb\nd/u094smx0G2UwKWKtQ5sUuyDevygdraIiIigSlVOZvZu4G3Ag48ALwZmA9sAJ4PbAH+2N3z9/2k\n9QZp80o4NJ7DsO0vXw8c6hKldlzW7ji4LhyqtA6s3dG/S9OzXeby2H57Y+weN0+svdv1XR4T/7lg\n8Pu+q6x044/mTFPV/dSFk7OZLQD+BFji7r80s43AxcAFwMfcfYOZfQq4FPhk6Uglt6tuemvmc7a/\ndePuYYYkgdN4DkNSksq6lJOkyDYz1i7uu13W+0WPO6ii9zNXlTSHdVmhbFt7JnC4mc0EngPsBl4J\nfCF6fz1wUcljiMhwaDyLBKJw5ezuu8zsI8APgV8Ct9Jpez3t7pPRajuBBUnbm9lqYDUAi36taBgS\nCLWy0/3Lq+cA8Ipbn244knRVjudZi46pP+ARk1Qx1zVJMas6LrLvYU+oPPHPb8y9XV0Tt+qeEFam\nrX0ksAI4DngauBFYnnd7d18HrAOwpUd40TikWUWScjdZQSdhxb/P8q/fPHnaPtqiX8xNJ+4qx/Ph\np52o8SxSUpm29quAH7j7U+7+LPBF4OXAnKgtBrAQ2FUyRhGpn8azSEDKzNb+IXCmmT2HThvsPOBe\n4OvA6+jM8FwFbCqyc32Wczt072nOqqDTqsY2VsAjqtbxLCKDKXPNebOZfQG4D5gE7qfT1voysMHM\n/jJadk0VgYpIfTSew/YP394W9P6q9h9OOrHpEBpX6j5nd78CuKJn8WPAsjL7FZHh03gWCYce3yml\nFG1ni0gzbnjtL5sOQXLQ4ztFREQCo+QsIiISGLW1R1Daozuh+sd2Zs3W7t6/q
/a2iEh+qpxFREQC\no+QsIiISGLW1pRTN1hYRqZ4qZxERkcAoOYuIiARGyVlKuWvJ7IMztpO84tanG//EJRGRtlFyFhER\nCYwmhI2Qfvc3i4hIewRbOSvRtMOZDz/Td8b2v7x6jmZsi4gMKNjkLCIiMq7U1h4BWV2G7iM7z3r9\n/MqPrcd3iohUT5WziIhIYJScRUREAhN0cl7z2s9oYliGvC1tERFpj6CTs4iIyDhSchYREQlMK2Zr\nr3ntZ7jqprc2HUZQ1M4WERldqpxFREQCk5mczexaM9trZg/Glh1lZreZ2feir0dGy83M/sbMtpvZ\nd8zs1EGC6VftaXLYIaqapahhjmcRKS5P5fxZYHnPsjXA7e5+AnB79D3A+cAJ0Z/VwCcHDehbN+7O\nTNLjKusXlKxzJ8KQx7OIFJOZnN39G8C+nsUrgPXR6/XARbHl13nHXcAcM6v+sVQiUojGs0g7FL3m\nPM/duyXak8C86PUC4InYejujZdOY2Wozu9fM7uWpZ6e9rxb3VKqYpUaVjuf9P9Lnd4uUVXpCmLs7\n4AW2W+fuS919KUfPSlxHLe7x/EVEmlPFeJ6Yq+eoi5RVNDnv6ba3oq97o+W7gGNj6y2MlolIuDSe\nRQJTNDnfDKyKXq8CNsWWvzGa5Xkm8G+xdlktRrmyzPNzqZ0tFQhmPItIR+ZDSMzsc8C5wFwz2wlc\nAVwFbDSzS4HHgZXR6l8BLgC2A78A3lxFkN+6cXfmxx3GE1mbH1iS9xeNUJJy2kdFdumjIsMSwngW\nkWyZydndL0l567yEdR24rGxQIlIPjWeRdmjF4zvhUKWYVUHDoeqzTRV0W1vYdy2ZDaRX0K+4tTNz\nVxW0iEh+rUnOXfEENUiru1dTibvo9fEQE7OIiNRDz9YWEREJTOsq57g8E8XSDGMCWRWzyFUxi4iM\nH1XOIiIigWl15QyDTRRLU8W16TrutVbVLCIynlqfnLsGmSg2iGE/4EQJWURE1NYWEREJzMhUznFV\ntLqHTRWziIh0jWRy7uqX8JpK3ErCIiKSRW1tERGRwIx05dxPXRPI0o4hIiKSV1DJuU3XiPMYtZ9H\nRESGQ21tERGRwARROb/kJ8+yXi1gERERQJWziIhIcJScRUREAqPkLCIiEhglZxERkcAoOYuIiAQm\niNna0l5nPvxM3/f/5dVzhhSJiMjoUOUsIiISmMzkbGbXmtleM3swtuxqM/uumX3HzG4yszmx9y43\ns+1m9oiZ/fu6AheRwWk8i7RDnsr5s8DynmW3AS9z938HPApcDmBmS4CLgZdG2/ydmU1UFq0E564l\ns7lryezU919x69O84tanhxiRZPgsGs8iwctMzu7+DWBfz7Jb3X0y+vYuYGH0egWwwd1/5e4/ALYD\nyyqMV0RK0HgWaYcqJoS9Bfh89HoBncHdtTNaNo2ZrQZWAxwzV/PS2koTwkZO6fE8a9ExdcYnMhZK\nTQgzsw8Ak8D1g27r7uvcfam7L53zPHXKRJpW1XiemKtfyETKKlyymtmbgAuB89zdo8W7gGNjqy2M\nlolIwDSeRcJSKDmb2XLgfcA57v6L2Fs3AzeY2UeB3wROAO4uHaWI1EbjOXz/4aQTmw5BhiwzOZvZ\n54BzgblmthO4gs5szl8DbjMzgLvc/T+5+0NmthF4mE577DJ3319X8NK87kzttGvP3ZnauvYcBo1n\nkXbITM7ufknC4mv6rP9XwF+VCUpE6qHxLNIOmiYtpWi2tohI9ZScpVZ6AIlIthP//EYAtv3l6xuO\nJHzdczXq9GxtERGRwCg5SylZj+8UkfzGpSos4sQ/v3Gszo/a2lKJu5bMzrz+LCLZxikBSTpVziIi\nIoGxQw8DajAIs6eAnwM/ajqWFHMJM7ZQ44JwYws1Lpge2wvc/eimginKzH4GPNJ0HCna9PcfilDj\ngvbENvBYDiI5A5jZve6+tOk4koQaW6hxQbixhRoXhB3bIEL+ORTb4EKNC0Y7NrW1RUREAqPkLCIi\nEpiQkvO6pgPoI9TYQo0Lwo0t1Lgg7NgGEfLPodgGF2pcMMKxBXPNWURERDpCqpxFRESEAJKzmS03\ns0fMbLuZrWk4lmPN7Otm9rCZPWRm74yWrzWzXWa2NfpzQUPx7TCzB6IY7o2WHWVmt5nZ96KvRw45\nphfHzstWM/upmb2rqXNmZtea2V4zezC2LPEcWcffRP/2vmNmpzYQ29Vm9t3o+DeZ2Zxo+WIz+2Xs\n/H2qztiqEsp41lguHJfGc/G4qh3L7t7YH2AC+D5wPDAb+DawpMF45gOnRq+PAB4FlgBrgfc0ea6i\nmHYAc3uW/TWwJnq9Bvhww3+fTwIvaOqcAWcDpwIPZp0j4ALgq4ABZwKbG4jt1cDM6PWHY7Etjq/X\nhj8hjWeN5cr+PjWe88dV6VhuunJeBmx398fc/RlgA7CiqWDcfbe73xe9/hmwDVjQVDw5rQDWR6/X\nAxc1GMt5wPfd/fGmAnD3bwD7ehannaMVwHXecRcwx8zmDzM2d7/V3Sejb+8CFtZ1/CEIZjxrLFdC\n43mAuKoey00n5wXAE7HvdxLIADKzxcApwOZo0dujdsW1TbSbIg7camZbzGx1tGyeu++OXj8JzGsm\nNAAuBj4X+z6Ecwbp5yi0f39vofObf9dxZna/md1pZq9oKqgBhHY+AY3lEjSeiys9lptOzkEys18H\n/hF4l7v/FPgk8ELgZGA38N8bCu133P1U4HzgMjM7O/6md3oojUy/N7PZwGuA7lP7QzlnUzR5jvox\nsw8Ak8D10aLdwCJ3PwX4U+AGM3teU/G1lcZyMRrPxVU1lptOzruAY2PfL4yWNcbMZtEZzNe7+xcB\n3H2Pu+939wPAp+m074bO3XdFX/cCN0Vx7Om2bqKve5uIjc5/Mve5+54oxiDOWSTtHAXx78/M3gRc\nCPxR9J8N7v4rd/9x9HoLnWu5Lxp2bAMK4nx2aSyXovFcQJVjuenkfA9wgpkdF/2mdjFwc1PBmJkB\n1wDb3P2jseXx6xavBR7s3XYIsT3XzI7ovqYz+eBBOudrVbTaKmDTsGOLXEKsBRbCOYtJO0c3A2+M\nZnmeCfxbrF02FGa2HHgf8Bp3/0Vs+dFmNhG9Ph44AXhsmLEVEMx41lguTeN5QJWP5bpms+X9Q2eG\n3aN0fpv4QMOx/A6dFsl3gK3RnwuA/w08EC2/GZjfQGzH05n9+m3goe65Ap4P3A58D/gacFQDsT0X\n+DHwG7FljZwzOv+h7AaepXPN6dK0c0RnVucnon97DwBLG4htO53rZN1/b5+K1v2D6O95K3Af8PvD\n/nst+DMGMZ41lkvFp/FcLK5Kx7KeECYiIhKYptvaIiIi0kPJWUREJDBKziIiIoFRchYREQmMkrOI\niEhglJxFREQCo+QsIiISGCVnERGRwCg5i4iIBEbJWUREJDBKziIiIoFRchYREQmMkrOIiEhglJxF\nREQCo+QsIiISGCVnERGRwCg5i4iI
BEbJWUREJDBKziIiIoFRchYREQmMkrOIiEhgakvOZrbczB4x\ns+1mtqau44hIvTSWRYbP3L36nZpNAI8CvwvsBO4BLnH3hys/mIjURmNZpBkza9rvMmC7uz8GYGYb\ngBVA4oCec8Rc/82jF9cUyuh7zlGdr7/Y12wcoWjD+XjOUdnxbfvBlh+5+9HDiSjVQGMZwGY/33nO\nwiGF1zK/9Vji4tN48bRlW3ik77q976ctzxI/dta2eY5R9f6K6j2n046x/fjp2/zWLLZsfzbf/n9r\n1qF9Z23zi534Mz+2XDuO1JWcFwBPxL7fCZwRX8HMVgOrAY6Zu4h/+NA9NYUy+k69uPN3ft+G6rsg\nbdSG83HqxZYZ32mXzHh8SOH0kzmWYep45vCFzDzn1qEE1zYHvrQycfm9due0ZRN+Tt91e99PW54l\nfuysbfMco+r9FdV7TnuPMeOijdO32TSPWSv25Nv/pnkHX2dtM3nnq3PtM66u5JzJ3dcB6wCWHL80\n3P9FRSRTfDzbnJM0njP0JoZZTP/PfQbTk0d83fj75y65gYkPFUtwgyTGPOtWvb9Xvv8iAO54+A25\n9wvTz2n8fKX9kpQ3MQ+6bhF1JeddwLGx7xdGy6Qi3eowbVm8KkuqJAdZN8+x07bJOnbWsYoet479\ntaEir4HGcsC6Seb/NBxHnf7Ph74Uvep8Tap4R1Fds7XvAU4ws+PMbDZwMXBzTccSkfpoLIs0oJbK\n2d0nzeztwD8DE8C17v5QHccaV4NWu0XWTdu+XxU86LHzbF90u0H2l7XPMsdrM43lcKW1ZgfVbRtX\n7VDFW60DX1o5FtVzbdec3f0rwFfq2r8MLs8kpH7bwvSE1v0+K3n1O3a/XxjStrtvgxdKmHnOQVI8\nY9bKnkJjOSxFk3JdSbjo8cok7+45GOUkrSeEiYiIBKax2doyfFW1sqs8dloFXOfkq3FtUUv75ama\nh10hF5UnzqzqepRb3ErOMnbGuUUt7TRKSXkQr3z/RbkSNIxei1ttbRERkcCochYRCVi/qnkUq+Ve\n3Z9x3FrcSs6SKX5deFgt4fgs8GG3obNutaoinn7n9OA18UtKH0ZaLKuVPQ6JOS5Pkh6lFrfa2iIi\nIoFR5dxCy7atmvL9JNcBU2chz7zijdPe790uSdq6ScfIiivvsfutM8l1mbOr02JNPOYVMPnB6/ru\nr99jSOug2eMSp4q5v7yTxNpePdfyec6DWnL8UtenUuW3bNsq7j5xfdNhNK5smzn083jaJTO2uPvS\npuMYlM05yfWpVMnytF2VnPPrl6SzzvEwk/fkna/Gn/72QL+Fq60tIiISGCVnqc2+f/1y0yGItMq5\nS25oOoRW6ddFOPCllZU9f7wJSs4iIiKB0YQwqVy8Yu6+Purlv1d4f0mfPa1JVDKKsiY66XrzdFm3\nWLX19iolZ6nc8qOmD4K7KZ6c4wl52PdbiwyDJoFJL7W1RUREAqPKWSrT717mKm5bUrUsIuNClbNU\nIs8DTpZtW5VrPZFxopa2JFFyFhERCYza2lJKkUo49CdziQyDKubhOnfJDdzx8BuaDiM3Vc4iItJ6\nWb/MZN2mFholZxERkcAoOUthWS3tW/alt+00OUxEJF3ha85mdixwHTAPcGCdu3/czI4CPg8sBnYA\nK939J+VDlVCUScoSJo1nGQUJ0HGYAAAbDElEQVR5nxbWBmUq50ngv7j7EuBM4DIzWwKsAW539xOA\n26PvRSRsGs8iASmcnN19t7vfF73+GbANWACsALpTcdcDmnIoEjiNZ5GwVHIrlZktBk4BNgPz3H13\n9NaTdNpkSdusBlYDHDN3URVhyBD0a2kntbO7y5Ket93dn26rCkvZ8czhC2uPse36tVd1C5VABRPC\nzOzXgX8E3uXuP42/5+5O5/rVNO6+zt2XuvvSI484umwYIlKBKsYzs48aQqQio61UcjazWXQG8vXu\n/sVo8R4zmx+9Px/YWy5EGQWauR0+jWeRcBROzmZmwDXANnf/aOytm4Hu/7SrgE3FwxORYdB4FglL\nmWvOLwf+GHjAzLZGy94PXAVsNLNLgceB9sxdl1RV3D6l689B03gWCUjh5Ozu/xewlLfPK7pfERk+\njWeRsOgJYSIiIoFRcpZMWbdPDfpEME0OExHpTx8ZKan0mE4RkWaochYREQmMkrM0Iqsdrva2iBQ1\nCk9ZU3KWRFVfZ+63r6JxiIgkSftUqjZRchYREQmMJoTJFJoEJtKcUWjHSjVUOUvjdP1ZpGMU2rFS\nDSVnERGRwKitLYDa2SIiIVHlLMHQzG0RkQ4lZxERkcAoOUtQLW1NDhMRUXIWEREJjpKziIhIYDRb\ne4yF1M5OO/byozYmvr9s2yruPnH9MEMSqcyMizZy4EvJ46t7r7MeSDLeVDmLiIgERpWziIiMhKwn\nrM24qNOJS+tahESV85gKuaUdp5nbIpJH3sTcFkrOIiIigSmdnM1swszuN7N/ir4/zsw2m9l2M/u8\nmc0uH6aIDIPGs0gYqqic3wlsi33/YeBj7v5bwE+ASys4hlQkqxWc9RCQJuR5MIlURuNZJAClkrOZ\nLQR+D/hM9L0BrwS+EK2yHtD9ACItoPEsEo6ys7X/B/A+4Ijo++cDT7v7ZPT9TmBByWNIRbIqZhl7\nGs812293AjCLPZkzh3W/cz6jNhGsq3ByNrMLgb3uvsXMzi2w/WpgNcAxcxcVDUNyaMvM7Cy37FvZ\n96EkgB5MUlCV45nDF1YbXACe3TSPWSv2lN5Hkn4PJIFO8gktQd925bv53cs/1nQYfXV/EWJT0rt3\nMot6/j6B0v9WoFzl/HLgNWZ2AXAY8Dzg48AcM5sZ/ba9ENiVtLG7rwPWASw5fqmXiENEyqtsPNuc\nkzSeRUoqnJzd/XLgcoDoN+33uPsfmdmNwOuADcAqUn5vESlCj/Wsh8Zz/Wat2HOwQp7B1H+/anEX\nk9bS3m939q1eD3xpJfs3RZcYBqxyuxVz2nbPbpqXuU4eddzn/GfAn5rZdjrXrK6p4RgiMhwazyIN\nqOTxne5+B3BH9PoxYFkV+5XyRnUSmK4/10fjuTltvP7chKxJYE2atWJP3+vReenZ2iIiPZL+c40v\ni7crk1qYaevGZ2unOTiRKcGEnzPl+9uufDfAlMlZ3WVxeSZvJW2Xd9sq9pf0syTpl5gP/uJyZY4A\nA6fHd4qIiARGlfOIGpXbp0Sa0DuRJ++tVGUnAsWP021v91bSaZPD8lbRvYpuN8j+svab53h5Wtl3\nPPyGzHV6VXGbXB2UnEfQqF5n7pVn5jbo2rMMR5n/5Psl9Qk/J7HV3UlWnaSWdd9x2vtpy3/38o8V\nStB57n9OSt79tsl7fXnQh410z3UV14froLa2iIhIYFQ5j5BxbWX3m7kNuvdZhqOKVnZc1r3PcWmV\nZ3d5bxWcd/JVHmn7LrJtVzeueNXc7R70ToqLV8x5q+D9dueUO/bTOhf9Ohp5jtVdZ+nSWbnii
lNy\nlpGgFreMqhkXbcx89EsTt1ilXauO/0IwSKs9nqjramV3k2XapYI822f9EtY7U79o21xtbRERkcCo\nchYRGQHxajPkB5WktdS77eo8FW2RT5qaUvXGCvM81W2eWfjxdXqr58ntzw4cr5LzCBjXa81JdP1Z\n2ib+n3lWyzTrCWJd09vCU6/t5m095zHIw0b6ta7ztpnzJOakhNv7MJhB29O9y9MeOpP0usg1Z7W1\nRUREAqPKueVUNYtUy09+b8LS66ZURDOveOOU99K3m76f5HU7y7Paq37ye7G1Lzi04IM5DhlJnmT1\n7szZ1V+ffX/i8qnn4JDJK6/ru7/e2dbQv2qu6pGceSdmVbFeb1WutvYYaSopv+pvL+Rr7/inWvZd\nlbwzt0XS2Narp3zf+5+tcfW09+LL0nQT04ytU/9t5r0Nq/cYSddQ+x0373I4lDB97ePJ263tf6z9\ndue0/acl4fh68XVmXLSx8wksBfQ+ZCR+jg98aeW0j+3s3S6+bdr78fWqfsqY2toiIiKBUeU8gnqr\n5lf97YUApSre7j7aJK2Cnvxgp+2W1pYTaZu8E8WqlPZQkH7i9xf3267obOwp8Z1yBgCzVmw++P6M\ntYsBMF7Qs96Og68n7t88bXmWQSb15aXKWUREJDCqnCWX0K8zj5JTL7bOi0uajUPaJV5tDlJFx6/x\nDnKvcdL2vfyD069Xd69V99uuW/XGzYi2O7BpR+I2M9Yu5sDaHdO2P/R6x8H3gdhT16buL+nY8WXd\nyjpN0nVqPb5TJEG/e5/1WE8ZBfHZ37b16sS2cFrCjifkQVrV3RbxoLK2m5JAB9y+6HtVbNOvnV1k\ntrba2iIiIoFR5dwig9w+lTSBK74s3qZOuj1qkHX7HScuabve/SVtm6el3m/CWtGW/MH2co/7Nnjq\n+mnvicRVNXlryj3PET/5vYm3P2UZZGJXYrtaKqXk3BL9EnPSPc1JCW+QJFckoSVtO8gs76xfEvJu\nl7RNnnufu63tblLuTbRpyTptuUivIrOQ0/R76Ek3affer11W0vXYcdM9B1nXnstSW1tERCQwpSpn\nM5sDfAZ4GeDAW4BHgM8Di+lMg1vp7j8pFaXUruy90Gnt7u6yfhVw1rHT3k9bnueYSdIq5qT14uuM\nSitb47kd8j0mdOq6VVTQoVfNW77625x2/jenfA/kXtYVf69JZdvaHwducffXmdls4DnA+4Hb3f0q\nM1sDrAH+rORxpGZlknKZ7bO2/do7/in1+nldt3eNcZta4zlwgyTmYeomuNOXz+SeWyZL7ev+r76B\nLfx29op94iiyrPe9ppN04ba2mf0GcDZwDYC7P+PuTwMrgO59KeuBcD9YVEQAjWeR0JSpnI8DngL+\nl5mdBGwB3gnMc/fd0TpPAvk+4kMkEKPSph6QxnPAsirmKy87jMs/8f/6bl/15DCpV5kJYTOBU4FP\nuvspwM/ptLwOcnenc+1qGjNbbWb3mtm9P/nZUyXCGG37/vXL7PvXLzcdhoy+ysYzz+yrPViRUVcm\nOe8Edrp7dz75F+gM7j1mNh8g+ro3aWN3X+fuS9196ZFHHF0iDBGpQGXjmdlHDSVgkVFWuK3t7k+a\n2RNm9mJ3fwQ4D3g4+rMKuCr6uqnPbqQCaffudk1yYd/1Jrkwcx+p617ReT35t9f1/ZSntGPkOXZi\n/FdsZPJv+3+6VNbPXVTaw0aqeghJE211jef2u/KywwBS29tVztyW+pWdrf0O4PpoZudjwJvpVOMb\nzexS4HFguJ9jNkKqbmd3PyoR6vm4xPj+h6mq43bPSdb+8j6cpIU0ngPU73pzNyH3LtP15/YrlZzd\nfSuwNOGt88rsV0SGT+NZJBx6fOcYyKqSB6mi09YtWonn2a7oMdMq4LTtuo/47N4/fdTLf2/K+2mf\nYDWms7ulZnlmaMvo0uM7RUREAqPkHKCk26eSPtxCqpN0fnULm7TVlZcd1rey9pPfG+zTxqRDbe0A\n9bZTu+6mszzroyPHTe9Et0EmiPW2qEWapna2gCpnERGR4KhybqGsai/+2cShSqv+P/SmvwPgVZuf\nm3tf923wg7czTX7wumkTtEI/FyJ1yXPvs26rCpOSswxdv7b8+z/7nwF41YAJVTOmZRTU1dLud++z\nHk4SJrW1RUREAqPKWYZmkIlsafcUi4iMAyVnCVobrp+LlDWMGdq6/twuamuLiIgERslZhkL3ZouI\n5Ke2ttQqKyl3b52CQzO10/ah9raMIj10RJKochYREQmMkrMEI15FJ1FrXEZJ1vOts56PXZSeud0O\namtLbfol06xELCKj7ZTNN3D/GW9oOoxgqXIWEREJjCpnCUq3otbkMBllWe3suhW95/nA2h0AzFi7\nuPCxT9l8Q+FtQ/PspnkAzFqxp/J9q3IWEREJjCpnqdwgt09lrdOvglb1LNI+us6cj5KziMiQ6J5m\nyUttbRERkcCUqpzN7N3AWwEHHgDeDMwHNgDPB7YAf+zuz5SMU1qginZ20jaaHDYcGs/jJ8/nPE9+\n8LpoyY6D73UnhsHUyWGTH7yO+2851LbunfwVb2nnuZUqbfJY0na9+0vaNk9LvbvdgYT3ikyEe3bT\nPJYunTXwdoWTs5ktAP4EWOLuvzSzjcDFwAXAx9x9g5l9CrgU+GTR44hI/TSe69f0DO2sY6cl6ZlX\nvBGAA/aNgWZrl7mPuZsgs5LtIMfOs318u9PO/+aUX0KKeHbTPGat2MPk9mcH3rZsW3smcLiZzQSe\nA+wGXgl8IXp/PXBRyWOIyHBoPIsEonDl7O67zOwjwA+BXwK30ml7Pe3uk9FqO4EFSdub2WpgNcAx\ncxcVDUMCUUdLu3dbzdyuT5XjmcMX1h5vm4zbJLCkqnfQ7ZO27S7rVwFnHTvt/bTl3Q7BoBV0Ffc/\nl2lrHwmsAI4DngZuBJbn3d7d1wHrAJYcv9SLxiHNqjMpJ+1L15/rUeV4tjknaTy3UL/rz4OospVd\n5bHvP+MNiYm9zseIdpN0kWvOZdrarwJ+4O5PufuzwBeBlwNzorYYwEJgV4ljiMhwaDyLBKTMbO0f\nAmea2XPotMHOA+4Fvg68js4Mz1XAprJBikjtNJ5rEOoksDRZk8NkMN22dpEJYWWuOW82sy8A9wGT\nwP102lpfBjaY2V9Gy64pegwJ2zBb2lIvjedqjdt1Zqleqfuc3f0K4IqexY8By8rsV0SGT+NZJBx6\nfKcMrMmKWTO3RepX1eSwQcRnYw/z+dvxiWIhPfdbyVlaSTO3RZrT+4Sw05d3UskkHHydJW3dSbIf\nGJK2Xdaxu/cE9q7XXR4/btITwuI6ly6ui70+ZNaKzsdtNjVbW0RERGqgylkG0q+lrQlgMu5GfSJY\n2mM777mlU3vO5I0HX2e5/5Y3cE/K8ixp2yUtT9p373pJxzzt/G8efJ30EBLbevXB2djG1QeXx/8N\nlJmtrcpZREQkMErOMrKWbVuVOXlNpCqjXjXLcKmt
LbmEeE9z1sztLs3gFpE8pkxEW7u49KdSlaHK\nWUREJDCqnKX1+t1WJVI3tbNHR/ye5wNrd+T63Oq6KDlLpjbM0M7zcBLQvc9SLSXm0dN9EEl8tnYT\n1NYWEREJjJKzpMqa7RxK1RyXFZNmb4tIG6itLSIyoHFqZx+wb/R9f8tXf3tIkYwXVc4iIiKBUeUs\nIjKAflXzKFXMXTP8bCC9gj7t/G/WVj1XOSlry1d/u/FJXoNQcpZEbbvWHKeZ2yLDFU96p53fYCB9\ntCkxg9raIiIiwVHlLFOE+JjOovJU0KqeJa9xmgSWZIafnTk5bBxM3L95KMdRchYRkVyyrj+X3W9T\nQvylQ21tERGRwKhyFmC02tm9+j17W5PDJI9xm6Ed1/35Lv/E/zu4rIoWd9PVclxdHYEyVDmLiIgE\nJrNyNrNrgQuBve7+smjZUcDngcXADmClu//EzAz4OHAB8AvgTe5+Xz2hi+SnyWEdGs9SlaLVZkgV\nc68Zfja29eqmwwDytbU/C/xP4LrYsjXA7e5+lZmtib7/M+B84ITozxnAJ6OvErBRbmnLNJ9F4zm3\ncZ+hnccMP3tK6zvpnLz/bf8NyD6fIejG2HSSzmxru/s3gH09i1cA3TJjPXBRbPl13nEXMMfM5lcV\nrIiUo/Es0g5FJ4TNc/fd0esngXnR6wXAE7H1dkbLdtPDzFYDqwGOmbuoYBgiUoFKxzOHL6wtUGnO\nlZcdNmVSWFza8q42VMyhKT1b293dzLzAduuAdQBLjl868PZSTp6PThzFdrZmbvdXxXi2OSe1fjyr\nnZ0saeZ2FdYdfkel+8tj9S/PHfoxB1F0tvaebnsr+ro3Wr4LODa23sJomYiES+NZJDBFK+ebgVXA\nVdHXTbHlbzezDXQmjvxbrF0mAchTMcNoVs1d+mCMaTSeBzCuVXMRWRV2ExVz77HTKmg/+b2NTgrL\ncyvV54BzgblmthO4gs4g3mhmlwKPAyuj1b9C57aL7XRuvXhzDTFLQePaypZDNJ5lWJ5/7S10/qmF\nbd3hd/RN0NDMzO3M5Ozul6S8dV7Cug5cVjYoEamHxrNIO+jxnQKMZ8Xcb3IYjM+DSaRDk8CqFfqE\nq7g8LW4YbgWt5CxjTdefBZSYq5R3JneT15vbQM/WFhERCYwq5zGgx3OKSChCrpj7TQ6D4c7gVuUs\nIiISGFXOI65f1ayKWUSGKeSquSuUyWGqnEXI/kUl78NbRGS6dYff0YrEHBIlZxERkcCorS0Sybqt\nSkbPhz79FwBc/omGA2mJqj/wImRNTw5Tch5RmqEtIk1reyu7yevPamuLiIgERpXzmFHFnC3PU8P0\nxLD267a08xindm6voj9726vmpik5i4hIZZSUq6G2toiISGCUnEfIsm2r9NCRCvU7X1nnWsL1oU//\nxbSWtj7YQkKjtvYI0Mxskfze/7b/lrjcoq9Zn1Al6Ua1pZ1n1nbVM7ZVOYuIiARGyVlEJGZYnzok\n7dOvM+Anv7fSrova2iIlfO2Mn/Oqzc9tOgypmBJ08gM21PIfHlXOIiIigVHlPKbiD9goM2Es6UEd\nw3pGtSa6iQxPv6p5VCeCJcmaHFYVVc4iIiKBMXfvv4LZtcCFwF53f1m07Grg94FngO8Db3b3p6P3\nLgcuBfYDf+Lu/5wVxJLjl/o/fOieMj/H2PraGT/Xpyg1KF69V33t+bRLZmxx96VV7nMY49nmnOQz\nz7m1yrClAXmvL49T1dwrq3ruXq+fvPPV+NPftr4r98hTOX8WWN6z7DbgZe7+74BHgcsBzGwJcDHw\n0mibvzOziUECksF96E1/pxbvkLX4nH8WjWeR4GUmZ3f/BrCvZ9mt7j4ZfXsXsDB6vQLY4O6/cvcf\nANuBZRXGKyIlaDyLtEMV15zfAnw1er0AeCL23s5o2TRmttrM7jWze3/ys6cqCGO8fO2Mn/O1M34+\nZVmLq7lWSTvHvX8fLVV6PPPMvqRVZMSsO/yOsW5pQ/Y5KHPrWanZ2mb2AWASuH7Qbd19HbAOOtec\ny8Qxjvpd33zViev13OcaHPyYyM3NxlGXqsazzTlJ41mkpMLJ2czeRGdiyXl+aFbZLuDY2GoLo2Ui\nEjCNZ5GwFErOZrYceB9wjrv/IvbWzcANZvZR4DeBE4C7S0cpAztY5ZH8wRhZ7xc5Tu/+kt6r0jC7\nA3X/LE3SeJZef3/W6bXfxyv9ZSZnM/sccC4w18x2AlfQmc35a8BtZgZwl7v/J3d/yMw2Ag/TaY9d\n5u776wpe8slKLFUnnmElslFOmHXReJZ+/v6s04HsW4TG/VrzMGQmZ3e/JGHxNX3W/yvgr8oEJSL1\n0HgWaQc9vlNERAailnd+f3/W6fzXLU8OvJ2Ss4iI8Pdnnc5//Fb0pEZ9+lTj9GxtERGRwKhyFhER\nIP+EMKmfkrOIyBjrJuS4dYffoQRdgTKz2tXWFhERCUzmR0YOJQizp4CfAz9qOpYUcwkztlDjgnBj\nCzUumB7bC9z96KaCKcrMfgY80nQcKdr09x+KUOOC9sQ28FgOIjkDmNm9VX92bVVCjS3UuCDc2EKN\nC8KObRAh/xyKbXChxgWjHZva2iIiIoFRchYREQlMSMl5XdMB9BFqbKHGBeHGFmpcEHZsgwj551Bs\ngws1Lhjh2IK55iwiIiIdIVXOIiIigpKziIhIcBpPzma23MweMbPtZram4ViONbOvm9nDZvaQmb0z\nWr7WzHaZ2dbozwUNxbfDzB6IYrg3WnaUmd1mZt+Lvh455JheHDsvW83sp2b2rqbOmZlda2Z7zezB\n2LLEc2QdfxP92/uOmZ3aQGxXm9l3o+PfZGZzouWLzeyXsfP3qTpjq0oo41ljuXBcGs/F46p2LLt7\nY3+ACeD7wPHAbODbwJIG45kPnBq9PgJ4FFgCrAXe0+S5imLaAcztWfbXwJro9Rrgww3/fT4JvKCp\ncwacDZwKPJh1joALgK8CBpwJbG4gtlcDM6PXH47Ftji+Xhv+hDSeNZYr+/vUeM4fV6VjuenKeRmw\n3d0fc/dngA3AiqaCcffd7n5f9PpnwDZgQVPx5LQCWB+9Xg9c1GAs5wHfd/fHmwrA3b8B7OtZnHaO\nVgDXecddwBwzmz/M2Nz9VnefjL69C1hY1/GHIJjxrLFcCY3nAeKqeiw3nZwXAE/Evt9JIAPIzBYD\npwCbo0Vvj9oV1zbRboo4cKuZbTGz1dGyee6+O3r9JDCvmdAAuBj4XOz7EM4ZpJ+j0P79vYXOb/5d\nx5nZ/WZ2p5m9oqmgBhDa+QQ0lkvQeC6u9FhuOjkHycx+HfhH4F3u/lPgk8ALgZOB3cB/byi033H3\nU4HzgcvM7Oz4m97poTRyb5yZzQZeA9wYLQrlnE3R5Dnqx8w+AEwC10eLdgOL3P0U4E+BG8zseU3F\n11Yay8VoPBdX1VhuOjnvAo6Nfb8wWtYYM5tFZzBf7+5fBHD3Pe6+390PAJ+m074bOnffFX3dC9wU\nxbGn27q
Jvu5tIjY6/8nc5+57ohiDOGeRtHMUxL8/M3sTcCHwR9F/Nrj7r9z9x9HrLXSu5b5o2LEN\nKIjz2aWxXIrGcwFVjuWmk/M9wAlmdlz0m9rFwM1NBWNmBlwDbHP3j8aWx69bvBZ4sHfbIcT2XDM7\novuazuSDB+mcr1XRaquATcOOLXIJsRZYCOcsJu0c3Qy8MZrleSbwb7F22VCY2XLgfcBr3P0XseVH\nm9lE9Pp44ATgsWHGVkAw41ljuTSN5wFVPpbrms2W9w+dGXaP0vlt4gMNx/I7dFok3wG2Rn8uAP43\n8EC0/GZgfgOxHU9n9uu3gYe65wp4PnA78D3ga8BRDcT2XODHwG/EljVyzuj8h7IbeJbONadL084R\nnVmdn4j+7T0ALG0gtu10rpN1/719Klr3D6K/563AfcDvD/vvteDPGMR41lguFZ/Gc7G4Kh3Lenyn\niIhIYJpua4uIiEgPJWcREZHAKDmLiIgERslZREQkMErOIiIigVFyFhERCYySs4iISGD+PweNEw/z\nE74eAAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "tags": [] + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "fIPEGKH9WYCg", + "colab_type": "text" + }, + "source": [ + "### Visualise Semantic Segmentation Results" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "nE-8-7TXT54I", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 486 + }, + "outputId": "91194862-7228-4d61-97a8-50ae14650dca" + }, + "source": [ + "\n", + "# Visualise Results\n", + "rows = 2\n", + "cols = 2\n", + "fig = plt.figure(figsize=(8, 8))\n", + "for i in range(1, rows*cols+1):\n", + " img = dataset.load_image(i)\n", + " image = np.array(img)[:, :, [2, 1, 0]]\n", + " result = vis_demo.run_on_opencv_image(image, semantic=\"True\")\n", + " \n", + " fig.add_subplot(rows, cols, i)\n", + " plt.imshow(result)\n", + "plt.show()" + ], + "execution_count": 53, + "outputs": [ + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAecAAAHVCAYAAADLvzPyAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzt3X2wXXV56PHvc4nUoiggTqQJllBp\nZyhzrUyKdKDqiO1FiiQdHQr1XqKmpp3RFsrthCDj4L3eawFbWju3VxuEGjq8+F5ir7YipVU7JSWJ\nIG9FIkpJJi8KorQ4avS5f+x1ZHNyTs7Ze6+91m/t/f3MnDlrr7P2Xs9ZJ0+e/fzWb60dmYkkSSrH\nf2o7AEmS9EwWZ0mSCmNxliSpMBZnSZIKY3GWJKkwFmdJkgoztuIcEWdGxIMRsSMiNoxrP5LGy1yW\nmhfjuM45Ig4BvgL8CrATuBM4PzPvr31nksbGXJbaMa7O+RRgR2Y+nJnfB24GVo1pX5LGx1yWWrBk\nTK+7DHi07/FO4OXzbRwR3qZMOtA3M/OFLccwUC6D+SzNJTNjkO3HVZwXFBHrgHVt7V/qgEfaDmCx\nzGepXuMqzruAY/seL6/W/VhmbgQ2gu+0pYItmMtgPkt1G9c55zuBEyJiRUQcCpwHbB7TviSNj7ks\ntWAsnXNm7o+ItwN/BxwCXJeZ941jX5LGx1yW2jGWS6kGDsJhMGku2zJzZdtBDMp8lg406IQw7xAm\nSVJhLM6SJBXG4ixJUmEszpIkFcbiLElSYSzOkiQVxuIsSVJhLM6SJBXG4ixJUmEszpIkFcbiLElS\nYSzOkiQVxuIsSVJhxvKRkZI0yX5z7acXve2N1541xkg0qfzISKlcfmRkgwYpuHWyeE8HPzJSkqSO\ns3OWymXnPGZtdcvzsYueXIN2zhZnqVwW55qVVowPxkI9WRzWliSp4+ycpXLZOdegS93yfOyiu89h\nbWlyWJyHMAnF+GAs1N3ksLYkSR03dHGOiGMj4vaIuD8i7ouIC6v1R0XErRHxUPX9yPrClTQO5nN3\n/ObaT0/86IBGGNaOiGOAYzJze0QcDmwDVgNvAh7PzCsiYgNwZGZessBrOawtHaixYe2u5/O0FiuH\nuLujsWHtzNydmdur5SeBB4BlwCpgU7XZJnoJLqlg5rNUllrOOUfEccDLgC3A0szcXf1oD7C0jn1I\naob53B0OcU+ukYtzRDwX+DhwUWZ+p/9n2Rszn3OIKyLWRcTWiNg6agyS6tHFfLY4eQwm0UjFOSKe\nRS+Rb8jMT1Sr91bnr2bOY+2b67mZuTEzV3bxUhFpEpnPUjlGmRAW9M5BPZ6ZF/Wtfy/wWN8EkqMy\nc/0Cr+WEMOlATU4I61w+2y3OzUliZRp0Qtgon+d8GvDfgHsi4q5q3TuAK4CPRMRa4BHg3BH2IakZ\n5rNUEO8QJpXLO4QdhJ3z/Oyey+MdwiRNPAuzJp3FWZKkwjisLZXLYe1Z7JgH4/B2ORzWliSp4yzO\nkiQVxmFtqVwOa/dxSHt4Dm+3z2FtSZI6zuIsSVJhLM6SJBXG4ixJUmEszpIkFcbiLElSYSzOkjTh\nvAyteyzOkiQVxuIsSVJhLM6SJBXG4ixJUmEszpIkFcbiLElSYSzOkiQVxuIsSVJhLM6SJBVm5OIc\nEYdExJci4m+qxysiYktE7IiID0fEoaOHKakJ5rNUhjo65wuBB/oeXwn8SWa+BPgWsLaGfUhqhvks\nFWCk4hwRy4FfAz5YPQ7g1cDHqk02AatH2YekZpjPUjmWjPj8PwXWA4dXj18APJGZ+6vHO4Flcz0x\nItYB60bcv6T6FJnPfmiDptHQnXNEnA3sy8xtwzw/Mzdm5srMXDlsDJLqYT5LZRmlcz4NOCcizgKe\nDTwPeB9wREQsqd5tLwd2jR6mpDEzn6WCDN05Z+almbk8M48DzgP+PjPfCNwOvKHabA1wy8hRShor\n81kqyziuc74EuDgidtA7Z3XtGPYhqRnms9SCUSeEAZCZ/wD8Q7X8MHBKHa8rqXnms9Q+7xAmSRPu\nxmvPajsEDcjiLElSYSzOkiQVxuIsSVJhLM6SJBXG4iypaDdee5YTmjR1LM6SJBXG4ixJUmFquQmJ\nJKk8ng7oLjtnSZ1godE0sThLklQYh7UXcNXHrx/6uetff0GNkUiSpoXFeQ6jFOT5XsdCLakpngLo\nPoe1JUkqTGRm2zEQEa0GUVenvFh20VqkbZm5su0gBtVEPv/m2k+PexedZudcnsyMQba3c5YkqTBT\n2zk33S3PxQ5aC7BzPgi75/nZOZdn0M55aiaElVCMZ3PCmKS6WZgng8PakiQVZuKHtUvsmA/GDlp9\nHNZeBIe3e+yYyzbosPZEFueuFeTZLNCqWJwXyQJtcS6ds7UlSeq4kTrniDgC+CBwEpDAW4AHgQ8D\nxwFfB87NzG8t8Dq1vdPuetfczw566jXaOZeYz4Oatg7abrk7Gh3WjohNwBcy84MRcShwGPAO4PHM\nvCIiNgBHZuYlC7z
O0EFMUjE+GAv1VGq6OLeez3Wa9EJtYe6Wxoa1I+L5wCuAa6sdfz8znwBWAZuq\nzTYBq4fdh6RmmM9SWUa5znkF8A3gLyPipcA24EJgaWburrbZAyyd68kRsQ5YN8L+p6ZrlhrQej7X\nbaaznLQO2o55Ogw9rB0RK4E7gNMyc0tEvA/4DvC7mXlE33bfyswjF3itgYKY9qLsEPfUaGxYu818\nbkKXC7TFeDI0OVt7J7AzM7dUjz8GnAzsjYhjAKrv+0bYh6RmmM9SQUadEPYF4Lcy88GIeBfwnOpH\nj/VNIDkqM9cv8DqLDmLau+YZds9ToekJYY3nc9O61EHbMU+Wpmdr/wK9Sy8OBR4G3kyvG/8I8GLg\nEXqXXjy+wOssGIRFeW4W6YnWdHFuLJ9L03bRthBPvkY/+CIz7wLm+s/jjFFeV1LzzGepHJ24fadd\n88HZPU8sb9/Zgia6aDvl6ePtOyVJ6jg75wliBz1x7JylCdHoOedxsyhLkqaRw9qSJBWmiGHt5S9Z\nkRde9T/bDmOiOMQ9ERzWliaEE8IkSeo4i7MkSYWxOEuSVBiLsyRJhbE4S5JUGIvzhLrq49d7nbgk\ndZTFWZKkwlicJUkqjMVZkqTCWJwnnOedJal7LM6SJBXG4ixJUmEszpIkFcbiLElSYSzOkiQVxuIs\nSVJhRirOEfH7EXFfRNwbETdFxLMjYkVEbImIHRHx4Yg4tK5gJY2P+SyVY+jiHBHLgN8DVmbmScAh\nwHnAlcCfZOZLgG8Ba+sIVNL4mM9SWUYd1l4C/GRELAEOA3YDrwY+Vv18E7B6xH1Iaob5LBVi6OKc\nmbuAPwL+jV4SfxvYBjyRmfurzXYCy+Z6fkSsi4itEbH1P7795LBhSKpBnfncRLzSpFsy7BMj4khg\nFbACeAL4KHDmYp+fmRuBjQDLX7Iih41D6vfFf3rporc9/bS7xxhJt9SZzxFhPhfmhtVPtbr/N/71\nYa3uv4tGGdZ+DfC1zPxGZv4A+ARwGnBENSwGsBzYNWKMksbPfJYKMnTnTG/469SIOAz4LnAGsBW4\nHXgDcDOwBrhl1CAlGKwrruP1pqyzNp8nSNud8mz98dhFL87QxTkzt0TEx4DtwH7gS/SGtf4fcHNE\n/K9q3bV1BKrpVHdBrnvfk1LAzefuK60gz2euOC3YBxqlcyYzLwcun7X6YeCUUV5XUvPMZ6kcIxVn\naVza7JgH0R/npHTR6p6udM3zuWH1U3bPs1icVZSuFOW5zBe7RVvj0vWi3M/z0s/kvbUlSSqMxVnF\n6HLXfDBf/KeXTuzvpvZMUtc82w2rn5ro328xLM6SJBXGc85q3bR0lV/8p5d6/lkjm/aOclrYOUtS\nR1iYp4fFWZKkwlic1appGdKe4eQwDWNaJ0hN4+88w3POaoUFSpLmZ+csSVJhLM4Tbv3rL2g7BM3B\nkQNpcaZ1SN/iLElSYSzOkiQVxuIsSQWbxiFdOVt7Yr1y/7vbDkGSNCQ7Z0mSCmPnrMY5U1lamMPZ\n062Izvnw3O0wbI1KPZbeHUuSFqeI4ixJkp5WVHHu7/i8ecbgXrn/3cV2zZKkxSuqOEuSpEUU54i4\nLiL2RcS9feuOiohbI+Kh6vuR1fqIiD+LiB0R8eWIOHmU4Na//gI7aKlGbeazpMVbTOf8IeDMWes2\nALdl5gnAbdVjgNcCJ1Rf64D3DxqQQ7OD85hpAB+iwXyWNJwFi3Nmfh54fNbqVcCmankTsLpv/fXZ\ncwdwREQcU1ewkkZjPkvdMOx1zkszc3e1vAdYWi0vAx7t225ntW43s0TEOnrvxnnR0U+H8fLfeGjI\nkKbTfB2zx1EDqDWfJY1u5JuQZGZGRA7xvI3ARoCIyIMVk/Wvv4CrPn798EFOoIMNY1uYNay68rn2\nwKQpM+xs7b0zw1vV933V+l3AsX3bLa/WSSqX+SwVZtjivBlYUy2vAW7pW39BNcvzVODbfcNlI3Hm\n9tMONpRdatfsncGK1ng+Szq4BYe1I+Im4FXA0RGxE7gcuAL4SESsBR4Bzq02/zRwFrADeAp4c90B\n9xfoaRvq9vyyRlVaPkua24LFOTPPn+dHZ8yxbQJvGzUoSeNhPkvd0OlPpZqmLtquWZKmR6eLc7+5\nzkdPQsGeKcoWYUmaHt5bW5KkwkxM5zyXrl4fPYmz0k8/7W5nbPc5/bS72w5BUsHsnCVJKsxEd84w\nfxfaZkc9iZ2xFseOWdJiTHxxnk/TM70tyJKkxXJYW5KkwkTvPgMtB1HwjfLn66rthAfnhLCBh7W3\nZebKccUyLiXncxfdsPqptkNo1Rv/+rC2Q6hFZsYg21uc1ZhpL85DnG+2OAuY7gI9rcXZYW1Jkgpj\ncZYkqTBTO1tbaoqXT0kalJ2zJEmFsTirMaefdrddpCQtgsVZkqTCWJwlSSqMxVkaI4fxJQ3D4qzG\nTUPB8vy6pFFYnCVJKozXOUs1sVPWuMzcwnLabuM5KbfuHIadsyRJhVmwOEfEdRGxLyLu7Vv33oj4\n14j4ckR8MiKO6PvZpRGxIyIejIj/Mq7ApVJ06fyy+dxt09xJTpsFP5UqIl4B/DtwfWaeVK37VeDv\nM3N/RFwJkJmXRMSJwE3AKcBPAZ8DfjYzf7jAPvwUmynWxU+raqgY1/6pVObz5JjkIe5JfBNS+6dS\nZebngcdnrftsZu6vHt4BLK+WVwE3Z+b3MvNrwA56iS2pAOaz1A11nHN+C/CZankZ8Gjfz3ZW6w4Q\nEesiYmtEbK0hBqkxXRnCHpL53BGT2F3C5P5egxpptnZEXAbsB24Y9LmZuRHYWL2Ow2BTrL/YlTrE\nPeEFGTCfu2iuQtbF4W4L8oGGLs4R8SbgbOCMfPrE9S7g2L7NllfrJBXMfJbKsuCEMICIOA74m74J\nJGcCVwOvzMxv9G3388CNPD2B5DbgBCeQaFhtdtIFdMu1TwgD83malNxFT1u3POiEsMXM1r4JeBVw\nNLAXuBy4FPgJ4LFqszsy83eq7S+jd95qP3BRZn5m9mvOsQ+TWQMbtXAXUHwXMo7Z2uazfmwcxXva\niu5i1V6cm2AyaxgW5zKZz91hcW6OxVmq9BfvDhTiuVic1ahhi7UFeWEWZ2lyWJylCVH7TUgkSVKz\nLM6SJBXG4ixJUmEszpIkFWak23fW6JvAf1TfS3Q0ZcZWalxQbmylxgUHxvbTbQUyon8HHmw7iHl0\n6e9filLjgu7ENnAuFzFbGyAitpY6M7XU2EqNC8qNrdS4oOzYBlHy72Fsgys1Lpjs2BzWliSpMBZn\nSZIKU1Jx3th2AAdRamylxgXlxlZqXFB2bIMo+fcwtsGVGhdMcGzFnHOWJEk9JXXOkiSJAopzRJwZ\nEQ9GxI6I2NByLMdGxO0RcX9E3BcRF1br3xURuyLirurrrJbi+3pE3FPFsLVad1RE3BoRD1Xfj2w4\npp/rOy53RcR3IuKito5ZRFwXEfsi4t6+dXMeo+j5s+rf3pcj4uQWYntvRPxr
tf9PRsQR1frjIuK7\nfcfvA+OMrS6l5LO5PHRc5vPwcdWby5nZ2hdwCPBV4HjgUOBu4MQW4zkGOLlaPhz4CnAi8C7gD9o8\nVlVMXweOnrXuKmBDtbwBuLLlv+ceetf0tXLMgFcAJwP3LnSMgLOAzwABnApsaSG2XwWWVMtX9sV2\nXP92XfgqKZ/N5dr+nubz4uOqNZfb7pxPAXZk5sOZ+X3gZmBVW8Fk5u7M3F4tPwk8ACxrK55FWgVs\nqpY3AatbjOUM4KuZ+UhbAWTm54HHZ62e7xitAq7PnjuAIyLimCZjy8zPZub+6uEdwPJx7b8BxeSz\nuVwL83mAuOrO5baL8zLg0b7HOykkgSLiOOBlwJZq1dur4Yrr2hhuqiTw2YjYFhHrqnVLM3N3tbwH\nWNpOaACcB9zU97iEYwbzH6PS/v29hd47/xkrIuJLEfGPEfHLbQU1gNKOJ2Auj8B8Ht7Iudx2cS5S\nRDwX+DhwUWZ+B3g/8DPALwC7gT9uKbTTM/Nk4LXA2yLiFf0/zN4YSivT7yPiUOAc4KPVqlKO2TO0\neYwOJiIuA/YDN1SrdgMvzsyXARcDN0bE89qKr6vM5eGYz8OrK5fbLs67gGP7Hi+v1rUmIp5FL5lv\nyMxPAGTm3sz8YWb+CLiG3vBd4zJzV/V9H/DJKo69M0M31fd9bcRG7z+Z7Zm5t4qxiGNWme8YFfHv\nLyLeBJwNvLH6z4bM/F5mPlYtb6N3Lvdnm45tQEUczxnm8kjM5yHUmcttF+c7gRMiYkX1Tu08YHNb\nwUREANcCD2Tm1X3r+89b/Dpw7+znNhDbcyLi8JllepMP7qV3vNZUm60Bbmk6tsr59A2BlXDM+sx3\njDYDF1SzPE8Fvt03XNaIiDgTWA+ck5lP9a1/YUQcUi0fD5wAPNxkbEMoJp/N5ZGZzwOqPZfHNZtt\nsV/0Zth9hd67ictajuV0ekMkXwbuqr7OAv4KuKdavxk4poXYjqc3+/Vu4L6ZYwW8ALgNeAj4HHBU\nC7E9B3gMeH7fulaOGb3/UHYDP6B3zmntfMeI3qzOP6/+7d0DrGwhth30zpPN/Hv7QLXt66u/813A\nduB1Tf9dh/wdi8hnc3mk+Mzn4eKqNZe9Q5gkSYVpe1hbkiTNYnGWJKkwFmdJkgpjcZYkqTAWZ0mS\nCmNxliSpMBZnSZIKY3GWJKkwFmdJkgpjcZYkqTAWZ0mSCmNxliSpMBZnSZIKY3GWJKkwFmdJkgpj\ncZYkqTAWZ0mSCmNxliSpMBZnSZIKY3GWJKkwFmdJkgoztuIcEWdGxIMRsSMiNoxrP5LGy1yWmheZ\nWf+LRhwCfAX4FWAncCdwfmbeX/vOJI2NuSy1Y8mYXvcUYEdmPgwQETcDq4A5Ezoi6n+HIHXfNzPz\nhS3HMFAuV9uYzwU7/EXL2w7hoJ7cs7PtEMYiM2OQ7cdVnJcBj/Y93gm8vH+DiFgHrBvT/qVJ8Ejb\nAbCIXAbzuUte/qaL2w7hoD53RdnxNWVcxXlBmbkR2Ai+05a6znwu32s2XN12CIvymg1XW6AZ34Sw\nXcCxfY+XV+skdYu5PAG6UphnvGbD1Z2LuW7jKs53AidExIqIOBQ4D9g8pn1JGh9zWWrBWGZrA0TE\nWcCfAocA12Xm/z7Itg6DSQfalpkr2w5ikFyutjefWzaJXWfXh7oHnRA2tuI8UBAmszSXIorzoMzn\n9k1icZ7R1SI9aHH2DmGSJBXGzlkql52zBjbJXfNsXeqiHdaWJofFWYs2TUW5X1cKtMPakiR1nMVZ\nkjpuWrvmSeawtlQuh7V1UBblp5U+vO2wtiRJHWdxlqQOsmt+pkk7HhZnSZIKY3GWJKkwrX1kpCRp\ncJM2fKu52TlLklQYi7MkdYRd8/TwOmepXF7nLMCiPKgSr3n2OmdJkjrO4ixJmiiTMNJgcZakgk1C\nodHgLM6SJBXG65wlqUB2zNPNzlmSNHFes+HqTr/BsThLklQYh7UlqSBd7vZUn6E754g4NiJuj4j7\nI+K+iLiwWn9URNwaEQ9V34+sL1xJ42A+l8HCXL+uDm+PMqy9H/jvmXkicCrwtog4EdgA3JaZJwC3\nVY8llc18lgoydHHOzN2Zub1afhJ4AFgGrAI2VZttAlaPGqSk8TKfpbLUcs45Io4DXgZsAZZm5u7q\nR3uApfM8Zx2wro79S6qP+Sy1b+TZ2hHxXODjwEWZ+Z3+n2XvUzXmvAl+Zm7MzJVdvLG/NKnMZ6kM\nI3XOEfEseol8Q2Z+olq9NyKOyczdEXEMsG/UINUtt779Uz9e/pX/87oWI9EgzGepHKPM1g7gWuCB\nzOyfCrcZWFMtrwFuGT48SU0wn6WyDP15zhFxOvAF4B7gR9Xqd9A7T/UR4MXAI8C5mfn4Aq/l5792\nXH+3vBC76UVr7POczed2dfFSny5q83OeB/0856GHtTPzi8B8Oztj2NdVtwxSlPufY4Eui/kslcXb\nd0qSVBhv36mBDdMtS5qbQ9qai52zWnHr2z9lkZekeVicJUkqjMVZA6m727V7lqQDDX0pVa1BeOlF\n8Zooos7gPkBjl1LVyXwenOedm9GlS6nsnCVJKozFWQtqaujZIW5J49Jm1zwMi7MkSYWxOEuSVBhv\nQqJ5tTHMPLNPJ4dJmmbO1tacSjn/O+VF2tnaU8ZZ2+PT9jlnZ2tLktRxFmdJkgpjcZYkqTAWZz1D\naR9IUVIsktQUJ4QJ6EYRnMLJYU4Im0JOChsPJ4RJkqSRWJwlqSCfu+Li1rs8tc/irM7owtC7JNXB\n4ixJUmFGLs4RcUhEfCki/qZ6vCIitkTEjoj4cEQcOnqYGqcudaSlzSafNOazVIaRZ2tHxMXASuB5\nmXl2RHwE+ERm3hwRHwDuzsz3L/Aazu5sQdeL3BTM3m58trb5XB5nb4+mlPP3jc7WjojlwK8BH6we\nB/Bq4GPVJpuA1aPsQ1IzzGepHKMOa/8psB74UfX4BcATmbm/erwTWDbiPiQ1w3wuUCmdn5o19EdG\nRsTZwL7M3BYRrxri+euAdcPuX6Pp+pA2+PGSdTKfNWm6/qZmlM9zPg04JyLOAp4NPA94H3BERCyp\n3m0vB3bN9eTM3AhsBM9RSQUwn6WCDD2snZmXZubyzDwOOA/4+8x8I3A78IZqszXALSNHKWmszOey\neWOS6TOO65wvAS6OiB30zlldO4Z9SGqG+Sy1wA++mDKTcK75YCbs/LMffKEDeGnVwkocZRj0UqpR\nzjlLkho2U3gs0gcqsSgPy9t3SpJUGDvnKTLpQ9rSNPncFRfbPTNZ3XI/zzlPgWkryhN03tlzzlqU\naS3SXSrMjd6+U5Ik1c/iLEkd16UOsg7TcN23w9pTYNqGtWdMwPC2w9oayiQOc3e9GDusLUlSxzlb\nW5ImzKRcC931bnkUFmdJmlBdLdLTXJRnOKwtSVJh7Jwn3LROBpP0tP5OtMQu2k75QBZnSVLjLMgH\n57C2JEmF8TrnKTDtQ9sdvt7Z65x
Vu8UOa9vZ1svrnCVJ6jiLsyRNkcV0xHbN7XNCmCberW//VJeH\ntqXGWJTLYecsSVJh7JwlaYosOWcP/3D/getfdeKNP54sVmIHvf2U8+dcf/K/3NRwJM1wtvaUmdaZ\n2x0d1na2tmq35Jw9C26zf/OLGohk/oI7jNKLtLO1JUnquJGGtSPiCOCDwElAAm8BHgQ+DBwHfB04\nNzO/NVKUksbOfNaMJefsGUv3XGenvNBrl95JL2SkYe2I2AR8ITM/GBGHAocB7wAez8wrImIDcGRm\nXrLA6zgM1oJpGuJ2WHth5vN0WMywdr9RivQ4i/Gomi7ejQ1rR8TzgVcA11Y7/n5mPgGsAjZVm20C\nVg+7D0nNMJ+lsowyrL0C+AbwlxHxUmAbcCGwNDN3V9vsAZaOFqLGpb+bnOQuuqNdc9PM5wk3aMc8\n6Wa6+lKHv0cpzkuAk4HfzcwtEfE+YEP/BpmZ8w1xRcQ6YN0I+1eNFlvASiniFtzamc+qVclD2l0w\nymztncDOzNxSPf4YveTeGxHHAFTf98315MzcmJkru3ipiDSBzGepIEN3zpm5JyIejYify8wHgTOA\n+6uvNcAV1fdbaolURbBjnUzms+YzMxy+2Ilhdsz1GPUOYb8L3FDN7HwYeDO9bvwjEbEWeAQ4d8R9\nSGqG+ayps/2U84s87zxScc7Mu4C5hrHOGOV1JTXPfJbK4R3CJGnCXbX3rY3sp6tD2ttPOb+42C3O\nkiQVxk+lkqQp8J5r3gnAO9767tpfu7SucxJYnCVpwjQ1jK3xcVhbkqTC2DlLUoeM2hW/55p3DjS0\n3dRnO+uZ7JwlSaKsc+cWZ0mSCmNxlqSOqGui13uueeePZ28vZMk5e/xEqxZYnCWpcFftfetYZmAv\ntkAfTElDwZPE4ixJUmGcrS1JGtikdswzv1fbH4Zh5yxJBRv3DUUGOf+s5licJUkqjMPaklSgNm/B\n6Y1H2mfnLEkFGdfM7IU4tF0Wi7MkSYWxOEtSIfw0Kc2wOEuSVBgnhEmSgKfPO6/f3HIgsjhLUpsc\nytZcHNaWJKkwIxXniPj9iLgvIu6NiJsi4tkRsSIitkTEjoj4cEQcWlewksbHfJbKMXRxjohlwO8B\nKzPzJOAQ4DzgSuBPMvMlwLeAtXUEKml8zOfmtXU982KUGtc0GXVYewnwkxGxBDgM2A28GvhY9fNN\nwOoR9yGpGeazVIihi3Nm7gL+CPg3ekn8bWAb8ERm7q822wksm+v5EbEuIrZGxNZhY5BUD/O5WV3o\nTEvu7KfB0LO1I+JIYBWwAngC+Chw5mKfn5kbgY3Va+WwcUganfncDIudFmuUYe3XAF/LzG9k5g+A\nTwCnAUdUw2IAy4FdI8YoafzMZ6kgo1zn/G/AqRFxGPBd4AxgK3A78AbgZmANcMuoQWqyXfnwk4va\n7pLjDx9zJFPNfJYKEpnDj0BFxP8AfgPYD3wJ+C1656RuBo6q1v3XzPzeAq/jMNiEW2wBXqwpKdTb\nMnNlUzszn8erq0Pa65dec9Cfbz/l/IYiadbJ/3JTra+XmTHI9iPdISwzLwcun7X6YeCUUV5XUvPM\nZ6kc3r5TY1V3x9z/ulPSPUtFmtSOuRQWZ9VuXAX5YPuxUKtEXR3K7jfzOyw0vK16eW9tSZIKY+es\nWjTVLS+0fztoSZPAzlmSpMJnwpdrAAAJz0lEQVRYnDWSKx9+svWuWdL4TcL58y5xWFtDK7EoO4tb\nJZjUQubksObYOUuSVBg7Zw2kxG55NieHqS2T2jHP5jXO42fnLElSYSzOkiQVxmFtLagLQ9lzcXhb\nUlfZOWvidfXNhbplWs43qxkWZ0mSCuOwtiRJlbo/x3lYFmfNa5KGgz3/rHFxOFvj4LC2JEmFsThr\nTpPUNfeb1N9L0mRxWFuSVJv+c7beSWx4ds6SJBXGzlnP4LCvpIV87lPPnf+HS59enD3zeck5e/iL\nX/rFMUU1vN/+5zv7HpUxW9vOWZKkwizYOUfEdcDZwL7MPKladxTwYeA44OvAuZn5rYgI4H3AWcBT\nwJsyc/t4Qpc0KPNZ49Z/adk73vruZ/ysxK4Zno7rt//5TpacsweA/Ztf1GZIi+qcPwScOWvdBuC2\nzDwBuK16DPBa4ITqax3w/nrC1Lhd+fCTUzOkPU2/6xw+hPmshrznmnfynmve2XYYi9b/5mGmSLdl\nweKcmZ8HHp+1ehWwqVreBKzuW3999twBHBERx9QVrKTRmM9SNww7IWxpZu6ulvfw9BSAZcCjfdvt\nrNbtZpaIWEfv3bikdpnPGqtSh7NLNvKEsMxMIId43sbMXJmZK0eNQRrGFA9tz8t81rT7i1/6xSLe\nTAxbnPfODG9V3/dV63cBx/Ztt7xaJ6lc5rNUmGGL82ZgTbW8Brilb/0F0XMq8O2+4TJJZTKfNRYv\nuO5vecF1f9t2GJ20mEupbgJeBRwdETuBy4ErgI9ExFrgEeDcavNP07vsYge9Sy/ePIaYVTOHd6eH\n+aymdL0o/8Uv/SK/TXuXVS1YnDNzvpujnjHHtgm8bdSgJI2H+Sx1g7fvlCRpDj++OUkLHbTFWVNt\nZkj/kuMPbzkSddX6pdcAz7wz1jTr+nB2Kby3tiRJhbE4T7Epv42lVKuZDlqTp41rny3OkiQVxuIs\nSaqF55vr44QwSarJJE4Om2+4vv93tCjXz85ZkqTC2DlLUs263kEvZnJb/zbX/uGxB9lSw7A4S9KY\nrF96TWcKtLPNy+KwtiRJhbFzluhd8+1dwjQOc3WkpXTTdsvlsjhLUsNmF8VRi7VFthnX/uGxrL30\n0Ub25bC2JEmFsXOWpJbZ+Wo2i/MU6z/HOu332PZ8s6SSOKwtSVJh7JwFHNg5TksnbccsqUQWZ83p\nkuMPn8gCbTGW1AUOa0uSVBg7Z81rkC5zXF22na6kaWTnLElSYRbsnCPiOuBsYF9mnlStey/wOuD7\nwFeBN2fmE9XPLgXWAj8Efi8z/25MsasgdrjdYD5L3bCYzvlDwJmz1t0KnJSZ/xn4CnApQEScCJwH\n/Hz1nP8bEYfUFq2kUX0I81kayNpLH/3xV1MWLM6Z+Xng8VnrPpuZ+6uHdwDLq+VVwM2Z+b3M/Bqw\nAzilxngljcB8lrqhjnPObwE+Uy0vA/rfWuys1h0gItZFxNaI2FpDDJLqYT5rYE13ldNgpNnaEXEZ\nsB+4YdDnZuZGYGP1OjlKHJJGZz5rVLML9LV/eGxLkYyu7TcbQxfniHgTvYklZ2TmTDLuAvr/Gsur\ndZIKZj5LZRmqOEfEmcB64JWZ+VTfjzYDN0bE1cBPAScA/zJylJLGxnzWuMzVfZbWTbfdIc8nnn6T\nPM8GETcBrwKOBvYCl9ObzfkTwGPVZndk5u9U219G77zVfuCizPzM7NecYx8Og0kH2paZK+t8QfNZ\npaqjaJdaaAEyMwbZfsHi3ASTWZpT7cW5CeazhmFxfiaLs1Qui7Om2mIKdskFuZ/FWZocFmdp
Qgxa\nnL23tiRJhbE4S5JUGIuzJEmFsThLklSYkW7fWaNvAv9RfS/R0ZQZW6lxQbmxlRoXHBjbT7cVyIj+\nHXiw7SDm0aW/fylKjQu6E9vAuVzEbG2AiNha6szUUmMrNS4oN7ZS44KyYxtEyb+HsQ2u1LhgsmNz\nWFuSpMJYnCVJKkxJxXlj2wEcRKmxlRoXlBtbqXFB2bENouTfw9gGV2pcMMGxFXPOWZIk9ZTUOUuS\nJCzOkiQVp/XiHBFnRsSDEbEjIja0HMuxEXF7RNwfEfdFxIXV+ndFxK6IuKv6Oqul+L4eEfdUMWyt\n1h0VEbdGxEPV9yMbjunn+o7LXRHxnYi4qK1jFhHXRcS+iLi3b92cxyh6/qz6t/fliDi5hdjeGxH/\nWu3/kxFxRLX+uIj4bt/x+8A4Y6tLKflsLg8dl/k8fFz15nJmtvYFHAJ8FTgeOBS4GzixxXiOAU6u\nlg8HvgKcCLwL+IM2j1UV09eBo2etuwrYUC1vAK5s+e+5h94F960cM+AVwMnAvQsdI+As4DNAAKcC\nW1qI7VeBJdXylX2xHde/XRe+Sspnc7m2v6f5vPi4as3ltjvnU4AdmflwZn4fuBlY1VYwmbk7M7dX\ny08CDwDL2opnkVYBm6rlTcDqFmM5A/hqZj7SVgCZ+Xng8Vmr5ztGq4Drs+cO4IiIOKbJ2DLzs5m5\nv3p4B7B8XPtvQDH5bC7XwnweIK66c7nt4rwM6P+k7J0UkkARcRzwMmBLtert1XDFdW0MN1US+GxE\nbIuIddW6pZm5u1reAyxtJzQAzgNu6ntcwjGD+Y9Raf/+3kLvnf+MFRHxpYj4x4j45baCGkBpxxMw\nl0dgPg9v5FxuuzgXKSKeC3wcuCgzvwO8H/gZ4BeA3cAftxTa6Zl5MvBa4G0R8Yr+H2ZvDKWVa+Mi\n4lDgHOCj1apSjtkztHmMDiYiLgP2AzdUq3YDL87MlwEXAzdGxPPaiq+rzOXhmM/DqyuX2y7Ou4Bj\n+x4vr9a1JiKeRS+Zb8jMTwBk5t7M/GFm/gi4ht7wXeMyc1f1fR/wySqOvTNDN9X3fW3ERu8/me2Z\nubeKsYhjVpnvGBXx7y8i3gScDbyx+s+GzPxeZj5WLW+jdy73Z5uObUBFHM8Z5vJIzOch1JnLbRfn\nO4ETImJF9U7tPGBzW8FERADXAg9k5tV96/vPW/w6cO/s5zYQ23Mi4vCZZXqTD+6ld7zWVJutAW5p\nOrbK+fQNgZVwzPrMd4w2AxdUszxPBb7dN1zWiIg4E1gPnJOZT/Wtf2FEHFItHw+cADzcZGxDKCaf\nzeWRmc8Dqj2XxzWbbbFf9GbYfYXeu4nLWo7ldHpDJF8G7qq+zgL+CrinWr8ZOKaF2I6nN/v1buC+\nmWMFvAC4DXgI+BxwVAuxPQd4DHh+37pWjhm9/1B2Az+gd85p7XzHiN6szj+v/u3dA6xsIbYd9M6T\nzfx7+0C17eurv/NdwHbgdU3/XYf8HYvIZ3N5pPjM5+HiqjWXvX2nJEmFaXtYW5IkzWJxliSpMBZn\nSZIKY3GWJKkwFmdJkgpjcZYkqTAWZ0mSCvP/ASpRAss5NWLXAAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "tags": [] + } + } + ] + } + ] +} \ No newline at end of file