GeoYolo-SLAM/MaskRCNN_ROS/include/MaskRCNN/samples/shapes/train_shapes.ipynb

{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Mask R-CNN - Train on Shapes Dataset\n",
"\n",
"\n",
"This notebook shows how to train Mask R-CNN on your own dataset. To keep things simple we use a synthetic dataset of shapes (squares, triangles, and circles) which enables fast training. You'd still need a GPU, though, because the network backbone is a Resnet101, which would be too slow to train on a CPU. On a GPU, you can start to get okay-ish results in a few minutes, and good results in less than an hour.\n",
"\n",
"The code of the *Shapes* dataset is included below. It generates images on the fly, so it doesn't require downloading any data. And it can generate images of any size, so we pick a small image size to train faster. "
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Using TensorFlow backend.\n"
]
}
],
"source": [
"import os\n",
"import sys\n",
"import random\n",
"import math\n",
"import re\n",
"import time\n",
"import numpy as np\n",
"import cv2\n",
"import matplotlib\n",
"import matplotlib.pyplot as plt\n",
"\n",
"# Root directory of the project\n",
"ROOT_DIR = os.path.abspath(\"../../\")\n",
"\n",
"# Import Mask RCNN\n",
"sys.path.append(ROOT_DIR) # To find local version of the library\n",
"from mrcnn.config import Config\n",
"from mrcnn import utils\n",
"import mrcnn.model as modellib\n",
"from mrcnn import visualize\n",
"from mrcnn.model import log\n",
"\n",
"%matplotlib inline \n",
"\n",
"# Directory to save logs and trained model\n",
"MODEL_DIR = os.path.join(ROOT_DIR, \"logs\")\n",
"\n",
"# Local path to trained weights file\n",
"COCO_MODEL_PATH = os.path.join(ROOT_DIR, \"mask_rcnn_coco.h5\")\n",
"# Download COCO trained weights from Releases if needed\n",
"if not os.path.exists(COCO_MODEL_PATH):\n",
" utils.download_trained_weights(COCO_MODEL_PATH)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Configurations"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"Configurations:\n",
"BACKBONE_SHAPES [[32 32]\n",
" [16 16]\n",
" [ 8 8]\n",
" [ 4 4]\n",
" [ 2 2]]\n",
"BACKBONE_STRIDES [4, 8, 16, 32, 64]\n",
"BATCH_SIZE 8\n",
"BBOX_STD_DEV [ 0.1 0.1 0.2 0.2]\n",
"DETECTION_MAX_INSTANCES 100\n",
"DETECTION_MIN_CONFIDENCE 0.5\n",
"DETECTION_NMS_THRESHOLD 0.3\n",
"GPU_COUNT 1\n",
"IMAGES_PER_GPU 8\n",
"IMAGE_MAX_DIM 128\n",
"IMAGE_MIN_DIM 128\n",
"IMAGE_PADDING True\n",
"IMAGE_SHAPE [128 128 3]\n",
"LEARNING_MOMENTUM 0.9\n",
"LEARNING_RATE 0.002\n",
"MASK_POOL_SIZE 14\n",
"MASK_SHAPE [28, 28]\n",
"MAX_GT_INSTANCES 100\n",
"MEAN_PIXEL [ 123.7 116.8 103.9]\n",
"MINI_MASK_SHAPE (56, 56)\n",
"NAME SHAPES\n",
"NUM_CLASSES 4\n",
"POOL_SIZE 7\n",
"POST_NMS_ROIS_INFERENCE 1000\n",
"POST_NMS_ROIS_TRAINING 2000\n",
"ROI_POSITIVE_RATIO 0.33\n",
"RPN_ANCHOR_RATIOS [0.5, 1, 2]\n",
"RPN_ANCHOR_SCALES (8, 16, 32, 64, 128)\n",
"RPN_ANCHOR_STRIDE 2\n",
"RPN_BBOX_STD_DEV [ 0.1 0.1 0.2 0.2]\n",
"RPN_TRAIN_ANCHORS_PER_IMAGE 256\n",
"STEPS_PER_EPOCH 100\n",
"TRAIN_ROIS_PER_IMAGE 32\n",
"USE_MINI_MASK True\n",
"USE_RPN_ROIS True\n",
"VALIDATION_STEPS 50\n",
"WEIGHT_DECAY 0.0001\n",
"\n",
"\n"
]
}
],
"source": [
"class ShapesConfig(Config):\n",
" \"\"\"Configuration for training on the toy shapes dataset.\n",
" Derives from the base Config class and overrides values specific\n",
" to the toy shapes dataset.\n",
" \"\"\"\n",
" # Give the configuration a recognizable name\n",
" NAME = \"shapes\"\n",
"\n",
" # Train on 1 GPU and 8 images per GPU. We can put multiple images on each\n",
" # GPU because the images are small. Batch size is 8 (GPUs * images/GPU).\n",
" GPU_COUNT = 1\n",
" IMAGES_PER_GPU = 8\n",
"\n",
" # Number of classes (including background)\n",
" NUM_CLASSES = 1 + 3 # background + 3 shapes\n",
"\n",
" # Use small images for faster training. Set the limits of the small side\n",
" # the large side, and that determines the image shape.\n",
" IMAGE_MIN_DIM = 128\n",
" IMAGE_MAX_DIM = 128\n",
"\n",
" # Use smaller anchors because our image and objects are small\n",
" RPN_ANCHOR_SCALES = (8, 16, 32, 64, 128) # anchor side in pixels\n",
"\n",
" # Reduce training ROIs per image because the images are small and have\n",
" # few objects. Aim to allow ROI sampling to pick 33% positive ROIs.\n",
" TRAIN_ROIS_PER_IMAGE = 32\n",
"\n",
" # Use a small epoch since the data is simple\n",
" STEPS_PER_EPOCH = 100\n",
"\n",
" # use small validation steps since the epoch is small\n",
" VALIDATION_STEPS = 5\n",
" \n",
"config = ShapesConfig()\n",
"config.display()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Notebook Preferences"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"def get_ax(rows=1, cols=1, size=8):\n",
" \"\"\"Return a Matplotlib Axes array to be used in\n",
" all visualizations in the notebook. Provide a\n",
" central point to control graph sizes.\n",
" \n",
" Change the default size attribute to control the size\n",
" of rendered images\n",
" \"\"\"\n",
" _, ax = plt.subplots(rows, cols, figsize=(size*cols, size*rows))\n",
" return ax"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Dataset\n",
"\n",
"Create a synthetic dataset\n",
"\n",
"Extend the Dataset class and add a method to load the shapes dataset, `load_shapes()`, and override the following methods:\n",
"\n",
"* load_image()\n",
"* load_mask()\n",
"* image_reference()"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"class ShapesDataset(utils.Dataset):\n",
" \"\"\"Generates the shapes synthetic dataset. The dataset consists of simple\n",
" shapes (triangles, squares, circles) placed randomly on a blank surface.\n",
" The images are generated on the fly. No file access required.\n",
" \"\"\"\n",
"\n",
" def load_shapes(self, count, height, width):\n",
" \"\"\"Generate the requested number of synthetic images.\n",
" count: number of images to generate.\n",
" height, width: the size of the generated images.\n",
" \"\"\"\n",
" # Add classes\n",
" self.add_class(\"shapes\", 1, \"square\")\n",
" self.add_class(\"shapes\", 2, \"circle\")\n",
" self.add_class(\"shapes\", 3, \"triangle\")\n",
"\n",
" # Add images\n",
" # Generate random specifications of images (i.e. color and\n",
" # list of shapes sizes and locations). This is more compact than\n",
" # actual images. Images are generated on the fly in load_image().\n",
" for i in range(count):\n",
" bg_color, shapes = self.random_image(height, width)\n",
" self.add_image(\"shapes\", image_id=i, path=None,\n",
" width=width, height=height,\n",
" bg_color=bg_color, shapes=shapes)\n",
"\n",
" def load_image(self, image_id):\n",
" \"\"\"Generate an image from the specs of the given image ID.\n",
" Typically this function loads the image from a file, but\n",
" in this case it generates the image on the fly from the\n",
" specs in image_info.\n",
" \"\"\"\n",
" info = self.image_info[image_id]\n",
" bg_color = np.array(info['bg_color']).reshape([1, 1, 3])\n",
" image = np.ones([info['height'], info['width'], 3], dtype=np.uint8)\n",
" image = image * bg_color.astype(np.uint8)\n",
" for shape, color, dims in info['shapes']:\n",
" image = self.draw_shape(image, shape, dims, color)\n",
" return image\n",
"\n",
" def image_reference(self, image_id):\n",
" \"\"\"Return the shapes data of the image.\"\"\"\n",
" info = self.image_info[image_id]\n",
" if info[\"source\"] == \"shapes\":\n",
" return info[\"shapes\"]\n",
" else:\n",
" super(self.__class__).image_reference(self, image_id)\n",
"\n",
" def load_mask(self, image_id):\n",
" \"\"\"Generate instance masks for shapes of the given image ID.\n",
" \"\"\"\n",
" info = self.image_info[image_id]\n",
" shapes = info['shapes']\n",
" count = len(shapes)\n",
" mask = np.zeros([info['height'], info['width'], count], dtype=np.uint8)\n",
" for i, (shape, _, dims) in enumerate(info['shapes']):\n",
" mask[:, :, i:i+1] = self.draw_shape(mask[:, :, i:i+1].copy(),\n",
" shape, dims, 1)\n",
" # Handle occlusions\n",
" occlusion = np.logical_not(mask[:, :, -1]).astype(np.uint8)\n",
" for i in range(count-2, -1, -1):\n",
" mask[:, :, i] = mask[:, :, i] * occlusion\n",
" occlusion = np.logical_and(occlusion, np.logical_not(mask[:, :, i]))\n",
" # Map class names to class IDs.\n",
" class_ids = np.array([self.class_names.index(s[0]) for s in shapes])\n",
" return mask.astype(np.bool), class_ids.astype(np.int32)\n",
"\n",
" def draw_shape(self, image, shape, dims, color):\n",
" \"\"\"Draws a shape from the given specs.\"\"\"\n",
" # Get the center x, y and the size s\n",
" x, y, s = dims\n",
" if shape == 'square':\n",
" cv2.rectangle(image, (x-s, y-s), (x+s, y+s), color, -1)\n",
" elif shape == \"circle\":\n",
" cv2.circle(image, (x, y), s, color, -1)\n",
" elif shape == \"triangle\":\n",
" points = np.array([[(x, y-s),\n",
" (x-s/math.sin(math.radians(60)), y+s),\n",
" (x+s/math.sin(math.radians(60)), y+s),\n",
" ]], dtype=np.int32)\n",
" cv2.fillPoly(image, points, color)\n",
" return image\n",
"\n",
" def random_shape(self, height, width):\n",
" \"\"\"Generates specifications of a random shape that lies within\n",
" the given height and width boundaries.\n",
" Returns a tuple of three valus:\n",
" * The shape name (square, circle, ...)\n",
" * Shape color: a tuple of 3 values, RGB.\n",
" * Shape dimensions: A tuple of values that define the shape size\n",
" and location. Differs per shape type.\n",
" \"\"\"\n",
" # Shape\n",
" shape = random.choice([\"square\", \"circle\", \"triangle\"])\n",
" # Color\n",
" color = tuple([random.randint(0, 255) for _ in range(3)])\n",
" # Center x, y\n",
" buffer = 20\n",
" y = random.randint(buffer, height - buffer - 1)\n",
" x = random.randint(buffer, width - buffer - 1)\n",
" # Size\n",
" s = random.randint(buffer, height//4)\n",
" return shape, color, (x, y, s)\n",
"\n",
" def random_image(self, height, width):\n",
" \"\"\"Creates random specifications of an image with multiple shapes.\n",
" Returns the background color of the image and a list of shape\n",
" specifications that can be used to draw the image.\n",
" \"\"\"\n",
" # Pick random background color\n",
" bg_color = np.array([random.randint(0, 255) for _ in range(3)])\n",
" # Generate a few random shapes and record their\n",
" # bounding boxes\n",
" shapes = []\n",
" boxes = []\n",
" N = random.randint(1, 4)\n",
" for _ in range(N):\n",
" shape, color, dims = self.random_shape(height, width)\n",
" shapes.append((shape, color, dims))\n",
" x, y, s = dims\n",
" boxes.append([y-s, x-s, y+s, x+s])\n",
" # Apply non-max suppression wit 0.3 threshold to avoid\n",
" # shapes covering each other\n",
" keep_ixs = utils.non_max_suppression(np.array(boxes), np.arange(N), 0.3)\n",
" shapes = [s for i, s in enumerate(shapes) if i in keep_ixs]\n",
" return bg_color, shapes"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"# Training dataset\n",
"dataset_train = ShapesDataset()\n",
"dataset_train.load_shapes(500, config.IMAGE_SHAPE[0], config.IMAGE_SHAPE[1])\n",
"dataset_train.prepare()\n",
"\n",
"# Validation dataset\n",
"dataset_val = ShapesDataset()\n",
"dataset_val.load_shapes(50, config.IMAGE_SHAPE[0], config.IMAGE_SHAPE[1])\n",
"dataset_val.prepare()"
]
},
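{
"cell_type": "markdown",
"metadata": {},
"source": [
"A quick sanity check (added here, not part of the original notebook): confirm that `load_image()` and `load_mask()` return consistent shapes and that the class IDs index into the class names registered in `load_shapes()`. It only uses the dataset objects created above."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sanity check: image size, mask count, and class IDs should be consistent.\n",
"print(\"Train images:\", len(dataset_train.image_ids))\n",
"print(\"Val images:  \", len(dataset_val.image_ids))\n",
"print(\"Class names: \", dataset_train.class_names)\n",
"\n",
"sample_id = dataset_train.image_ids[0]\n",
"sample_image = dataset_train.load_image(sample_id)\n",
"sample_mask, sample_class_ids = dataset_train.load_mask(sample_id)\n",
"assert sample_image.shape[:2] == sample_mask.shape[:2]\n",
"assert sample_mask.shape[-1] == len(sample_class_ids)\n",
"print(\"image:\", sample_image.shape, \" mask:\", sample_mask.shape,\n",
"      \" class_ids:\", sample_class_ids)"
]
},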
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAywAAACnCAYAAADzEdgbAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADL9JREFUeJzt3XusZWddx+Hvb6iNCFEpEkRDRFQCjQbU0DIUe1HuluCF\ngopEgQQMvRAkxWAUY4vWUsGkN6PSEjES2xgBiS0ItFhohxawiUJEsdGIQopt00CUtrb+/GOvY3dP\n51xmOuesd2Y/TzKZvdfaZ513z6w5cz77fdc+1d0BAAAY0Z65BwAAALARwQIAAAxLsAAAAMMSLAAA\nwLAECwAAMCzBAgAADGslgqWqvquqPrxu2xcO4jhXVdVTp9svrKrbl/adX1Uv38Yxzqmqf62qv17a\n9rSq+kRVfayqPlJVT5i2f3dV/U1VXVtVH62q79jkuE+sqk9X1Ver6plL23+vqvZV1Q1V9StL299c\nVTdV1Ser6g0H+mcBsB1V9diquuAAHn/tZl/rAFg9KxEsk/U/cOZgfgDNx5OcMN1+ZpLPVNWx0/0T\npv1buSTJyeu2fSnJ87r75CRvT3LOtP11Sd7Z3ackeXeSMzc57peSPDvJn6/bfnF3753G9+Ipgh6Z\n5JXdfdy0/Zeq6uHbGDsrpqpW6WsEO6C7b+3us9dv3+Tc8sPBAHiAVfpmpLZ8QNWlVfXztfDBqnr6\nuodcn+RZ0+2nJvn9JM+qqqOTPLa7/22rz9Hdt2bdf8jd/ZXu/q/p7t1J/me6/bkkj5puH5PkK1V1\ndFV9vKqeNL1yeWNVfXN339Xdd65/nt19y/R7J7k3yX1Jvp7kP6rqEUm+Kcl/L31ODiNVdew0e/bR\nqvqrqnrKdE58oKquqKq3TI/7wtLH/FFVnTjd/mBVXTPNtB0/bfuNqnpXVb0vyWlVdeI0+3dNVV06\nyxPlsFJV5y2dl69Zm+Hez7l18jS7fE1VvX3tw5eO89vTjMv1VfXCOZ4LAPM7au4B7KIfrqprptsb\nxcsbklyTRZR8pLs/tW7/jUkuq6qjkvxvkuuSvCOLsLgpSarqGUnOy4NfJTynuz+22QCngHhrkldN\nmz6S5ENV9eokRyc5rrvvqapXJvnjJHcmOau7v7rZcadjvzzJLWtRVVVXJ/nHLP4s3trd9251DIb0\nvCSXd/c7q6qSvDfJmd19U1X94dLjNnrV+ie7++tV9eQsZv9+bNp+V3f/RJJU1d8mOam7v1ZV76iq\nF3b3VTv0fDjMVdULkjy+u5853X9ikpcsPWT53PqHJD/S3bdN5+/ycZ6X5Fu7+5RpBnhfEucdwApa\npWD5dHc/d+1OVf3T+gd0991V9a4k5yd53Ab7v5Lkp5Lc3N23V9W3ZxE4n5ge88kkpxzo4KYI+rMk\n53X356fN5yf51e5+f1W9LIsQOqO7/7mq/iXJo7r7xm0c+9lJfiHJqdP970vy00mekORhSa6rqvd1\n95cPdNzM7vIkv1ZVf5Lk75N8b5K10L4xyXdOt5e/GawkqapvTHJhVT0piwBfvm7ghukx35bFefL+\n6RvKRyT5fGBj35/k2qX7963bv3ZuPSbJbd19W/L/s8DJ/XH9A0lOnl5oqiTfUFXHdPcdOzZyVk5V\nnZ5FUH+hu18z93hYTc7Dra3ykrAHzbJU1eOSvDqLWY7zNjjO9UneNP2eLK4dOS3T9StV9YxpCcPy\nr2uq6uR1n3t52UMl+dMk7+3uD6z7fGsX9t+WaXlYVT0ni9i8rapetNlznZb5nJPkJd19z9L+r3b3\nvd19d5K7kjxyg+fL2O7p7rO7+xVJnpPk1iRrSxmXlzTeOS0hfFiSp03bnp/k3u4+KYvrpZb/TdyX\nJNM3k7ckObW7T5mue7ps554OR4DPJjlp6f76/2fWzq3/THLMFMVrXweT+8/DzyX5UHf/6HQd31PF\nCodad18yfW3zTSKzcR5ubZVmWDa96H76z/LyLJZYfaqq3lNVL+juq9d93MezWDr2yen+9Ule3N2f\nTbaeYZkq+meSPLkW7xT22iQ/lOQFSR5TVa9I8nfd/fokv5XkD6rq3iz+rl47vSp5bpLnZvGq+Ier\n6jNJvpbkL5I8JcmxVXVVd/9mkndOz/X9VdVJ3tjdN1fVp6pq3zSsa7v7gN81jSH8bFX9YhZ/x1/O\nIrYvq6rbsojcNRck+XAW30zeOm3bl+TN03l4wyaf45eTfGD6N3JfFuf/Zw/lk+DI0d1XT9em3JDF\n9XFXbvLw05P8ZVXdleTmJG/M9LV5Os7eqrp22vbFLGaKAVgxdf8sPHAkma5b+p7uPmfLBwMADGqV\nloQBAACHGTMsAADAsMywAAAAwxIsAADAsGZ7l7C9173MWrQVtu/EKzb64Z276uE/eIbzcIV9/eaL\nZz8PnYOrbYRzMHEerjrnISPY7Dw0wwIAAAxLsAAAAMMSLAAAwLAECwAAMCzBAgAADEuwAAAAwxIs\nAADAsAQLAAAwLMECAAAMS7AAAADDEiwAAMCwBAsAADAswQIAAAxLsAAAAMMSLAAAwLAECwAAMCzB\nAgAADEuwAAAAwxIsAADAsAQLAAAwLMECAAAM66i5B3BY6uSEn7tw7lHsrOpc/57Xzz0KtvCqX3/d\n3EPYcZefe+ncQwAAZiRYDlKl5h7CjuqeewRsR9WRfh46EQFg1VkSBgAADEuwAAAAwxIsAADAsAQL\nAAAwLMECAAAMS7AAAADDEiwAAMCwBAsAADAswQIAAAxLsAAAAMMSLAAAwLAECwAAMCzBAgAADEuw\nAAAAwxIsAADAsAQLAAAwLMECAAAMS7AAAADDEiwAAMCwBAsAADAswQIAAAxLsAAAAMMSLAAAwLAE\nCwAAMCzBAgAADEuwAAAAwxIsAADAsAQLAAAwLMECAAAMS7AAAADDEiwAAMCwBAsAADAswQIAAAxL\nsAAAAMMSLAAAwLAECwAAMCzBAgAADEuwAAAAwxIsAADAsAQLAAAwLMECAAAMS7DMonPWGV+cexCs\nuO7OZedcMvcwWHG333jh3EMAYHBHzT2AI1vnrDP+fb97qrJhtFx48eN3clCsmO7O5edeuuH+jaLl\n1W85faeGxAq646aL9ru9qva7r7vz6OPP2ulhAXAYECyH3AMjpWrjR260bzlkxAsHY6tI2Y7lkBEv\nHIzlEKlNvhjub99yyIgXgNUmWA6Z+0Nls0jZjuWPX4sX4cJ2HIpQ2Z+1eBEubMdaaGwWKdux9vFr\n8SJcAFaTa1gOiUWsVD30WFlv7ZiueWErOxUry1zzwlbuuOmiVNVDjpX1qip79uxxzQvACjLD8pAc\nulmVrSxHi9kWlu1GqCwz28L+HKpZla3s2bPHbAvAijHDctB2blZlI2ZbWG+3Y2WZ2RbW7NSsykbM\ntgCsFsFykNZiZQ6ihTVzxcoa0cJarMxBtACsBkvCDlQnV+47e0dipdN594m/s+3Hf8tJh34My358\nZw/PwDZ6C9o5/O6LxhkLD7RTsdLdOea
4Mw/5cQE4PAmWA7EWK9nBVxMP4NAzTfCwAuZ6xZzDx5wz\nKwCsFkvCDtCOxgrAYUKsALBbBMt2TbMrAKtupCWDABz5BMt27MZSMIDDgKVgAOw2wbJNYgXAUjAA\ndp9g2YqlYABJLAUDYB6CZRvMrgCYXQFgHoIFAAAYlmDZjOVgAEksBwNgPoJlC5aDAVgOBsB8BAsA\nADAswQIAAAxLsAAAAMMSLAAAwLAEy0a8QxhAEu8QBsC8BMtGKnnZ3gvmHgXA7B59/FlzDwGAFSZY\nNtFzDwBgAN2+GgIwH8ECAAAMS7AAAADDEiwAAMCwBAsAADAswbKZSl66921zjwJgdsccd+bcQwBg\nRQmWLXhvHADvFAbAfAQLAAAwLMGyFcvCAJJYFgbAPATLNlgIAWBZGADzECzbYZYFIIlZFgB2n2DZ\nphYtAOlu0QLArhIsB8BiCABLwwDYXYLlQJhlAUhiaRgAu0ewHCBLwwAsDQNg9wiWgyBaAEQLALtD\nsBwk0QIgWgDYeYLlIeh
"text/plain": [
"<matplotlib.figure.Figure at 0x7ff54c52e358>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAywAAACnCAYAAADzEdgbAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADBtJREFUeJzt3X/M9XVdx/HXG4lFuCaQmRXLrJyyGlQDFE2gVILZ7Ieu\nmrkyN2gJNHTWbM0WWow5cRO1qYCrVqvWUmKBBoKJ/LrR2AqXRawmSwcBMUxvIOjdH+d74HB539fN\nfcN1nc91zuOxMc75nnN9z+fc9/c+13mez+ecU90dAACAER207AEAAADsjWABAACGJVgAAIBhCRYA\nAGBYggUAABiWYAEAAIa1FsFSVd9TVVdt2Hb7Aezniqo6Zjp9elXdu3DZBVX1+iexj/Oq6j+q6u8W\nth1bVZ+tqk9X1dVV9bxp+/dW1d9X1bVV9amq+s5N9vv8qvpcVT1QVScubH9vVd1YVTdU1W8tbH97\nVe2qqpuq6tz9/bMAeDKq6jlV9e79uP61mz3WAbB+1iJYJhu/cOZAvoDmuiQvnU6fmOTzVXX0dP6l\n0+X78oEkJ2/Y9uUkp3b3yUnek+S8afuvJ7m4u09J8sdJzt5kv19O8ookf7Vh+/u7+yXT+F4zRdAz\nk7yxu4+ftv9aVR36JMbOmqmqdXqMYAt0913d/baN2zc5tnw5GABPsE5PRmqfV6j6YFX9Us18oqqO\n23CV65O8bDp9TJI/TPKyqjokyXO6+0v7uo3uvisbfiF3993d/bXp7ENJ/nc6/YUkh0+nj0hyd1Ud\nUlXXVdULplcub66qb+3uB7v7/o33s7vvmP7fSR5J8miS3Un+s6oOS/ItSb6+cJvsIFV19DR79qmq\n+tuqetF0TFxeVX9RVe+Yrnf7ws98pKpePp3+RFVdM820nTBt+92q+mhVfTzJ66rq5dPs3zVV9cGl\n3FF2lKo6f+G4PGM+w72HY+vkaXb5mqp6z/zHF/bzB9OMy/VVdfoy7gsAy3fwsgewjX60qq6ZTu8t\nXs5Nck1mUXJ1d9+y4fKbk1xSVQcn+b8kn0lyYWZhsStJqurFSc7PN75KeF53f3qzAU4B8a4kvzpt\nujrJJ6vqTUkOSXJ8dz9cVW9M8kdJ7k9yTnc/sNl+p32/Pskd86iqqiuT/Etmfxbv6u5H9rUPhnRq\nkku7++KqqiQfS3J2d++qqg8vXG9vr1r/THfvrqoXZjb79xPT9ge7+6eTpKr+IclJ3f3Vqrqwqk7v\n7iu26P6ww1XVaUmO6u4Tp/PPT/LahassHlv/nOTHuvue6fhd3M+pSZ7V3adMM8A3JnHcAayhdQqW\nz3X3q+ZnqupfN16hux+qqo8muSDJc/dy+d1JfjbJrd19b1V9R2aB89npOjclOWV/BzdF0J8nOb+7\nvzhtviDJb3f3ZVX185mF0Fnd/W9V9e9JDu/um5/Evl+R5JeTvHo6/wNJfi7J85I8I8lnqurj3f2V\n/R03S3dpkt+pqj9J8k9Jvj/JPLRvTvJd0+nFJ4OVJFX1zUneV1UvyCzAF983cMN0nW/L7Di5bHpC\neViSLwb27geTXLtw/tENl8+PrWcnuae770kemwVOHo/rH0py8vRCUyX5pqo6orvv27KRs3aq6s2Z\nBfXt3X3GssfDenIc7ts6Lwn7hlmWqnpukjdlNstx/l72c32S35z+n8zeO/K6TO9fqaoXT0sYFv+7\npqpO3nDbi8seKsmfJvlYd1++4fbmb+y/J9PysKp6ZWaxeU9V/dRm93Va5nNektd298MLlz/Q3Y90\n90NJHkzyzL3cX8b2cHe/rbvfkOSVSe5KMl/KuLik8f5pCeEzkhw7bfvJJI9090mZvV9q8d/Eo0ky\nPZm8I8mru/uU6X1Pl2zd3WEF3JbkpIXzG3/PzI+t/0pyxBTF88fB5PHj8AtJPtndPz69j+8YscLT\nrbs/MD22eZLI0jgO922dZlg2fdP99Mvy0syWWN1SVX9WVad195Ubfu66zJaO3TSdvz7Ja7r7tmTf\nMyxTRf9CkhfW7JPCzkzyI0lOS/LsqnpDkn/s7t9I8vtJPlRVj2T2d3Xm9KrkO5O8KrNXxa+qqs8n\n+WqSv07yoiRHV9UV3f17SS6e7utlVdVJ3trdt1bVLVV14zSsa7t7vz81jSH8YlX9SmZ/x1/JLLYv\nqap7MovcuXcnuSqzJ5N3TdtuTPL26Ti8YZPbeEuSy6d/I49mdvzf9nTeCVZHd185vTflhszeH/eX\nm1z9zUn+pqoeTHJrkrdmemye9vOSqrp22nZnZjPFAKyZenwWHlgl0/uWvq+7z9vnlQEABrVOS8IA\nAIAdxgwLAAAwLDMsAADAsAQLAAAwrKV9Sti5h1xoLdowOoce9eE9XlKV7G3V4O47zzzgW3zvw2/Z\n25d3bqtDf/gsx+Ea233r+5d+HDoGx3Lfrov2uL2qsqcl1N2dI08454Bvb4RjMHEcrjvHISPY7Dhc\np4815gmeGCm1yUPV3i479KgPPXb6qcQLwDItRkpt8mC4p8uq6rGff6rxAsCeCZa183iobBYpT8bi\nz8/jRbgAO8U8NDaLlCdj/vPzeBEuAE8vwbI2nr5Q2ZP5PoULMLqnK1T2pKqEC8DTzJvu18IsVqq2\nJlYWzW9jcbkYwCju23XRY1GxlaoqBx10UO69+X1bejsA60CwrLzHY2U7iRZgNPNY2U6iBeCpEywr\nbTmxMidagFEsI1bmRAvAUyNYVtZyY2VOtADLtsxYmRMtAAdOsKykMWJlTrQAyzJCrMyJFoADI1hW\nzlixMidagO02UqzMiRaA/SdYVsqYsTInWoDtMmKszIkWgP0jWFbMoL+fAbbVqLEyN/r4AEYiWFbG\n418MOTKzLMBWm38x5MjmXy4JwL4JlhWyU16w2ynjBHamnTJ7sVPGCbBsgmUl7IzZFYCtZtYCYPUI\nlhXhhToAsxYAq0iwAAAAwxIsO57lYACJ5WAAq0qwrAArIAAsBwNYVYIFAAAYlmABAACGJVgAAIBh\nCRYAAGBYgmVH8wlhAIlPCANYZYJlR6vsvvOMZQ8CYOmOPOGcZQ8BgC0iWADY8bp72UMAYIsIFgAA\nYFiCBQAAGJZgAQAAhiVYAACAYQmWHa/y9S/5pDCAI44/e9lDAGALCBYAVoJPCgNYTYIFAAAYlmBZ\nCZaFASSWhQGsIsECwMqwLAxg9QiWlbEzZlm6syPGCexcO2GWpbtz+HFnLXsYADuCYGEJatkDAFaY\nWRaA1SJYVsrYsyzdye47xx0fsDpGnmXp7qHHBzCag5c9gHXQSb7239+9bbf3P3nntt3WPnXn2499\nx0KsmF1Zpvt2XbTsISxFd+fIE85Z9jDYRqMtuaqq3LfrIrECcAAEy3ap9X2iLlbGUWt8HMIydbdY\nAThAloSx5cQKwNjL1ABGJljYBmIFAIADI1gAAIBhCRYAAGBYggUAABiWYAEAAIYlWAAAgGEJFgAA\nYFiCBQAAGJZgAQAAhiVYAACAYQkWAAB
gWIIFAAAYlmABAACGJVgAAIBhCRYAAGBYggUAABiWYAEA\nAIYlWAAAgGEJFgAAYFiCBQAAGJZgAQAAhiVYAACAYQkWAABgWIIFAAAYlmABAACGJVgAAIBhCRYA\nAGBYggUAABiWYAEAAIYlWAAAgGEJFgAAYFiCBQAAGJZgAQAAhiVYAACAYQkWAABgWIIFAAAYlmAB\nAACGJVgAAIBhCRYAAGBYggUAABiWYAEAAIYlWAAAgGEJFgAAYFiCBQAAGJZgAQAAhiVYAACAYQkW\nAABgWIIFAAAYlmABAACGJVgAAIBhCRYAAGBYggUAABiWYAEAAIYlWAAAgGEJFgAAYFiCBQAAGJZg\nAQAAhiVYAACAYQkWAABgWIIFAAAY1sHLHsA6qCSHPevOZQ8DcvhxZy17CAAA+0WwbJNa9gAAAGAH\nsiQMAAAYlmABAACGJVgAAIBhCRYAAGBYggUAABiWYAEAAIYlWAAAgGEJFgAAYFiCBQAAGJZgAQAA\nhiVYAACAYQkWAABgWII
"text/plain": [
"<matplotlib.figure.Figure at 0x7ff46d360fd0>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAywAAACnCAYAAADzEdgbAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADfNJREFUeJzt3X2wbWVdB/DvD+9oilmAhpqVCpnQNJoK5Eu+jIbooPZm\nLwOWSoONiY6oJY7lhJYxmM2I0GRenCKZmpqUwdQU0cS3exGYkvIF7+TohMGAw+goSuDTH3tt2R7O\nueeet7ufvdfnw5y5e6+9Xp51zsNe+7t+z1q7WmsBAADo0SHzbgAAAMBaBBYAAKBbAgsAANAtgQUA\nAOiWwAIAAHRLYAEAALo1isBSVT9RVR9cMe26TaznvVX1iOHxM6vq5pnXzqmqUw5gHWdX1Zeq6gMz\n0x5ZVR+rqo9U1WVV9eBh+kOq6t+q6sNV9aGqeuB+1vvQqvp0VX29qh43M/0vquqTVfWJqvqDmeln\nVdXeqvpUVb18o78LgKr6oap63hqvvbmqjtim7dzlPRyA8RhFYBms/MKZzXwBzRVJHj88flySq6rq\n2OH544fX13N+kievmHZ9kqe31p6c5M+TnD1Mf3GSt7fWnpLkb5OcsZ/1Xp/kaUn+acX0t7bWHju0\n7zlDCLp3khe01o4fpv9uVd3zANrOyFTVmN4j2LgfTvJbKydW1SGttTNbazevssxm+dIwgJEa04eR\nWneGqguq6tSaeH9VHbdilo8necLw+BFJ/jLJE6rq7kmObK19eb1ttNZuyIoDb2vtxtbaN4en30ny\nf8Pj/0xy2PD48CQ3VtXdq+qKqnpYVR1ZVXuq6j6ttW+31m5ZuZ+ttX3Dvy3J7UnuSHJrkv+pqkOT\n3CvJt2a2yQKpqmOH6tmHqupfquqYoU9cWlX/UFV/NMx33cwyf11VTxwev7+qLh8qbScM015XVe+o\nqncneW5VPXGo/l1eVRfMZUfp1ZlJHjX0jb0z/ebXhsrwA6vqiKFyfPnw3nV0kgzzvq2q3jP04fsO\n08+sqiur6u+Gdf747Aar6kHDMpcN/XxbqjgA9GvXvBtwED26qi4fHq8VXl6e5PJMQsllrbUrV7y+\nJ8nuqtqV5LtJPprkzZkEi71JUlU/l+SNuevZwLNbax/ZXwOHAPGGJC8cJl2W5F+r6rQkd09yfGvt\ntqp6QZK/SXJLkpe21r6+v/UO6z4lyb5pqKqq9yX5fCa/ize01m5fbx106elJLmytvb2qKsm7kpzR\nWttbVW+bmW+ts9O/1Fq7taoenkn176nD9G+31n4xSarq6iRPaq19Yxjm88zW2nt3aH9YLG9Ockxr\n7cSqel2S+8/0m9OHeW5JclJr7faqOinJq5P8zvData2106vqrExCzj8mOTXJY5IcmmTfKts8N5P3\n071V9exhfa/aqR0EYP7GFFg+3Vo7cfqkqr6wcobW2neq6h1JzknygDVevzHJLye5prV2c1XdP5OA\n87Fhnk8lecpGGzeEoL9P8sbW2ueGyeckeU1r7ZKq+vVMgtBLWmtfrKr/TnJYa23PAaz7aUl+O8nJ\nw/OfTPIrSR6c5G5JPlpV726tfXWj7WbuLkzy2qq6KMlnkhydZBq09yT50eHxbEivJKmqH0jylqp6\nWCYBfPYaqU8M89w3k35yyRCIDk3yucDqPjHzeNrnDktyflUdmeQeSWZPsFw1/PvlJA9N8pAkn2mt\nfTfJN6rq86ts42eS/NmkO2ZXki9uX/MZm6r6vSS/muS61trp680PO0E/XN+YAsvKqspdqixV9YAk\np2VS5Xhjklessp6PJ/n9JGcNz69P8twkzx/WMa2wzGr5/gpLzW5/+CD4ziTvaq1dumLZ6RjwmzIM\nD6uqX8jkb3dTVT1rlWVm131CJtfEnNRau23m9a8PVZXbq+rbSe69yr7Sv9taa69KkuGi5BuSHJdJ\nxe+4TPpnktwyfGC8KckjM7km6qQkt7fWnlRVxyS5ZGa9dyRJa+2mqtqX5OTW2reG7dxt53eLBXFb\nvv84cscq85ya5OrW2jlV9YxMKtlTs5W/SvKlJD89XDt1aJKfWmV912ZyYuffk++d7IFNaa2dn0l1\nGeZGP1zfmN7o93vR/RAaLsxkiNWVVXVxVT2jtfa+FctdkckB91PD848neU5r7dpk/QrLkKJ/I8nD\na3KnsBcleVSSZyS5X03uuPMfrbWXJfmTJH9VVbdn8rd6UVXdL8nrk5yYyVnxD1bVVUm+keSfkxyT\n5Niqem9r7Y+TvH3Y10uqqiV5RWvtmmGM+CeHZn24tbbhu6bRhd+squdn8jf+aiZhe3dV3ZRJOJk6\nN8kHM/mwd8Mw7ZNJzhr64eyZ8ZXOTHLp8P/IHZn0/2u3cydYWP+b5NZhKNePJJl9H5m+x34gycVV\n9fNJPrvK63dOaO3Gqro4k+rgF5J8JZNQdI+Z2V6ZScXm3sM6Lkxy8fbsDgA9qsm12MCyGa5bOqq1\ndva6M0MnqmrXcL3LDya5OsnDmgMVwKiNqcICQP9eXVVPTXKfJK8VVgBQYQEAALo1pu9hAQAAFozA\nAgAAdGtu17C85pSTjUUbsT9953vW+vLOg+qeP/sS/XDEbr3mrXPvh/rguPXQBxP9cOz0Q3qwv36o\nwgIAAHRLYAEAALolsAAAAN0SWAAAgG4JLAAAQLcEFgAAoFsCCwAA0C2BBQAA6JbAAgAAdEtgAQAA\nuiWwAAAA3RJYAACAbgksAABAtwQWAACgWwILAADQLYEFAADolsACAAB0a9e8NnzMV86Y16bn5rM/\ndt68m8AKL/zDF8+7CQfdha+/YN5NAAA4YHMLLJWa16bnoqXNuwmsompk/bDphwDAYjEkDAAA6JbA\nAgAAdEtgAQAAuiWwAAAA3RJYAACAbgksAABAtwQWAACgWwILAADQLYEFAADo1ty+6X7xtDz62c/b\n0hoes00t2Q4XXXP8vJvAJp178sO3tPybnnXeNrVka1prOeKEl867GQBA5wSWDaiadwu2R2vzbgFb\nUcvSEQEADoAhYQAAQLcEFgAAoFsCCwAA0C2BBQAA6JbAAgAAdEtgAQAAuiWwAAAA3RJYAACAbgks\nAABAtwQWAACgWwILAADQLYEFAADolsACAAB0S2ABAAC6JbAAAADdElgAAIBuCSwAAEC3BBYAAKBb\nAgsAANAtgQUAAOiWwAIAAHRLYAEAALolsAAAAN0SWAAAgG4JLAAAQLcEFgAAoFsCCwAA0C2BBQAA\n6JbAAgAAdEtgAQAAuiWwAAAA3RJYAACAbgksAABAtwQWAACgWwILAADQLYEFAADolsACAAB0S2AB\nAAC6JbAAAADdElgAAIBuCSwAAEC3BBYAAKBbAgsAANAtgQUAAOiWwAIAAHRLYAEAALolsAAAAN0S\nWAAAgG4JLAAAQLd2zbsBi6Py6Usu2sLyLY89/GWb2u7uQ4/awnZZNq+89LObX7i17H79BdvXGNiE\nr+09b8PLtNZyxAkv3YHWANA7FZYNqS3
9bO6/5LRv7tvytu/6wxi1eTcAklTVhn8OOeSQ3LznLfNu\nOgBzILAAsBCqnGwBGKO5DQn7rwct95my0775xW1bVw3r233o0du2TiZ2n33+vJsAS20zw7/WUlW5\nec9bDA0DGJn5XcOyrCfKWstp39oXJwKBsfva3vO2vSqiygIwPoaE7YCdOJxOqywAi2InwsW0ygLA\neAgs22morgCM3XYOBVtJlQVgXASWbbaTh1FVFmBR7GSoUGUBGBeBZbscpOqK0AL0bierK1Nucwww\nHgLLNjJIAeDgDdkyNAxgHASW7XCQr11RZQF6dTCqK1OGhgGMg8CyVdPbGM+7HQBzthO3MV6PKgvA\n8hNYtsE8DpeqLEBv5hEeVFkAlp/AshVuYwyQ5OAOBVtJlQVguQksWzTPw6QqC9CLeYYGVRaA5Saw\nbJbqCkCS+VZXplRZAJaXwLIZHV1or8oCzNM8LrRfjSoLwPISWDZp/ofnOwktwLz0EFamfJkkwHIS\nWDbKUDCAJH0MBVuppwA
"text/plain": [
"<matplotlib.figure.Figure at 0x7ff46d445a90>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAywAAACnCAYAAADzEdgbAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADmNJREFUeJzt3XuMrHddx/HPt1QiQowtEkRjRFRSiIYWQ8tNaG0LltTg\nBeIFiVIMl9LWIBbFCMaCViBgYqGg9BI1EjVGwCIFekOgt8PlGIWAItFohBTbhkCUgi1f/9hnT4ft\n2d1z9ja/2Xm9mk1nZmdnf3P6nO2+5/s8z1R3BwAAYETHzHsBAAAA6xEsAADAsAQLAAAwLMECAAAM\nS7AAAADDEiwAAMCwliJYqup7quqaNbd9ZguP856qesx0+RlVdcfM515bVc85gse4qKr+vareP3Pb\niVX14ar6QFVdW1UPn27/3qr6+6q6oaquq6rv3OBxH1FVH62qL1XVE2du/4OqurmqbqqqX5+5/RVV\ndaCqbqmqlx7tnwXjq6qHVtXrj+L+N2y0jQEAzMNSBMtk7RvObOUNaD6U5EnT5Scm+VhVPXq6/qTp\n85t5c5JT19z2uSRP7+5Tk7whyUXT7ecmuay7T0vyp0nO3+BxP5fkjCR/veb2N3X3E6b1PXOKoAcl\neV53nzzd/qKqesARrJ0F0t23dfeFa2+vqvX+3ntTJoa0wTYLwBJYpv8J1KZ3qLq0qn6hVry3qh63\n5i43JnnydPkxSd6S5MlVdf8kD+3u/9jse3T3bVnzi2F3f6G7/2e6+tUk/zdd/mSS46bLxyf5QlXd\nv6o+VFWPnF5Bv7WqvrW77+ruL659nt392enfneTuJPck+UqS/6qqByb5liT/O/M9WWBVdfE0Tbuu\nql6wOlmsqt+uqiur6p1Jnl1Vp05Tveur6g2rXz7zOL83TVxurKpnzOO5sDiq6tEz293fVdWjpp9N\nV1XVX1bVq6b7fWbma95WVU+ZLr932hZvqapTptvWbrNPmabQ11fVpXN5ogDMxbHzXsAe+uGqun66\nvF68vDTJ9VmJkmu7+yNrPn9rksur6tgkX0/ywSRvzEpYHEiSqnp8kotz31erL+ruD2y0wCkgXpPk\nnOmma5O8r6qen+T+SU7u7q9V1fOS/EmSLya5oLu/tNHjTo/9nCSfXY2qqro6yT9n5c/iNd1992aP\nwdiq6qwk393dT5yuPyLJs2bucld3/8T0uU8l+ZHuvr2qas3jPD3Jt3X3adPk7eYk79mTJ8GienqS\nK7r7sml7ekeS87v7QFX98cz91pvi/WR3f6WqTsjKFPr06fbZbfbjSZ7a3V+uqjdW1TO623YJsASW\nKVg+2t1PW71SVf+y9g7d/dWqujLJa5M8bJ3PfyHJTyU52N13VNV3ZCVwPjzd55Ykpx3t4qYI+osk\nF3f3p6ebX5vkN7v7XVX1M1kJofO6+1+r6t+SHNfdtx7BY5+R5BeTnD1d/4EkP53k4Unul+SDVfXO\n7v780a6bofxgkhtmrt+z5vM3JUlVPSTJ7d19e3Jo+pbc+8vkDyU5dQr8SvJNVXV8d9+5aytn0V2R\n5Leq6s+S/FOS70+y+oLPrUm+a7o8G8eVJFX1zUn+sKoemZUXgmaPo1rdZr89Kz+v3jUF0QOTfDqw\nTVX1kqy8sPOZ7n7BvNfDcrIdbm6Zdwm7z5Slqh6W5PlZmXJcvM7j3Jjk5dO/k5VjR56d6fiVqnr8\ntCvN7Mf1VXXqmu89u/tNJfnzJO/o7qvWfL/VA/tvz7R7WFWdmZXYvL2qfnyj5zrtXnFRkmd199dm\nPv+l7r67u7+a5K4kD1rn+bI4PpHkqTPX1/79vidJuvu/kxw//RK4uv0l9243n0zyvu7+0en4qceI\nFTbxte6+sLufm+TMJLclWd2ldnbX2i9Ou7LeL8mJ020/luTu7n5qVo7bm/3ZvLrN3p7ks0nO7u7T\npuPvLt+9p8Oy6O43T9uUXxKZG9vh5pZpwrLhQffTL21XZGUXq49U1dur6qzuvnrN130oK7uO3TJd\nvzHJM7v7E8nmE5apon82yQm1cqawFyZ5bJKzkjykqp6b5B+7+1eS/G6SP6qqu7Py3+qF06vjr07y\ntKy8GnlNVX0syZeT/E2SRyV5dFW9p7t/J8ll03N9V1V1kpd198Gq+khV3Twt64buPuqzpjGW7r56\nOjblpqwcl/RXG9z9JUn+tqruSnIwycsy/Z2YHucJVXXDdNt/ZmVCB+v5uar6paxsL5/Pyos+l1fV\n7Vl5sWXV65Nck5W4vm267eYkr5h+Ht60wff41SRXTT+r78nKz+FP7OSTAGBMde/eIACws6bj576v\nuy/a9M4AcBjLtEsYAACwYExYAACAYZmwAAAAwxIsAADAsOZ2lrCzLnm7fdGW2NXn//x6b965px5w\n0nm2wyX2lYNvmvt2aBtcbiNsg4ntcNnZDhnBRtuhCQsAADCsZXofliPTnV9+/e/PexXbdtnLXzHv\nJQAL7s4Dl8x7CdvS3XnwKRfMexkAbJNgOYwh5qLbYJ4K7ISV92gEgPmySxgAADAswQIAAAxLsAAA\nAMMSLAAAwLAECwAAMCzBAgAADEuwAAAAwxIsAADAsAQLAAAwLMECAAAMS7AAAADDEiwAAMCwBAsA\nADAswQIAAAxLsAAAAMMSLAAAwLAECwAAMCzBAgAADEuwAAAAwxIsAADAsAQLAAAwLMECAAAMS7AA\nAADDEiwAAMCwBAsAADAswQIAAAxLsAAAAMMSLAAAwLAECwAAMCzBAgAADEuwAAAAwxIsAADAsAQL\nAAAwLMECAAAMS7AAAADDEiwAAMCwBAsAADAswQIAAAxLsAAAAMMSLAAAwLAECwAAMCzBAgAADEuw\nAAAAwxIsAADAsAQLAAAwLMECAAAMS7AAAADDOnbeCxhOVd524W8c5Rd1zjjpxbuynCNfQXLdwbfO\ndQ3M350HLpnr9+/uPPiUC+a6BnbOcY87b95LAAATlsOqOoqP5PSTXpxU5vpRh9YxXWHp3HngklTV\nXD+OOeaY3HHrH877jwIA2EcEyw4Ypw965lLn6yddMce1sNdqkA1x7TrOeeW5c1oJALAf2CVsWzqn\nn/iieS/ikKrk9BNfmGvqlOmGbBgtxxw8Z49Wxm6b965gs6oqd9x6SS5896cOXd8oWq549aV7tTQA\nYAEJlm0a5EXte63uJjZ7fR2zMSNeFtso05VVK3sm1sz19dc3GzPiBQBYyy5hWzbWdGVVJTmzbz3y\nO08fXz/pCruQLaiRpiurqiqvO/uEI77v6sc5rzzXLmQAwDcwYdmGwV7U3p7puaxGi4nL4hhturJq\nK6tafS6r0WLiAgCYsGzJmNOVVUc1ZTncF29y7AvjGHG6supopiyH+9rNjn0BAJaDYNmiQV/U3jl2\nE1sIo05XVm13dXYTAwAEC+szbWEApi0AsNwEC5sTLQxAtADAchIsHBnRwgBECwAsH8HCkRMtDEC0\nAMByESxHbewzhK3a1pnCNnlg0TKGkc8
Qtmo7Zwrb7HFFCwAsB8Fy1CrX/cNb5r2ITXWSa3LyvJfB\nLnrwKRfMewmb6u68/N2fnvcyAIAFJli2ZOxTyR6yW6e8NWUZQnfPewlzZcoCAMtBsLA1ooUBiBYA\n2P8ECwAAMCzBwtaZsjAAUxYA2N8ECwAAMCzBsiWVaw+Oe6awPT1DmCnLXB1/8vnzXsK6ujsX7tEZ\nwkxZAGD/EixbNviZwnbrDGEMZdnPFAYA7H+CZcvGnLJ4/5XlM+KUZS+nKwDA/iZYtmXQKcZeT1fs\nFjZXpiwr7BYGAPuTYNmWsaYspivLa6Qpi+kKALCTBMu2DTZlcezKUjJlAQD2K8GybWNMWUxXGGHK\nYroCAOw0wbIjjplrtByKFdOVpdbdc40WsQIA7AbBsmPmEy1ihVnzihaxAgDsFsGyo/Y2WsQKh7PX\n0SJWAIDdJFh23N5Ey3Cx4tTGQ9mraBktVpzaGAD2n2PnvYD96Zhce/CtSTpnnPTiHX3kbzi4fpRY\nSZJO6uDz5r0KZnR3jnv
"text/plain": [
"<matplotlib.figure.Figure at 0x7ff46d208358>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"# Load and display random samples\n",
"image_ids = np.random.choice(dataset_train.image_ids, 4)\n",
"for image_id in image_ids:\n",
" image = dataset_train.load_image(image_id)\n",
" mask, class_ids = dataset_train.load_mask(image_id)\n",
" visualize.display_top_masks(image, mask, class_ids, dataset_train.class_names)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Create Model"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"# Create model in training mode\n",
"model = modellib.MaskRCNN(mode=\"training\", config=config,\n",
" model_dir=MODEL_DIR)"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {
"collapsed": true,
"scrolled": false
},
"outputs": [],
"source": [
"# Which weights to start with?\n",
"init_with = \"coco\" # imagenet, coco, or last\n",
"\n",
"if init_with == \"imagenet\":\n",
" model.load_weights(model.get_imagenet_weights(), by_name=True)\n",
"elif init_with == \"coco\":\n",
" # Load weights trained on MS COCO, but skip layers that\n",
" # are different due to the different number of classes\n",
" # See README for instructions to download the COCO weights\n",
" model.load_weights(COCO_MODEL_PATH, by_name=True,\n",
" exclude=[\"mrcnn_class_logits\", \"mrcnn_bbox_fc\", \n",
" \"mrcnn_bbox\", \"mrcnn_mask\"])\n",
"elif init_with == \"last\":\n",
" # Load the last model you trained and continue training\n",
" model.load_weights(model.find_last(), by_name=True)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Training\n",
"\n",
"Train in two stages:\n",
"1. Only the heads. Here we're freezing all the backbone layers and training only the randomly initialized layers (i.e. the ones that we didn't use pre-trained weights from MS COCO). To train only the head layers, pass `layers='heads'` to the `train()` function.\n",
"\n",
"2. Fine-tune all layers. For this simple example it's not necessary, but we're including it to show the process. Simply pass `layers=\"all` to train all layers."
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {
"scrolled": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Checkpoint Path: /deepmatter/mask_rcnn/logs/shapes2017102802/mask_rcnn_{epoch:04d}.h5\n",
"Starting at epoch 0. LR=0.002\n",
"\n",
"Selecting layers to train\n",
"fpn_c5p5 (Conv2D)\n",
"fpn_c4p4 (Conv2D)\n",
"fpn_c3p3 (Conv2D)\n",
"fpn_c2p2 (Conv2D)\n",
"fpn_p5 (Conv2D)\n",
"fpn_p2 (Conv2D)\n",
"fpn_p3 (Conv2D)\n",
"fpn_p4 (Conv2D)\n",
"In model: rpn_model\n",
" rpn_conv_shared (Conv2D)\n",
" rpn_class_raw (Conv2D)\n",
" rpn_bbox_pred (Conv2D)\n",
"mrcnn_mask_conv1 (TimeDistributed)\n",
"mrcnn_mask_bn1 (TimeDistributed)\n",
"mrcnn_mask_conv2 (TimeDistributed)\n",
"mrcnn_mask_bn2 (TimeDistributed)\n",
"mrcnn_class_conv1 (TimeDistributed)\n",
"mrcnn_class_bn1 (TimeDistributed)\n",
"mrcnn_mask_conv3 (TimeDistributed)\n",
"mrcnn_mask_bn3 (TimeDistributed)\n",
"mrcnn_class_conv2 (TimeDistributed)\n",
"mrcnn_class_bn2 (TimeDistributed)\n",
"mrcnn_mask_conv4 (TimeDistributed)\n",
"mrcnn_mask_bn4 (TimeDistributed)\n",
"mrcnn_bbox_fc (TimeDistributed)\n",
"mrcnn_mask_deconv (TimeDistributed)\n",
"mrcnn_class_logits (TimeDistributed)\n",
"mrcnn_mask (TimeDistributed)\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"/usr/local/lib/python3.5/dist-packages/tensorflow/python/ops/gradients_impl.py:95: UserWarning: Converting sparse IndexedSlices to a dense Tensor of unknown shape. This may consume a large amount of memory.\n",
" \"Converting sparse IndexedSlices to a dense Tensor of unknown shape. \"\n",
"/usr/local/lib/python3.5/dist-packages/keras/engine/training.py:1987: UserWarning: Using a generator with `use_multiprocessing=True` and multiple workers may duplicate your data. Please consider using the`keras.utils.Sequence class.\n",
" UserWarning('Using a generator with `use_multiprocessing=True`'\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1/1\n",
"100/100 [==============================] - 73s - loss: 2.2164 - rpn_class_loss: 0.0242 - rpn_bbox_loss: 1.0638 - mrcnn_class_loss: 0.2426 - mrcnn_bbox_loss: 0.3006 - mrcnn_mask_loss: 0.2385 - val_loss: 1.8454 - val_rpn_class_loss: 0.0232 - val_rpn_bbox_loss: 0.9971 - val_mrcnn_class_loss: 0.1398 - val_mrcnn_bbox_loss: 0.1343 - val_mrcnn_mask_loss: 0.2042\n"
]
}
],
"source": [
"# Train the head branches\n",
"# Passing layers=\"heads\" freezes all layers except the head\n",
"# layers. You can also pass a regular expression to select\n",
"# which layers to train by name pattern.\n",
"model.train(dataset_train, dataset_val, \n",
" learning_rate=config.LEARNING_RATE, \n",
" epochs=1, \n",
" layers='heads')"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {
"scrolled": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Checkpoint Path: /deepmatter/mask_rcnn/logs/shapes2017102802/mask_rcnn_{epoch:04d}.h5\n",
"Starting at epoch 0. LR=0.0002\n",
"\n",
"Selecting layers to train\n",
"conv1 (Conv2D)\n",
"bn_conv1 (BatchNorm)\n",
"res2a_branch2a (Conv2D)\n",
"bn2a_branch2a (BatchNorm)\n",
"res2a_branch2b (Conv2D)\n",
"bn2a_branch2b (BatchNorm)\n",
"res2a_branch2c (Conv2D)\n",
"res2a_branch1 (Conv2D)\n",
"bn2a_branch2c (BatchNorm)\n",
"bn2a_branch1 (BatchNorm)\n",
"res2b_branch2a (Conv2D)\n",
"bn2b_branch2a (BatchNorm)\n",
"res2b_branch2b (Conv2D)\n",
"bn2b_branch2b (BatchNorm)\n",
"res2b_branch2c (Conv2D)\n",
"bn2b_branch2c (BatchNorm)\n",
"res2c_branch2a (Conv2D)\n",
"bn2c_branch2a (BatchNorm)\n",
"res2c_branch2b (Conv2D)\n",
"bn2c_branch2b (BatchNorm)\n",
"res2c_branch2c (Conv2D)\n",
"bn2c_branch2c (BatchNorm)\n",
"res3a_branch2a (Conv2D)\n",
"bn3a_branch2a (BatchNorm)\n",
"res3a_branch2b (Conv2D)\n",
"bn3a_branch2b (BatchNorm)\n",
"res3a_branch2c (Conv2D)\n",
"res3a_branch1 (Conv2D)\n",
"bn3a_branch2c (BatchNorm)\n",
"bn3a_branch1 (BatchNorm)\n",
"res3b_branch2a (Conv2D)\n",
"bn3b_branch2a (BatchNorm)\n",
"res3b_branch2b (Conv2D)\n",
"bn3b_branch2b (BatchNorm)\n",
"res3b_branch2c (Conv2D)\n",
"bn3b_branch2c (BatchNorm)\n",
"res3c_branch2a (Conv2D)\n",
"bn3c_branch2a (BatchNorm)\n",
"res3c_branch2b (Conv2D)\n",
"bn3c_branch2b (BatchNorm)\n",
"res3c_branch2c (Conv2D)\n",
"bn3c_branch2c (BatchNorm)\n",
"res3d_branch2a (Conv2D)\n",
"bn3d_branch2a (BatchNorm)\n",
"res3d_branch2b (Conv2D)\n",
"bn3d_branch2b (BatchNorm)\n",
"res3d_branch2c (Conv2D)\n",
"bn3d_branch2c (BatchNorm)\n",
"res4a_branch2a (Conv2D)\n",
"bn4a_branch2a (BatchNorm)\n",
"res4a_branch2b (Conv2D)\n",
"bn4a_branch2b (BatchNorm)\n",
"res4a_branch2c (Conv2D)\n",
"res4a_branch1 (Conv2D)\n",
"bn4a_branch2c (BatchNorm)\n",
"bn4a_branch1 (BatchNorm)\n",
"res4b_branch2a (Conv2D)\n",
"bn4b_branch2a (BatchNorm)\n",
"res4b_branch2b (Conv2D)\n",
"bn4b_branch2b (BatchNorm)\n",
"res4b_branch2c (Conv2D)\n",
"bn4b_branch2c (BatchNorm)\n",
"res4c_branch2a (Conv2D)\n",
"bn4c_branch2a (BatchNorm)\n",
"res4c_branch2b (Conv2D)\n",
"bn4c_branch2b (BatchNorm)\n",
"res4c_branch2c (Conv2D)\n",
"bn4c_branch2c (BatchNorm)\n",
"res4d_branch2a (Conv2D)\n",
"bn4d_branch2a (BatchNorm)\n",
"res4d_branch2b (Conv2D)\n",
"bn4d_branch2b (BatchNorm)\n",
"res4d_branch2c (Conv2D)\n",
"bn4d_branch2c (BatchNorm)\n",
"res4e_branch2a (Conv2D)\n",
"bn4e_branch2a (BatchNorm)\n",
"res4e_branch2b (Conv2D)\n",
"bn4e_branch2b (BatchNorm)\n",
"res4e_branch2c (Conv2D)\n",
"bn4e_branch2c (BatchNorm)\n",
"res4f_branch2a (Conv2D)\n",
"bn4f_branch2a (BatchNorm)\n",
"res4f_branch2b (Conv2D)\n",
"bn4f_branch2b (BatchNorm)\n",
"res4f_branch2c (Conv2D)\n",
"bn4f_branch2c (BatchNorm)\n",
"res4g_branch2a (Conv2D)\n",
"bn4g_branch2a (BatchNorm)\n",
"res4g_branch2b (Conv2D)\n",
"bn4g_branch2b (BatchNorm)\n",
"res4g_branch2c (Conv2D)\n",
"bn4g_branch2c (BatchNorm)\n",
"res4h_branch2a (Conv2D)\n",
"bn4h_branch2a (BatchNorm)\n",
"res4h_branch2b (Conv2D)\n",
"bn4h_branch2b (BatchNorm)\n",
"res4h_branch2c (Conv2D)\n",
"bn4h_branch2c (BatchNorm)\n",
"res4i_branch2a (Conv2D)\n",
"bn4i_branch2a (BatchNorm)\n",
"res4i_branch2b (Conv2D)\n",
"bn4i_branch2b (BatchNorm)\n",
"res4i_branch2c (Conv2D)\n",
"bn4i_branch2c (BatchNorm)\n",
"res4j_branch2a (Conv2D)\n",
"bn4j_branch2a (BatchNorm)\n",
"res4j_branch2b (Conv2D)\n",
"bn4j_branch2b (BatchNorm)\n",
"res4j_branch2c (Conv2D)\n",
"bn4j_branch2c (BatchNorm)\n",
"res4k_branch2a (Conv2D)\n",
"bn4k_branch2a (BatchNorm)\n",
"res4k_branch2b (Conv2D)\n",
"bn4k_branch2b (BatchNorm)\n",
"res4k_branch2c (Conv2D)\n",
"bn4k_branch2c (BatchNorm)\n",
"res4l_branch2a (Conv2D)\n",
"bn4l_branch2a (BatchNorm)\n",
"res4l_branch2b (Conv2D)\n",
"bn4l_branch2b (BatchNorm)\n",
"res4l_branch2c (Conv2D)\n",
"bn4l_branch2c (BatchNorm)\n",
"res4m_branch2a (Conv2D)\n",
"bn4m_branch2a (BatchNorm)\n",
"res4m_branch2b (Conv2D)\n",
"bn4m_branch2b (BatchNorm)\n",
"res4m_branch2c (Conv2D)\n",
"bn4m_branch2c (BatchNorm)\n",
"res4n_branch2a (Conv2D)\n",
"bn4n_branch2a (BatchNorm)\n",
"res4n_branch2b (Conv2D)\n",
"bn4n_branch2b (BatchNorm)\n",
"res4n_branch2c (Conv2D)\n",
"bn4n_branch2c (BatchNorm)\n",
"res4o_branch2a (Conv2D)\n",
"bn4o_branch2a (BatchNorm)\n",
"res4o_branch2b (Conv2D)\n",
"bn4o_branch2b (BatchNorm)\n",
"res4o_branch2c (Conv2D)\n",
"bn4o_branch2c (BatchNorm)\n",
"res4p_branch2a (Conv2D)\n",
"bn4p_branch2a (BatchNorm)\n",
"res4p_branch2b (Conv2D)\n",
"bn4p_branch2b (BatchNorm)\n",
"res4p_branch2c (Conv2D)\n",
"bn4p_branch2c (BatchNorm)\n",
"res4q_branch2a (Conv2D)\n",
"bn4q_branch2a (BatchNorm)\n",
"res4q_branch2b (Conv2D)\n",
"bn4q_branch2b (BatchNorm)\n",
"res4q_branch2c (Conv2D)\n",
"bn4q_branch2c (BatchNorm)\n",
"res4r_branch2a (Conv2D)\n",
"bn4r_branch2a (BatchNorm)\n",
"res4r_branch2b (Conv2D)\n",
"bn4r_branch2b (BatchNorm)\n",
"res4r_branch2c (Conv2D)\n",
"bn4r_branch2c (BatchNorm)\n",
"res4s_branch2a (Conv2D)\n",
"bn4s_branch2a (BatchNorm)\n",
"res4s_branch2b (Conv2D)\n",
"bn4s_branch2b (BatchNorm)\n",
"res4s_branch2c (Conv2D)\n",
"bn4s_branch2c (BatchNorm)\n",
"res4t_branch2a (Conv2D)\n",
"bn4t_branch2a (BatchNorm)\n",
"res4t_branch2b (Conv2D)\n",
"bn4t_branch2b (BatchNorm)\n",
"res4t_branch2c (Conv2D)\n",
"bn4t_branch2c (BatchNorm)\n",
"res4u_branch2a (Conv2D)\n",
"bn4u_branch2a (BatchNorm)\n",
"res4u_branch2b (Conv2D)\n",
"bn4u_branch2b (BatchNorm)\n",
"res4u_branch2c (Conv2D)\n",
"bn4u_branch2c (BatchNorm)\n",
"res4v_branch2a (Conv2D)\n",
"bn4v_branch2a (BatchNorm)\n",
"res4v_branch2b (Conv2D)\n",
"bn4v_branch2b (BatchNorm)\n",
"res4v_branch2c (Conv2D)\n",
"bn4v_branch2c (BatchNorm)\n",
"res4w_branch2a (Conv2D)\n",
"bn4w_branch2a (BatchNorm)\n",
"res4w_branch2b (Conv2D)\n",
"bn4w_branch2b (BatchNorm)\n",
"res4w_branch2c (Conv2D)\n",
"bn4w_branch2c (BatchNorm)\n",
"res5a_branch2a (Conv2D)\n",
"bn5a_branch2a (BatchNorm)\n",
"res5a_branch2b (Conv2D)\n",
"bn5a_branch2b (BatchNorm)\n",
"res5a_branch2c (Conv2D)\n",
"res5a_branch1 (Conv2D)\n",
"bn5a_branch2c (BatchNorm)\n",
"bn5a_branch1 (BatchNorm)\n",
"res5b_branch2a (Conv2D)\n",
"bn5b_branch2a (BatchNorm)\n",
"res5b_branch2b (Conv2D)\n",
"bn5b_branch2b (BatchNorm)\n",
"res5b_branch2c (Conv2D)\n",
"bn5b_branch2c (BatchNorm)\n",
"res5c_branch2a (Conv2D)\n",
"bn5c_branch2a (BatchNorm)\n",
"res5c_branch2b (Conv2D)\n",
"bn5c_branch2b (BatchNorm)\n",
"res5c_branch2c (Conv2D)\n",
"bn5c_branch2c (BatchNorm)\n",
"fpn_c5p5 (Conv2D)\n",
"fpn_c4p4 (Conv2D)\n",
"fpn_c3p3 (Conv2D)\n",
"fpn_c2p2 (Conv2D)\n",
"fpn_p5 (Conv2D)\n",
"fpn_p2 (Conv2D)\n",
"fpn_p3 (Conv2D)\n",
"fpn_p4 (Conv2D)\n",
"In model: rpn_model\n",
" rpn_conv_shared (Conv2D)\n",
" rpn_class_raw (Conv2D)\n",
" rpn_bbox_pred (Conv2D)\n",
"mrcnn_mask_conv1 (TimeDistributed)\n",
"mrcnn_mask_bn1 (TimeDistributed)\n",
"mrcnn_mask_conv2 (TimeDistributed)\n",
"mrcnn_mask_bn2 (TimeDistributed)\n",
"mrcnn_class_conv1 (TimeDistributed)\n",
"mrcnn_class_bn1 (TimeDistributed)\n",
"mrcnn_mask_conv3 (TimeDistributed)\n",
"mrcnn_mask_bn3 (TimeDistributed)\n",
"mrcnn_class_conv2 (TimeDistributed)\n",
"mrcnn_class_bn2 (TimeDistributed)\n",
"mrcnn_mask_conv4 (TimeDistributed)\n",
"mrcnn_mask_bn4 (TimeDistributed)\n",
"mrcnn_bbox_fc (TimeDistributed)\n",
"mrcnn_mask_deconv (TimeDistributed)\n",
"mrcnn_class_logits (TimeDistributed)\n",
"mrcnn_mask (TimeDistributed)\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"/usr/local/lib/python3.5/dist-packages/tensorflow/python/ops/gradients_impl.py:95: UserWarning: Converting sparse IndexedSlices to a dense Tensor of unknown shape. This may consume a large amount of memory.\n",
" \"Converting sparse IndexedSlices to a dense Tensor of unknown shape. \"\n",
"/usr/local/lib/python3.5/dist-packages/keras/engine/training.py:1987: UserWarning: Using a generator with `use_multiprocessing=True` and multiple workers may duplicate your data. Please consider using the`keras.utils.Sequence class.\n",
" UserWarning('Using a generator with `use_multiprocessing=True`'\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1/1\n",
"100/100 [==============================] - 86s - loss: 11.4006 - rpn_class_loss: 0.0184 - rpn_bbox_loss: 0.8409 - mrcnn_class_loss: 0.1576 - mrcnn_bbox_loss: 0.0902 - mrcnn_mask_loss: 0.1977 - val_loss: 11.4376 - val_rpn_class_loss: 0.0220 - val_rpn_bbox_loss: 1.0068 - val_mrcnn_class_loss: 0.1172 - val_mrcnn_bbox_loss: 0.0683 - val_mrcnn_mask_loss: 0.1278\n"
]
}
],
"source": [
"# Fine tune all layers\n",
"# Passing layers=\"all\" trains all layers. You can also \n",
"# pass a regular expression to select which layers to\n",
"# train by name pattern.\n",
"model.train(dataset_train, dataset_val, \n",
" learning_rate=config.LEARNING_RATE / 10,\n",
" epochs=2, \n",
" layers=\"all\")"
]
},
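{
"cell_type": "markdown",
"metadata": {},
"source": [
"Besides `'heads'` and `'all'`, `train()` also accepts a layer selection by name: most versions of this codebase understand shorthands like `'4+'` (ResNet stage 4 and up) or a custom regular expression matched against layer names. The cell below is an optional sketch of such an intermediate stage; it assumes your copy of `mrcnn.model` supports the `'4+'` shorthand, so check `model.py` if it raises an error."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Optional: fine-tune ResNet stage 4 and up plus the heads.\n",
"# Assumes this version of mrcnn maps layers=\"4+\" to the matching layer regex.\n",
"model.train(dataset_train, dataset_val,\n",
"            learning_rate=config.LEARNING_RATE / 10,\n",
"            epochs=2,\n",
"            layers=\"4+\")"
]
},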
{
"cell_type": "code",
"execution_count": 10,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"# Save weights\n",
"# Typically not needed because callbacks save after every epoch\n",
"# Uncomment to save manually\n",
"# model_path = os.path.join(MODEL_DIR, \"mask_rcnn_shapes.h5\")\n",
"# model.keras_model.save_weights(model_path)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Detection"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"class InferenceConfig(ShapesConfig):\n",
" GPU_COUNT = 1\n",
" IMAGES_PER_GPU = 1\n",
"\n",
"inference_config = InferenceConfig()\n",
"\n",
"# Recreate the model in inference mode\n",
"model = modellib.MaskRCNN(mode=\"inference\", \n",
" config=inference_config,\n",
" model_dir=MODEL_DIR)\n",
"\n",
"# Get path to saved weights\n",
"# Either set a specific path or find last trained weights\n",
"# model_path = os.path.join(ROOT_DIR, \".h5 file name here\")\n",
"model_path = model.find_last()\n",
"\n",
"# Load trained weights\n",
"print(\"Loading weights from \", model_path)\n",
"model.load_weights(model_path, by_name=True)"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"original_image shape: (128, 128, 3) min: 108.00000 max: 236.00000\n",
"image_meta shape: (12,) min: 0.00000 max: 128.00000\n",
"gt_bbox shape: (2, 5) min: 2.00000 max: 102.00000\n",
"gt_mask shape: (128, 128, 2) min: 0.00000 max: 1.00000\n"
]
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAeMAAAHaCAYAAAAzAiFdAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xd0nPd95/vPM33QCwGCIAmwgE2d6pJNilTvvVm+G9vy\ntRPnJutcRyebXFuK4+TEZzeK4rNJ7t21N9r1buyVZVlUoSrVKDmSVWjRFCl2EgRIAiAAogymzzy/\n+8eAICUSrAB/U96vc3jCzAwGX8oz85nf9/k+v8cxxggAANjjsV0AAACljjAGAMAywhgAAMsIYwAA\nLCOMAQCwjDAGAMAywhgAAMsIYwAALCOMAQCwjDAGAMAywhgAAMsIYwAALCOMAQCwjDAGAMAywhgA\nAMsIYwAALCOMAQCwjDAGAMAywhgAAMsIYwAALCOMAQCwjDAGAMAywhgAAMsIYwAALCOMAQCwjDAG\nAMAywhgAAMsIYwAALCOMAQCwjDAGAMAywhgAAMsIYwAALPPZLuB02fHYQzWSzrFdBwDAurVzvvPo\nsO0iDlUyYaxcEK+2XQQAwLpLJb1vu4hD0aYGAMAywhgAAMsIYwAALCOMAQCwjDAGAMAywhgAAMsI\nYwAALCOMAQCwjDAGAMAywhgAAMsIYwAALCOMAQCwjDAGAMAywhgAAMsIYwAALCul6xkDp5Uxkg78\ncUb/SHIcezUByE+EMTBJTEIyccmNO3ICRp6w5JRpLJQB4ADa1MAkMUnJ7EvLs2VQbr8jNybJtV0V\ngHzEyhiYQMYc+IvkDCY041dPKTC0X7svuUWJ8+dI1UbGI8kZXSCzSgYgwhjHMPVfVhzx9p6v38Hj\nj/T4rGSyUve1N2j6k08pVjtDu2ddrdkfrdDe4HWqW/k7ySs5XklejYVx3tRv+fFAqaJNjXGN90GK\n8RlXUjKr6U88pWjVDO2du1zxyma1n3eHmv/tFXmGYlI6F9gAcIBjxvpqxW3HYw8tlbTadh2F5EAY\ns4o5fm57QtN/9pRGymaoa/aVnxmdDg/v1ewNT2vPRdcrccEc+ZqNnIDFYoHSdemc7zz6vu0iDkWb\nGuMihE9c04svKdbQrK5pyyX3sweE41XN2nnmnZr90a/U0fZlmeZqS1XmF770AbSpgQnlHxrW4Nxz\nxj2ZOF7VrFRlrXzR6GmuDEA+Y2UMnCqT29fjpH70kB9kMxCgdLEyBk6Rm5TcQcm0J+WJxeUmvUdN\nZ2M88nYPKtPtyO2XDOcfAyWPMAZOkUlI6kmp+X//SsNT5ynhqztqGHfPXaqpH65W8ON2ZfsduVER\nxkCJI4yBU+QMJTV9xVOKhZq0Z8bVMnFHMuP3nKPhGWpfdIdmvPeiQhvaZaIOYQyUOI4ZY1xMuR6f\nphdeVKJ2qvY2X6Xc9lrHFquervYz7tCsD59Wx+wHZJprJrnK/MXrC2BlDJwy/8Cg9i8474QnsGLV\n05WsqpcvOjJJlQEoFIQxAACW0aYGToKbyA1uOYNJeeNJmcTRJ6jHY4xH3n3Divc48pQbOSHl/ngn\nvmYA+YuVMXASTEIyPWk1/++nNTy1TQlv7UmFcc/sL2jqh28puHaXsn1MVgOlijAGTsZQSjOefkrx\nQIN2t1yTm6A+CdGyGWpfdLtmvLtSoU93yUSd3MUmAJQU2tQYF1OunzXWmh5KavoTTytR0aA9Ldfo\n1L7TOopVz1T7Gbdr1gfPqKP1AbnTSmuymql9gJUxcNxMXLnNPX7+tBLlDdqz4BqNXZD4FMWqZypR\n3SB/ZHhCng9AYWFlDBwnZyip5qefVjzYoD0zr5FOsjV9NKVxQVMAn8fKGMdUc+k1kjP+S2X6l/9E\n8k7O+K+vslatf/D9SXnuE9X0wotK1DRoz9wDrWlHE7UyliQZR97+EWV7HWX3S25UMtmJe3oA+Ysw\nxjHVXnaNnCOF7egmF3t+9iMpW/ypEejvV/+iiybt8ko9sy7L7Vn9uw4mq4ESQ5saR1V/ZW6opvn+\nP5KMUSYyoGw8Kn9tozz+gPb87Eea/X//J7X/43dlMmnVLb1Zoelz5Hi9ysaj6n3lF8qODMlXWavp\nX/62htf9RmWzF8rx+dX76pNKdu2SJFWd9wVVLf6i3ERc8fZNqjr3cu36L98/rJ5g00zVffEmOYGg\nJGngvVcU37nptP33mEzRspnatehWtf76We3O3KLkuS0yVUaO33ZlACYbYYxxTf2XFdK/rJDWb9Xe\nJ/5RJpNRw7X3KdDQrK5f/L8y2UzugYcc6Bz84HW5iZWSpMqzLlb90pu178WfSZI84TIl9rZr4N2X\nVb5wseqX3qy9v/hnBaZMU81Fy7X7f/293ERc9VfcesR6nEBIU666S90r/puysRF5yyo1/cvfVudP\n/04mlZyU/wZuQjJJScMpeZIpucnJbCY5ila3aNei29T6/rPaHbpFSV+LzIHNQILFuRkIU9QAYYzj\ndrA1G92y7mAQf/Yulc1epKpzL5cnEDzsOLNJJRVvz61ik1275Ft6syQpNGOOYjs3yk3EJUmRDR+q\nYtH5h1UQap4lX3Wdmu74P8daxcZ15a+ZotS+PRPyr/y8A5t7zFjxtCJN85RyqiZ9yipa06KOs29T\ny+rcCjkxv0WeGiOvT1IRhjEAwhgnwaRTn7sh93+8lTWqv+JW7f7Zj5SNDCo4rVWNNzxw8GGHHld2\njRzPaLI4zvEFnCOlervU9cv/79T+ASdiKKUZK55W3FevPa3XSokJHtoax0jFTO064za1/vo5dWZu\nUercmbmW9aT/ZgA2MMCFY3KTSXmCofEfMJoQnkBIJptRNhaR5Kjq3MuO6/kTndtVNnuhPKEySVLF\nGRce8XHJve3y105RaMbcsdsCU2cc1+84WdNWvqBkZZ32tF2buzziUa5TPKGMo2hVi3YtulUz339O\n/v79nPcEFDFWxjimoTWrNe2eb8mkU8oMDxz+gNGQSPd3K7plnWZ+5c+UjUcV27lRoebZx3z+VF+X\nBj96S833/5HcVFKJzm1yk/HDHucmE+p+5nHVX3GLPMFb5Xh9Sg/2q+fZx0/1nziuYG+v9i67Whqy\nsyaN1rQoXjtN/qEBJVVrpQYAk88xpjS+bu947KGlklbbrqOQnM5tCh1/YKz9XXPpNfLX1Kv35Scm\n/fcey8zv/1g7l31JiaFqnY729JHM3vyUBi84V8nL58gTtlICUGwunfOdR9+3XcShWBljXKdzyrVu\nyU0KNc+S4/EqPdSvvlVPnbbf/RlGcpOjE9SRtJxUWm7a7tEcYyTPYFzZfic3WR0srslq9qYGCGPk\nif43VtguQVKu427ikulLa/qKFYo0tSltKmRrVSxJfTMvUstHzytdVq1E20x5qpmsBooNA1zA5w3n\ngjjpqVbn7OtkEo5sTk+NVLSoY9HNmrH6eQU/3Z27zGLxb3gGlBTCGPicaStXKl1Wrd1t10uud3SC\n2uJJRcbRSPUsdSy8RTN/85z8ff1MVgN
FhjAGPifU06Oes784aXtQn6yR2lbF6poVGNxvuxQAE4ww\nBo4kz4IYQHFjgAvjKpUpV2MkkxqdoB5Jy0lnZFLH/DE7jOQZSig74MikjJxA4U9WF/vrCzgerIwB\njU5Q92TU/LNnFGmaq7RbbrukI+qbfr4a17yjwCe7le115I5IYpgLKHiEMWAkDafV/MwKpZwKdcy9\nQSaZn23qkcpWdSy6UTPfek7BTXuYrAaKBGGMkmRGN/dwhyW3O63mXz6jjL9CnW03Slmv5FqeoB6P\ncTRSPUcdC27SzHeflb+XyWqgGBDGKE1GMjHJ3ZdR88+fUdpXoc4zbjjsso/5aqRutqL10xXc32e7\nFAATgAEulK7htJpXPKOUU6HOthukZGEE8UF5uHIHcFIIY4yrqKdcjTTt+ZXKBCvU2XKjlCm0IJZk\nJGckJXdAMmnlJqsDhTdZXSpT+8DRFOAnEDAxwnv3qPu8KwumNf15+5vPVeOad+Tf0MVkNVDgCvNT\nCJggpoA394hUzspNVr/
"text/plain": [
"<matplotlib.figure.Figure at 0x7ff36b5de278>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"# Test on a random image\n",
"image_id = random.choice(dataset_val.image_ids)\n",
"original_image, image_meta, gt_class_id, gt_bbox, gt_mask =\\\n",
" modellib.load_image_gt(dataset_val, inference_config, \n",
" image_id, use_mini_mask=False)\n",
"\n",
"log(\"original_image\", original_image)\n",
"log(\"image_meta\", image_meta)\n",
"log(\"gt_class_id\", gt_class_id)\n",
"log(\"gt_bbox\", gt_bbox)\n",
"log(\"gt_mask\", gt_mask)\n",
"\n",
"visualize.display_instances(original_image, gt_bbox, gt_mask, gt_class_id, \n",
" dataset_train.class_names, figsize=(8, 8))"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Processing 1 images\n",
"image shape: (128, 128, 3) min: 108.00000 max: 236.00000\n",
"molded_images shape: (1, 128, 128, 3) min: -15.70000 max: 132.10000\n",
"image_metas shape: (1, 12) min: 0.00000 max: 128.00000\n"
]
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAeMAAAHaCAYAAAAzAiFdAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3XecZGd95/vPCZWrc5ico3KOKCKJIBACAzYLi+3FYBv7\ncteA7Ot1YHdt37XvWmDvYi9e27D2LpikCAIEYlBAQhmDENJIGk1OPZ2rK57wPPeP6kma6ZmRNNMV\n+vvmNYym+3T3Mz3V9a3nd37ndxxrLSIiItI4bqMXICIiMtcpjEVERBpMYSwiItJgCmMREZEGUxiL\niIg0mMJYRESkwRTGIiIiDaYwFhERaTCFsYiISIMpjEVERBpMYSwiItJgCmMREZEGUxiLiIg0mMJY\nRESkwRTGIiIiDaYwFhERaTCFsYiISIMpjEVERBpMYSwiItJgCmMREZEGUxiLiIg0mMJYRESkwRTG\nIiIiDaYwFhERaTCFsYiISIMpjEVERBpMYSwiItJgCmMREZEGUxiLiIg0mMJYRESkwRTGIiIiDaYw\nFhERaTC/0QuYLZs/c0s3cHaj1yEiIg33k5WfuLXQ6EUcas6EMfUgfrDRixARkYa7FHi80Ys4lMrU\nIiIiDaYwFhERaTCFsYiISIMpjEVERBpMYSwiItJgCmMREZEGUxiLiIg0mMJYRESkwRTGIiIiDaYw\nFhERaTCFsYiISIMpjEVERBpMYSwiItJgc+muTfIazPv8nUd9+9CvvUvH6/hZOX6mY0XaiXbGIiIi\nDeZYaxu9hlmx+TO3XIXuZyyzqP6j5WJxAIuDrf/uNHZdrWL/Llk7YzkFLl35iVub6n7GKlPLjPRk\n+PqENk/NdFIzXfhOmZRbIOVO4mAavbSWoMedzCUKY5FTJDA5itECpqJFZLwx8HeSdIugMBaRV9A5\nY5GTyFqw1sFYl8B0UIznMx6uphAtpmp6MNbHWmf6uEavVkSahXbGIifRwdJ0J6VoPjXThcUltDmK\n0XwcYtLuBEm3QMotqGQtIoDCWOSkCkz+QGm6ZroIbEc9jE2OEvMJbY68t5cOfxdJt4RK1iICCmOR\nkyq0OYrxfMbCNRiSh709jHOU4vkY65Nwi+TsHjwnbOBqRaRZKIxlRupmlUZSN7/MJWrgEhERaTDt\njEVeLwv2kP8++Pbjt0sfeoiGgYjMXQpjkdfJ1MBW6r+cqEQ62ktn5GHxjnp8MlMizKaZyK0i6RVI\nuVMk3Skc4lleuYg0C4WxyOtkq2D3haRf2IEf7yFnN9JvMtijnAWq5AcpLlxNaPJU/AEyyTE6/F34\nbhlXYSwyZymMRV4nZ7LGojtuxwYOkZ855rHZqT28cOGH2X76O5jMrCR08yTcMjk7BCpTi8xZCmOZ\nkbpZZ+ZUA9xagDc5Rf9Xvk+lcz47Fr+J4/VEZid3sO7pzxMlcxjXJ5OfIJmfxPFitVO+gh53Mpco\njEVeA7cWkNg3Sv/tG6h19jO89lIoc3gD11GUu5aw+cxf4oxHP4sX1yivXUHSL+DkVKIWmcv0Wlzk\nNfAKRfpv34DJpCgsX4tbreGc4LDpar6fbae9g/WP/x2DL/6QZGUcxyiMReYy7YzluLovvYGJxzeA\nnXl046IP/A67vvJZiE9+qPgdPSz6wL9n29/9p1f5kQ59b3wn2eXrsNYy+eT9TD37xFGP7L7ojeRP\nOw8cl9qe7Qx//+tgDKkFy+i76u24qfq54PKW5xn74bfo/cYPid79i/gf/yQDXgrreIx+58eMfO3h\nE1gVlLsWs+30m1n+xN1sXPoxigNL8b2ApDNF0i3iOgpnkblEYSzH1XPZDUw+9QA2OkoYOw5Yy64v\n/fXsL+w48qedR6Krjx1f+AvcdJbFH/wE5W0vEk9NHHZcZukacuvOYdeX/hs2jui//j10nX8Vk089\ngKlV2XfvV4gmR8F1WfCe3yS//jz88c8zmu2l+gd/QzBSxWS7WPn3v0f5uR2Un912Qusrdy2h3DEP\nU/AYC9eQ8qbo8Hfju1V1VovMMQpjOabkH/5HsLDwff8XWMvur3+O/mveibUxiZ5B3ESSXV/6a1Z8\n/L+y9bN/iI1Ceq96O+lFK3E8j7hSYvi7XyUuTh7Y4RaeeYzsivU4foLh732N2p56eHWe+wY6z7sC\nU61Q2bqRznMuP+puODV/Cb1XvA0nmQJg/NHvUtmy8YjjcuvOpfCzxwAw1TKlTc+SX3sOk08/ePjf\ncWAh1V1bsHEEQHnrRnouexOTTz1AODZ08EBjCIZ34Xf2AA6lF/dSKfZhbRJKUNu2j+T8nhMOYwCL\nRyXuYTxcQ9YfIeFWyNp96qwWmWMUxjKjoV97F+x9hhUO7P7KZ7FRdOB9yYGF7Pnq/zgQYIc2Lk08\nsQFTvQeAjjMvpu+qt7Pv218CwM1kqe7eyviP7iW3/jz6rno7u7/6tyT7F9B90bXs/D+fxlQr9F39\njqOuyUmm6b/u3ey98x+Jy0W8bAeLPvDv2fHPf4kNaocd63d0ExXGD/w5mprA7+g64nPW9u2k46xL\ncNMZTK1Gfu05+B09R35tN09u1dns/tzn6KzUMDUPax3AIbV0gMxpS9h56+0n9L09wFo6xraQG9tO\npmOSRL6Aq85qQN38MrcojOUEHb5VK734zMEgfsW7sytOo/Ocy3GTKXAOTxUb1Khsre9ia3u24V/1\ndgDSi1dS3vI8ploBYOrnT5I/7fwjVpFeuBy/q5f57/rwgfmR1hgS3f0E+3a9pr9ZdcfLFH7yCAve\n/RvYKKSy/SUyS9cc/rdPpJh/868yfu8G+m/9LIUFa6i53WDB7+1g2Z/9Mrv+6i6iseKr+tp7l1/F\nmn/9Z6JEtt5Z7U3iZFSiFplrFMbymtgweMUb6r95Hd30Xf0Odn7pr4mnJkgtWMbgW99/8LBDG7yM\nxXGnR0Y6znEvC6ofB8HwHvZ8/XPHPTSamsDv7DkQ0q/cKR+q8JNHKPzkEQBya84mGNt38Ev6PvPf\n+SHKP3uO3G9/jEpqgF1LboCKg9edY+WnP8zwlx+k8NCzJ/AXOFw1P8i2027m9Mf/lu3OzQTZJTjH\naJQTkfakYpgcl6nVcFPpYx80vTN2k2lsHBGXpwCHznMuO6GvUd3xMtkV63HTWQDyp1941ONqu7eS\n6OknvXjVgbcl5y0+6rGlF5+h86xL6+vK5MitOoPSS88c9Vgvm68fl8rQffEbmXzqgel3eMx/569R\n3bOV1Mc+RrV7gF2rbgBcvM4cK2/9CCN3/Ijx7zx1Qn/PV3Kw9c7q025m6eN3k35hG/E+h3gMTAms\nNskic4J2xnJck08/yIL3fhQbBuz++uc46hZ2+k3h6F5KLz7Dkl/5PeJKifKW50kvXHHcrxGM7GHi\nqQdY+L7/CxPUqO7YhKlVjjjO1KrsvesL9F19E27qHTieTzgxytDdXzji2OJzT5Oav5QlH/p9sJbx\nx+47sDPuOOtS/Hwn449+D4D57/51HMc
B16Pwrw9T3vwcAJ1nXkJ60UrcVIbEnXfi57oY2PAcw196\ngIH3X01qcT9977iEvpsvAQsjtz3M+Hd/fILf2YNK3UvZftbNLH3wbnZGN1FduxS32+IlYYb7TYhI\nG3HsCQ4qaHWbP3PLVcCDxz1QGsZJJA+Uv7svvYFEdx/D936lwas6aPGf/CPbrnovlUIP2FPR7mzJ\nTe5g2XN3s/PSm6idsxRvgcU9TlGiXamBS06hS1d+4tbHG72IQ2lnLDOa7SfD3ivfRnrhchzXI5wc\nZeS+22bl6x6LqYKtAYX6LGpTO5VndhxKXUvZdtrNLHv8G7y46Dcp9i/H90MSTmnODQNRCMtcojCW\npjH6gzsbvYQj2CrYoZDFd97B1Pw1BE7niTWavQ6l7qWUuhZhJmA8XE3SL5P39+A71TkVxiJzicJY\n5FgmAxbfeQcVv49dy94E1fp1xaeaxaVquhkPV5NOTOI7NbLeCFA77seKSOtRGIscylqcIMSthbjF\nEgu+9n2quV52LXkT2Fm
"text/plain": [
"<matplotlib.figure.Figure at 0x7ff36b5de080>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"results = model.detect([original_image], verbose=1)\n",
"\n",
"r = results[0]\n",
"visualize.display_instances(original_image, r['rois'], r['masks'], r['class_ids'], \n",
" dataset_val.class_names, r['scores'], ax=get_ax())"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Evaluation"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"mAP: 0.95\n"
]
}
],
"source": [
"# Compute VOC-Style mAP @ IoU=0.5\n",
"# Running on 10 images. Increase for better accuracy.\n",
"image_ids = np.random.choice(dataset_val.image_ids, 10)\n",
"APs = []\n",
"for image_id in image_ids:\n",
" # Load image and ground truth data\n",
" image, image_meta, gt_class_id, gt_bbox, gt_mask =\\\n",
" modellib.load_image_gt(dataset_val, inference_config,\n",
" image_id, use_mini_mask=False)\n",
" molded_images = np.expand_dims(modellib.mold_image(image, inference_config), 0)\n",
" # Run object detection\n",
" results = model.detect([image], verbose=0)\n",
" r = results[0]\n",
" # Compute AP\n",
" AP, precisions, recalls, overlaps =\\\n",
" utils.compute_ap(gt_bbox, gt_class_id, gt_mask,\n",
" r[\"rois\"], r[\"class_ids\"], r[\"scores\"], r['masks'])\n",
" APs.append(AP)\n",
" \n",
"print(\"mAP: \", np.mean(APs))"
]
},
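{
"cell_type": "markdown",
"metadata": {},
"source": [
"As a rough visual check (added here, not part of the original notebook), we can plot the precision-recall curve of the last image evaluated above, using the `precisions` and `recalls` arrays returned by `utils.compute_ap()`. Only matplotlib is needed."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Precision-recall curve for the last image evaluated in the loop above.\n",
"plt.figure(figsize=(6, 6))\n",
"plt.plot(recalls, precisions, drawstyle=\"steps-post\")\n",
"plt.xlabel(\"Recall\")\n",
"plt.ylabel(\"Precision\")\n",
"plt.title(\"Precision-Recall (last image), AP = {:.3f}\".format(AP))\n",
"plt.xlim(0, 1.1)\n",
"plt.ylim(0, 1.1)\n",
"plt.show()"
]
},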
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.2"
}
},
"nbformat": 4,
"nbformat_minor": 2
}