GeoYolo-SLAM/MaskRCNN_ROS/include/MaskRCNN/samples/coco/inspect_weights.ipynb

280 lines
1.2 MiB
Plaintext
Raw Normal View History

2025-04-09 16:05:54 +08:00
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Mask R-CNN - Inspect Weights of a Trained Model\n",
"\n",
"This notebook includes code and visualizations to test, debug, and evaluate the Mask R-CNN model."
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Using TensorFlow backend.\n"
]
}
],
"source": [
"import os\n",
"import sys\n",
"import numpy as np\n",
"import tensorflow as tf\n",
"import matplotlib\n",
"import matplotlib.pyplot as plt\n",
"import keras\n",
"\n",
"# Root directory of the project\n",
"ROOT_DIR = os.path.abspath(\"../../\")\n",
"\n",
"# Import Mask RCNN\n",
"sys.path.append(ROOT_DIR) # To find local version of the library\n",
"from mrcnn import utils\n",
"import mrcnn.model as modellib\n",
"from mrcnn import visualize\n",
"from mrcnn.model import log\n",
"\n",
"%matplotlib inline \n",
"\n",
"# Directory to save logs and trained model\n",
"MODEL_DIR = os.path.join(ROOT_DIR, \"logs\")\n",
"\n",
"# Local path to trained weights file\n",
"COCO_MODEL_PATH = os.path.join(ROOT_DIR, \"mask_rcnn_coco.h5\")\n",
"# Download COCO trained weights from Releases if needed\n",
"if not os.path.exists(COCO_MODEL_PATH):\n",
" utils.download_trained_weights(COCO_MODEL_PATH)\n",
"\n",
"# Path to Shapes trained weights\n",
"SHAPES_MODEL_PATH = os.path.join(ROOT_DIR, \"mask_rcnn_shapes.h5\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Configurations"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"# Run one of the code blocks\n",
"\n",
"# Shapes toy dataset\n",
"# import shapes\n",
"# config = shapes.ShapesConfig()\n",
"\n",
"# MS COCO Dataset\n",
"import coco\n",
"config = coco.CocoConfig()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Notebook Preferences"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"# Device to load the neural network on.\n",
"# Useful if you're training a model on the same \n",
"# machine, in which case use CPU and leave the\n",
"# GPU for training.\n",
"DEVICE = \"/cpu:0\" # /cpu:0 or /gpu:0"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"def get_ax(rows=1, cols=1, size=16):\n",
" \"\"\"Return a Matplotlib Axes array to be used in\n",
" all visualizations in the notebook. Provide a\n",
" central point to control graph sizes.\n",
" \n",
" Adjust the size attribute to control how big to render images\n",
" \"\"\"\n",
" _, ax = plt.subplots(rows, cols, figsize=(size*cols, size*rows))\n",
" return ax"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Load Model"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {
"collapsed": true,
"scrolled": false
},
"outputs": [],
"source": [
"# Create model in inference mode\n",
"with tf.device(DEVICE):\n",
" model = modellib.MaskRCNN(mode=\"inference\", model_dir=MODEL_DIR,\n",
" config=config)\n",
"\n",
"# Set weights file path\n",
"if config.NAME == \"shapes\":\n",
" weights_path = SHAPES_MODEL_PATH\n",
"elif config.NAME == \"coco\":\n",
" weights_path = COCO_MODEL_PATH\n",
"# Or, uncomment to load the last model you trained\n",
"# weights_path = model.find_last()\n",
"\n",
"# Load weights\n",
"print(\"Loading weights \", weights_path)\n",
"model.load_weights(weights_path, by_name=True)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Review Weight Stats"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {
"scrolled": false
},
"outputs": [
{
"data": {
"text/html": [
"<table><tr><td>conv1/kernel:0 </td><td>(7, 7, 3, 64) </td><td> -0.8616 </td><td> +0.8539 </td><td> +0.1314 </td></tr><tr><td>bn_conv1/gamma:0 </td><td>(64,) </td><td> +0.0843 </td><td> +2.6420 </td><td> +0.5087 </td></tr><tr><td>bn_conv1/beta:0 </td><td>(64,) </td><td> -2.4174 </td><td> +5.4189 </td><td> +1.9981 </td></tr><tr><td>bn_conv1/moving_mean:0 </td><td>(64,) </td><td>-172.9685 </td><td> +94.5717 </td><td> +42.0063 </td></tr><tr><td>bn_conv1/moving_variance:0<span style='color:red'>*** Overflow?</span></td><td>(64,) </td><td> +0.0000 </td><td>+110557.9688 </td><td>+16228.7607 </td></tr><tr><td>res2a_branch2a/kernel:0 </td><td>(1, 1, 64, 64) </td><td> -0.6603 </td><td> +0.3208 </td><td> +0.0768 </td></tr><tr><td>bn2a_branch2a/gamma:0 </td><td>(64,) </td><td> +0.2189 </td><td> +1.8654 </td><td> +0.4149 </td></tr><tr><td>bn2a_branch2a/beta:0 </td><td>(64,) </td><td> -2.1375 </td><td> +3.7690 </td><td> +1.1904 </td></tr><tr><td>bn2a_branch2a/moving_mean:0 </td><td>(64,) </td><td> -6.3118 </td><td> +7.4370 </td><td> +2.4037 </td></tr><tr><td>bn2a_branch2a/moving_variance:0 </td><td>(64,) </td><td> +0.0000 </td><td> +8.8091 </td><td> +2.1498 </td></tr><tr><td>res2a_branch2b/kernel:0 </td><td>(3, 3, 64, 64) </td><td> -0.3813 </td><td> +0.5123 </td><td> +0.0323 </td></tr><tr><td>bn2a_branch2b/gamma:0 </td><td>(64,) </td><td> +0.3195 </td><td> +1.7454 </td><td> +0.3143 </td></tr><tr><td>bn2a_branch2b/beta:0 </td><td>(64,) </td><td> -1.9530 </td><td> +4.5882 </td><td> +1.5261 </td></tr><tr><td>bn2a_branch2b/moving_mean:0 </td><td>(64,) </td><td> -6.7890 </td><td> +4.2754 </td><td> +2.2064 </td></tr><tr><td>bn2a_branch2b/moving_variance:0 </td><td>(64,) </td><td> +0.0000 </td><td> +5.5464 </td><td> +1.1573 </td></tr><tr><td>res2a_branch2c/kernel:0 </td><td>(1, 1, 64, 256) </td><td> -0.4412 </td><td> +0.3600 </td><td> +0.0411
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"# Show stats of all trainable weights \n",
"visualize.display_weight_stats(model)"
]
},
{
"cell_type": "markdown",
"metadata": {
"collapsed": true
},
"source": [
"# Histograms of Weights\n",
"\n",
"TODO: cleanup this part"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {
"scrolled": false
},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAncAAE3nCAYAAABZ72SxAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3X2YZVV55/3vDxSYiAikCUGgadRGJb6gdkAv40uCYosZ\nMNFokziCQ6bHFzQvj3mC8XmU4DiDyUSiEY1EO4ITAUOidBSDCBITA0oTEQVFWkBpQGkFEQKiwD1/\n7FXkUFZ1n6quOqdq1/dzXeeqs9dee5971Tl9+q699lorVYUkSZL6YbtxByBJkqS5Y3InSZLUIyZ3\nkiRJPWJyJ0mS1CMmd5IkST1icidJktQjJneSJEk9YnKn3klyUZLfXgBxHJPkX8YdhyRpaTG504KX\nZK8k65PclKSSrJiizg5Jvpdk59FHODtJViT5bJK7knw9yfPGHZMkafEzudNicD/wj8BLtlDn2cDl\nVXXnXL1okofM1bmmcQbwJeBngTcDZyfZY55fU5LUcyZ3mpUk+yb5+ySbk3w/yXuSbJfk/0vyrSS3\nJDk9ySNa/RXtqtvRSb7drrK9ue17ZJK7k+w+cP6ntDoPrarvVtV7gUu3ENLhwLlTxLlXkiuS/EHb\nfkSSDya5OcmNSf5Hku3bvmOSfD7JyUm+D5ww0bWa5H8nuS3JdUleOHD+ac+3ld/fAcBTgbdW1d1V\n9XfAV9hyAitJ0laZ3GnGWvLyCeBbwApgb+BM4Jj2+GXgUcDOwHsmHf5LwGOBQ4G3JHl8Vd0EXMyD\nE5vfBM6uqp8MGdbhwCcnxbk/8E/Ae6rqT1vxh4B7gccATwEOAwbvzzsEuBbYE3j7QNnVwDLgT4AP\nJsmQ5xuM5xNJjm+bvwBcW1V3DFT5ciuXJGnWTO40GwcDjwT+oKr+vap+VFX/AvwW8M6qurZ1j74J\nWDOpe/OP25WqL9MlM09u5R8BjgJoidOaVrZVSR4NPKSqrh4oPhD4LN2VsVNbvT3pksDfbXHfApzc\nXmvCTVX1F1V1b1Xd3cq+VVV/VVX3AacBewF7Dnm+B1TVr1bVSW1zZ+D2SVVuBx4+TJslSZrOfN9T\npH7aly7huXdS+SPpruZN+BbdZ2zPgbLvDDy/iy7JAfg74C+S7AUcQHef3T8PGc/hwKcmlf0WsBE4\ne6BsP+ChwM3/ceGN7YAbBuoMPv+pmKvqrnbszsDuQ5xvOncCu0wq2wW4Y4q6kiQNzSt3mo0bgOVT\nDDi4iS6BmrCcrsvyu1s7YVXdBnwaeDldl+yZVVVDxjPV/XYnAN8DPjJwD9wNwD3AsqratT12qarB\nrtBhX3PY803nSuBRSQav1D25lUuSNGsmd5qNLwI3AycleViSnZI8k2705+8l2b9NSfI/gbOmuMI3\nnY8ArwReyqQu2SQ7ATu2zR3bNkl+hq6b+LOTzvUT4DeAhwGnJ9muqm6mSyD/LMkubQDIo5M8Z2bN\n72zL+arqG8DlwFvb7+/XgCfRXcGUJGnWTO40Y+3es/9MN4jg28Amuitu64APA58DrgN+BLx+Bqde\nD6wEvtPuyRt0N11XJsDX2zbArwAXV9WPpojzx8Cv03ULr0uyHV3yuANwFXAbXbftXjOIcbKhz5fk\nU0n+aKBoDbCqHXcS8NKq2rwNsUiSRIbv+ZIWniTvBb7apkqRJGnJ88qdFrvLgY+NOwgtLUnWtbkc\nvzrN/iR5d5KNbZ7Fpw7sOzrJNe1x9OiilrRUeOVOkmYoybPpbhM4vaqeMMX+w+luSTicbp7Ed1XV\nIW2i7g103fEFXAY8rQ0okqQ54ZU7SZqhqvoccOsWqhxJl/hVVV0C7Nqm+XkBcH5V3doSuvOB1fMf\nsaSlxOROkube3jx4vsNNrWy6ckmaM0tmEuNly5bVihUrxh2GpBG67LLLvldVe4w7jtlIshZYC/Cw\nhz3saY973OPGHJGkUdqW768lk9ytWLGCDRs2
jDsMSSOU5FtbrzUvbqRbyWXCPq3sRuC5k8ovmuoE\nbdm8UwFWrVpVfn9JS8u2fH/ZLStJc2898Mo2avbpwO1t0uvzgMOS7JZkN+CwViZJc2bJXLmTpLmS\n5Ay6K3DLkmwC3kq3zjBV9Zd0y+EdTre+8V3Aq9q+W5O8Dbi0nerEqtrSwAxJmjGTOy1qK47/5IO2\nrz/pRWOKREtJVR21lf0FvG6afevoVnORpHlht6wkSVKPmNxJkiT1iMmdJElSj5jcSZIk9YjJnSRJ\nUo+Y3EmSJPWIyZ0kSVKPmNxJkiT1iMmdJElSj5jcSZIk9YjJnSRJUo+Y3EmSJPWIyZ0kzUKS1Umu\nTrIxyfFT7D85yeXt8Y0kPxjYd9/AvvWjjVxS3z1k3AEAJNkXOB3YEyjg1Kp6V5LdgbOAFcD1wMuq\n6rYkAd4FHA7cBRxTVf82jtglLT1JtgdOAZ4PbAIuTbK+qq6aqFNVvzdQ//XAUwZOcXdVHTSqeCUt\nLQvlyt29wP9TVQcCTwdel+RA4HjggqpaCVzQtgFeCKxsj7XA+0YfsqQl7GBgY1VdW1U/Bs4EjtxC\n/aOAM0YSmaQlb0Ekd1V188SVt6q6A/gasDfdl+VprdppwIvb8yOB06tzCbBrkr1GHLakpWtv4IaB\n7U2t7Kck2Q/YH7hwoHinJBuSXJLkxVMdJ0mztSC6ZQclWUHXffEFYM+qurnt+g5dty1M/8V6M5K0\nsKwBzq6q+wbK9quqG5M8CrgwyVeq6puDByVZS9czwfLly0cXraRFb0FcuZuQZGfg74DfraofDu6r\nqqK7H28m51vb/jresHnz5jmMVNISdyOw78D2Pq1sKmuY1CVbVTe2n9cCF/Hg+/Em6pxaVauqatUe\ne+wxFzFLWiIWTHKX5KF0id3fVNXft+LvTnS3tp+3tPKhvlj9cpQ0Ty4FVibZP8kOdAncT416TfI4\nYDfg4oGy3ZLs2J4vA54JXDX5WEmarQWR3LXRrx8EvlZV7xzYtR44uj0/GjhnoPyV6TwduH2g+1aS\n5lVV3QscB5xHd4/wR6vqyiQnJjlioOoa4MzW8zDh8cCGJF8GPgucNDjKVpK21UK55+6ZwH8BvpLk\n8lb2R8BJwEeTHAt8C3hZ23cu3TQoG+mmQnnVaMOVtNRV1bl030WDZW+ZtH3CFMf9K/DEeQ1O0pK2\nIJK7qvoXINPsPnSK+gW8bl6DkiRJWoQWRLesJEmS5obJnSRJUo+Y3EmSJPWIyZ0kSVKPmNxJkiT1\nyIIYLSsNY8Xxnxx3CJIkLXheuZMkSeoRkztJkqQeMbmTJEnqEZM7SZKkHjG5k6RZSLI6ydVJNiY5\nfor9xyTZnOTy9vjtgX1HJ7mmPY4ebeSS+s7RspI0Q0m2B04Bng9sAi5Nsr6qrppU9ayqOm7SsbsD\nbwVWAQVc1o69bQShS1oCvHInSTN3MLCxqq6tqh8DZwJHDnnsC4Dzq+rWltCdD6yepzglLUEmd5I0\nc3sDNwxsb2plk70kyRVJzk6y7wyPlaRZsVtWvTLVRMfXn/SiMUQi8Q/AGVV1T5L/DpwG/MqwBydZ\nC6wFWL58+fxEKKmXvHInSTN3I7DvwPY+rewBVfX9qrqnbX4AeNqwx7bjT62qVVW1ao899pizwCX1\nn8mdJM3cpcDKJPsn2QFYA6wfrJBkr4HNI4CvtefnAYcl2S3JbsBhrUyS5oTdspI0Q1V1b5Lj6JKy\n7YF1VXVlkhOBDVW1HnhDkiOAe4FbgWPasbcmeRtdgghwYlXdOvJGSOotkztJmoWqOhc4d1LZWwae\nvwl40zTHrgPWzWuAkpYsu2UlSZJ6xOROkiSpRxZEt2ySdcCvArdU1RNa2e7AWcAK4HrgZVV1W5IA\n7wIOB+4CjqmqfxtH3FocnB5FkrSULJQrdx/ip2doPx64oKpWAhe0bYAXAivbYy3wvhHFKEmStOAt\niOSuqj5H
N5ps0JF0k37Sfr54oPz06lwC7DppygFJkqQla0Ekd9PYs6pubs+/A+zZnrt0jyRJ0jQW\ncnL3gKoqoGZ6XJK1STY
"text/plain": [
"<matplotlib.figure.Figure at 0x7f95b256f128>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"# Pick layer types to display\n",
"LAYER_TYPES = ['Conv2D', 'Dense', 'Conv2DTranspose']\n",
"# Get layers\n",
"layers = model.get_trainable_layers()\n",
"layers = list(filter(lambda l: l.__class__.__name__ in LAYER_TYPES, \n",
" layers))\n",
"# Display Histograms\n",
"fig, ax = plt.subplots(len(layers), 2, figsize=(10, 3*len(layers)),\n",
" gridspec_kw={\"hspace\":1})\n",
"for l, layer in enumerate(layers):\n",
" weights = layer.get_weights()\n",
" for w, weight in enumerate(weights):\n",
" tensor = layer.weights[w]\n",
" ax[l, w].set_title(tensor.name)\n",
" _ = ax[l, w].hist(weight.flatten(), 50)\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.2"
}
},
"nbformat": 4,
"nbformat_minor": 2
}