diff --git a/classification/evaluation/eval-test-model.ipynb b/classification/evaluation/eval-test-model.ipynb index f3682f2..94e72b3 100644 --- a/classification/evaluation/eval-test-model.ipynb +++ b/classification/evaluation/eval-test-model.ipynb @@ -119,7 +119,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 33, "id": "63f675ab", "metadata": {}, "outputs": [ @@ -127,7 +127,7 @@ "name": "stdout", "output_type": "stream", "text": [ - " 100% |█████████████████| 640/640 [8.7m elapsed, 0s remaining, 1.4 samples/s] \n" + " 100% |█████████████████| 640/640 [8.9m elapsed, 0s remaining, 1.4 samples/s] \n" ] } ], @@ -151,7 +151,7 @@ " bounding_box=rel_box,\n", " confidence=int(row['cls_conf'])))\n", "\n", - " sample[\"predictions_yolo_resnet_final\"] = fo.Detections(detections=detections)\n", + " sample[\"predictions_model_optimized_relabeled\"] = fo.Detections(detections=detections)\n", " sample.save()" ] }, @@ -167,7 +167,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 34, "id": "68cfdad2", "metadata": {}, "outputs": [ @@ -176,17 +176,17 @@ "output_type": "stream", "text": [ "Evaluating detections...\n", - " 100% |█████████████████| 640/640 [2.2s elapsed, 0s remaining, 278.4 samples/s] \n", + " 100% |█████████████████| 640/640 [2.9s elapsed, 0s remaining, 242.0 samples/s] \n", "Performing IoU sweep...\n", - " 100% |█████████████████| 640/640 [2.4s elapsed, 0s remaining, 270.2 samples/s] \n" + " 100% |█████████████████| 640/640 [2.8s elapsed, 0s remaining, 235.9 samples/s] \n" ] } ], "source": [ "results = dataset.view().evaluate_detections(\n", - " \"predictions_yolo_resnet_final\",\n", + " \"predictions_model_optimized_relabeled\",\n", " gt_field=\"ground_truth\",\n", - " eval_key=\"eval_yolo_resnet_final\",\n", + " eval_key=\"eval_model_optimized_relabeled\",\n", " compute_mAP=True,\n", ")" ] @@ -203,7 +203,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 5, "id": "86b90e80", "metadata": {}, "outputs": [], @@ -216,7 +216,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 6, "id": "e34a18f4", "metadata": {}, "outputs": [], @@ -237,6 +237,146 @@ "The code for the LaTeX table of the classification report can be printed by first converting the results to a pandas DataFrame and then calling the `to_latex()` method of the DataFrame. This code can then be inserted into the LaTeX document." 
] }, + { + "cell_type": "code", + "execution_count": 20, + "id": "f7ad63b0", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\\begin{tabular}{lrrrr}\n", + "\\toprule\n", + "{} & precision & recall & f1-score & support \\\\\n", + "\\midrule\n", + "Healthy & 0.679 & 0.525 & 0.592 & 766.0 \\\\\n", + "Stressed & 0.646 & 0.447 & 0.529 & 494.0 \\\\\n", + "micro avg & 0.667 & 0.494 & 0.568 & 1260.0 \\\\\n", + "macro avg & 0.663 & 0.486 & 0.560 & 1260.0 \\\\\n", + "weighted avg & 0.666 & 0.494 & 0.567 & 1260.0 \\\\\n", + "\\bottomrule\n", + "\\end{tabular}\n", + "\n", + "0.3374377395168513\n" + ] + } + ], + "source": [ + "results_df = pd.DataFrame(results.report()).transpose().round(3)\n", + "\n", + "# Export DataFrame to LaTeX tabular environment\n", + "print(results_df.to_latex())\n", + "# YOLO second hyp with Resnet optimized and relabeled dataset\n", + "print(results.mAP())" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "d73cca50", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\\begin{tabular}{lrrrr}\n", + "\\toprule\n", + "{} & precision & recall & f1-score & support \\\\\n", + "\\midrule\n", + "Healthy & 0.653 & 0.604 & 0.628 & 766.0 \\\\\n", + "Stressed & 0.566 & 0.492 & 0.527 & 494.0 \\\\\n", + "micro avg & 0.620 & 0.560 & 0.589 & 1260.0 \\\\\n", + "macro avg & 0.610 & 0.548 & 0.577 & 1260.0 \\\\\n", + "weighted avg & 0.619 & 0.560 & 0.588 & 1260.0 \\\\\n", + "\\bottomrule\n", + "\\end{tabular}\n", + "\n", + "0.36171308664990176\n" + ] + } + ], + "source": [ + "results_df = pd.DataFrame(results.report()).transpose().round(3)\n", + "\n", + "# Export DataFrame to LaTeX tabular environment\n", + "print(results_df.to_latex())\n", + "# YOLO original with Resnet optimized and relabeled dataset\n", + "print(results.mAP())" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "7ba5cd14", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\\begin{tabular}{lrrrr}\n", + "\\toprule\n", + "{} & precision & recall & f1-score & support \\\\\n", + "\\midrule\n", + "Healthy & 0.665 & 0.554 & 0.604 & 766.0 \\\\\n", + "Stressed & 0.639 & 0.502 & 0.562 & 494.0 \\\\\n", + "micro avg & 0.655 & 0.533 & 0.588 & 1260.0 \\\\\n", + "macro avg & 0.652 & 0.528 & 0.583 & 1260.0 \\\\\n", + "weighted avg & 0.655 & 0.533 & 0.588 & 1260.0 \\\\\n", + "\\bottomrule\n", + "\\end{tabular}\n", + "\n", + "0.35812991936475147\n" + ] + } + ], + "source": [ + "results_df = pd.DataFrame(results.report()).transpose().round(3)\n", + "\n", + "# Export DataFrame to LaTeX tabular environment\n", + "print(results_df.to_latex())\n", + "# YOLO optimized with Resnet optimized and relabeled dataset\n", + "print(results.mAP())" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "f2b178e8", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\\begin{tabular}{lrrrr}\n", + "\\toprule\n", + "{} & precision & recall & f1-score & support \\\\\n", + "\\midrule\n", + "Healthy & 0.711 & 0.555 & 0.623 & 766.0 \\\\\n", + "Stressed & 0.570 & 0.623 & 0.596 & 494.0 \\\\\n", + "micro avg & 0.644 & 0.582 & 0.611 & 1260.0 \\\\\n", + "macro avg & 0.641 & 0.589 & 0.609 & 1260.0 \\\\\n", + "weighted avg & 0.656 & 0.582 & 0.612 & 1260.0 \\\\\n", + "\\bottomrule\n", + "\\end{tabular}\n", + "\n", + "0.38379973332791195\n" + ] + } + ], + "source": [ + "results_df = 
pd.DataFrame(results.report()).transpose().round(3)\n", + "\n", + "# Export DataFrame to LaTeX tabular environment\n", + "print(results_df.to_latex())\n", + "# YOLO original with Resnet original and relabeled dataset\n", + "print(results.mAP())" + ] + }, { "cell_type": "code", "execution_count": 10, @@ -383,7 +523,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 35, "id": "da05e2ba", "metadata": {}, "outputs": [ @@ -397,7 +537,7 @@ }, { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjgAAACoCAYAAADtjJScAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8qNh9FAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAxZUlEQVR4nO3de3BT55kG8McQA75JghiIi48DDobYshMwoV3LuylsaFd4trA4jcVs6A6kMe7SGbwl0G3Z4iQmk83ETilM2lmiJEy3zay12TiQTLHCpSEztczN3GzZ4bZ2JHEztEgy2OayaP9wz6mOLrasu+XnN8OApE/Sa8nn5T3f+S5JLpfLBSIiIqIEMi7WARARERGFGwscIiIiSjgscIiIiCjhsMAhIiKihMMCh4iIiBIOCxwiIiJKOCxwiIiIKOE8FOsAIunkyZNwuVxITk6OdShEY8K9e/eQlJSE+fPnxzqUqGGeIYqekeSYhO7BcblcCGQdQ5fLhbt37wbUNhYYX2jiPT4g/mMMNL5Aj7lEwjwTHYwvNIkS30hyTEL34IhnVEVFRUO26+vrQ2dnJ2bPno3U1NRohDYijC808R4fEP8xBhpfW1tbFKOKD8wz0cH4QpMo8Y0kx0S9wHE6nTAYDACAyspKn22MRiMAwOFwQBAEaDSaqMVHRKMf8wwRRf0Slclkgt1u9/u41WqFyWSCVquFTqeDXq+PXnBElBCYZ4go6j04Wq0WDocDTqfT5+MmkwkZGRnS7YyMDJhMpqDPrlwuF/r6+oZs09/fj7a2Nhw7dgwTJkyQ7lcoFMjKysLdu3fR1dXl9by5c+cCACwWC/r7+2WPZWVlQaFQwG6349q1a9L9qampKCgowOzZswP+GcTX9nyPeMH4QhfvMQYan8vlQlJSUjRCGtJYyTN5eXlYsGBBUDH7is/973jD+EKTKPGNJMfE3Rgci8UClUol3VapVH6TVCDu3buHzs7OIdvY7Xa88MILURt89eSTT+K9994b8fO6u7vDH0wYMb7QxXuMgcTn/p93vEqkPLNr1y50dnbiW9/6FiZPnhzy6yXC72AsMb7QhDPHxF2B44vD4Qj6ucnJycP2lvT39+P999/HwMBARHtwvvrqK9TW1uJf//VfMWPGDKSlpWH8+PHD/gz9/f3o7u7GzJkzkZKSMmz7aGN8oYv3GAON78KFC1GMKrxGW54R88mDBw/w5ptvYtmyZcjPzw/6Z0iU38FYYXyhiUSOibsCJycnR3YmZbfbIQhC0K+XlJQU0IjxoqIi5Ofn+227ePFiv88tLS0NKJYTJ05ICSkrKwutra0oLi4O6LkAkJKSEpej30WML3TxHuNw8cXD5alAJEKeEfPJxIkTAYTvd2e0/w7GGuMLTThzTNysgyMmG41GI5sGZrPZOLuBiMKCeYZo7Ih6D47JZEJzczN6e3shCAK0Wi0AoLy8HI2NjRAEAWVlZTAajXA4HFi7dm20QySiUY55hoiiXuBoNBqfZ0oHDhyQ/i0mIyKiYIylPJOWloZvf/vbsllhRBSHY3DGAnEg4XCzLoDB6aszZsyIdEhENErl5OTgs88+i3UYRHGHBU4UiWdYW7ZsAQCsWrUqoOf993//N+7evYuBgQFMmzYNeXl5EYuRiEaX//u//4PT6Qx4VibRWMECJ4ry8vJw7tw59Pb2BtTeZrNh+fLlqKiokN2/Z88eZGdny+7LyMhg4UM0Bp07dw5f//rXRzwrkyjRscCJsry8PFy8eBE/+tGPUFVVhaysLADAjBkzMH36dNy8eRNdXV3IyMjAsmXLcO7cOfT09KC7uxsTJkxARUUFli9f7vO13QsfFjxERDSWscCJgQcPHuDTTz/Fp59+Kt331ltvYcOGDThw4IDUY3Pu3Dnk5eVhxowZmDRpEvLz8332AIk9PZ6Fz549e5Cfn89Ch4iIxhwWODHg61KVOJB4yZIl+O1vf4tVq1b5vJTlq1gpLi6WvZ5nwSMWSkRERGMFC5wY8VdwTJ482Wu59VOnTqGrqwsDAwPSEtb5+fmy5azdX08seI4ePeq3UCIiIkpkLHDikDh+Rlw6fvHixbh7966sTXt7O9Rqtd/XyMvLkwob9+noHJtDlFjy8vLQ09Mj2zyUiFjgxKW8vDz8/ve/x9SpUwEAn3/+Obq6umSbkCkUCpw4cUJ6jq/CRZyW7jkdnZesiBLHQw89JOUKIvoLFjhxyn0a+Lx58zBx4kRpk74//elPWLduHQwGg9Rm3Lhx+PLLL/HII4/g/PnzUsHjPjans7MTq1atwtGjR/1etmIPD9HoYrVa8corr2Dbtm147LHHYh0OUdxggTMKTZkyBVu3bsWPf/xjAIODivfv3w+VSoXW1lZpR2LPnhp/PTqeON2caPS4desWPv30U7zyyiuxDoUorrDAGaU8BxUvWrQICoUCCxYs8DsLa7iFBjndnIiIEgULnAShUCgADPa4eM7Camtrw6RJk5CXlzdkkTLcdHP27BAR0WjBAicBZWRkQKlUSrOwli5dikuXLgU0uNjXdPPOzs4he3a4GSgREcWbcbEOgMIvLy8Pp06dkmZWbNu2DQBkg4udTmfAryVuGdHa2orW1lbs2bMHALB8+XLMmTMHv/vd72CxWCLwkxDRcKZNm4a33nqLJxpEHljgJKiZM2dK/543bx6AwcHFra2tuH79Ov7t3/4N58+fD/j18vLyUFxcjOLiYqngEQudiooKlJeX48KFC+H8EYgoAA8//DA2bNiA6dOnxzoUorjCAmcMEAcXt7a2YsGCBbBarfjVr36Fo0eP4sSJE/jqq68AAP39/QEXPe49O++99x4A4Pjx4yMqmogodE6nEx9++CFu3rwZ61CI4kpYx+DYbDbZ+i0UPzyniz948ECaLv79738f7777Lo4cOYLFixePaNZUXl4e+vv7pdcBOBiZIoc5xtulS5ek3tnJkyfHOhyiuBFSgdPZ2Qm73S7dNhgM+MUvfhFiSBRpntPFH374YQCD6+sA8Nqks7OzUypifBUss2fPRmNjI+7fv4+Kigqfg5FZ8FAwmGOIKFhBFzjV1
<remainder of base64-encoded PNG omitted>\n", + "image/png": "<base64-encoded PNG data omitted>\n", "text/plain": [ "
" ] @@ -415,10 +555,10 @@ "# Set the labels for the legends manually\n", "ax[0].get_lines()[0].set_linestyle('dashed')\n", "ax[1].get_lines()[0].set_linestyle('dashed')\n", - "ax[0].legend(['AP: 0.52, Healthy', 'AP: 0.46, Stressed'], frameon=False)\n", - "ax[1].legend(['AP: 0.31, Healthy', 'AP: 0.29, Stressed'], frameon=False)\n", + "#ax[0].legend(['AP: 0.46, Healthy', 'AP: 0.48, Stressed'], frameon=False)\n", + "#ax[1].legend(['AP: 0.16, Healthy', 'AP: 0.16, Stressed'], frameon=False)\n", "fig.tight_layout()\n", - "fig.savefig(fig_save_dir + 'APmodel-final.pdf', format='pdf', bbox_inches='tight')" + "fig.savefig(fig_save_dir + 'APmodel-model-optimized-relabeled.pdf', format='pdf', bbox_inches='tight')" ] }, { @@ -454,7 +594,7 @@ "labels = ['Healthy', 'Stressed', '(none)']\n", "sns.heatmap(matrix, annot=True, xticklabels=labels, yticklabels=labels, fmt=\".0f\", cmap=sns.cubehelix_palette(as_cmap=True, start=.3, hue=1, light=.9))\n", "fig.tight_layout()\n", - "fig.savefig(fig_save_dir + 'CMmodel-final.pdf', format='pdf', bbox_inches='tight')" + "fig.savefig(fig_save_dir + 'CMmodel-relabeled.pdf', format='pdf', bbox_inches='tight')" ] }, { @@ -469,7 +609,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 21, "id": "bfb39b5d", "metadata": {}, "outputs": [ @@ -477,6 +617,8 @@ "name": "stdout", "output_type": "stream", "text": [ + "Connected to FiftyOne on port 5151 at localhost.\n", + "If you are not connecting to a remote session, you may need to start a new session and specify a port\n", "Session launched. Run `session.show()` to open the App in a cell output.\n" ] }, diff --git a/classification/evaluation/relabel.ipynb b/classification/evaluation/relabel.ipynb index 894ff9c..a4ce0ba 100644 --- a/classification/evaluation/relabel.ipynb +++ b/classification/evaluation/relabel.ipynb @@ -33,7 +33,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 2, "id": "cfd472e0", "metadata": {}, "outputs": [], @@ -99727,16 +99727,48 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "id": "9e57cd86", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Downloading labels from Label Studio...\n", + "Download complete\n", + "Loading labels for field 'ground_truth'...\n", + " 100% |█████████████████| 639/639 [1.1s elapsed, 0s remaining, 576.6 samples/s] \n" + ] + }, + { + "data": { + "text/html": [ + "\n", + " \n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "anno_key = \"labelstudio_basic_recipe\"\n", "\n", "# Merge annotations back into FiftyOne dataset\n", "dataset = fo.load_dataset(\"dataset\")\n", - "dataset.load_annotations(anno_key)\n", + "dataset.load_annotations(anno_key, url=LABEL_STUDIO_URL, api_key=API_KEY)\n", "\n", "# Load the view that was annotated in the App\n", "view = dataset.load_annotation_view(anno_key)\n", @@ -99745,12 +99777,45 @@ "# Step 6: Cleanup\n", "\n", "# Delete tasks from Label Studio\n", - "results = dataset.load_annotation_results(anno_key)\n", - "results.cleanup()\n", + "#results = dataset.load_annotation_results(anno_key)\n", + "#results.cleanup()\n", + "\n", + "# Delete run record (not the labels) from FiftyOne\n", + "#dataset.delete_annotation_run(anno_key)" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "65f64f8b", + "metadata": {}, + "outputs": [], + "source": [ + "# Delete tasks from Label Studio\n", + "#results = 
dataset.load_annotation_results(anno_key)\n", + "#results.cleanup()\n", "\n", "# Delete run record (not the labels) from FiftyOne\n", "dataset.delete_annotation_run(anno_key)" ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "ef4fd54f", + "metadata": {}, + "outputs": [], + "source": [ + "dataset.save()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b099682d", + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { diff --git a/thesis/graphics/APmodel-model-optimized-relabeled.pdf b/thesis/graphics/APmodel-model-optimized-relabeled.pdf new file mode 100644 index 0000000..2786655 Binary files /dev/null and b/thesis/graphics/APmodel-model-optimized-relabeled.pdf differ diff --git a/thesis/graphics/APmodel-model-original-relabeled.pdf b/thesis/graphics/APmodel-model-original-relabeled.pdf new file mode 100644 index 0000000..13940a7 Binary files /dev/null and b/thesis/graphics/APmodel-model-original-relabeled.pdf differ diff --git a/thesis/graphics/APmodel-relabeled.pdf b/thesis/graphics/APmodel-relabeled.pdf new file mode 100644 index 0000000..563d921 Binary files /dev/null and b/thesis/graphics/APmodel-relabeled.pdf differ diff --git a/thesis/graphics/APmodel-yolo-original-resnet-final-relabeled.pdf b/thesis/graphics/APmodel-yolo-original-resnet-final-relabeled.pdf new file mode 100644 index 0000000..58b053d Binary files /dev/null and b/thesis/graphics/APmodel-yolo-original-resnet-final-relabeled.pdf differ diff --git a/thesis/graphics/CMmodel-relabeled.pdf b/thesis/graphics/CMmodel-relabeled.pdf new file mode 100644 index 0000000..0298212 Binary files /dev/null and b/thesis/graphics/CMmodel-relabeled.pdf differ diff --git a/thesis/thesis.pdf b/thesis/thesis.pdf index 1a16369..55182b3 100644 Binary files a/thesis/thesis.pdf and b/thesis/thesis.pdf differ diff --git a/thesis/thesis.tex b/thesis/thesis.tex index 999971c..41caeb2 100644 --- a/thesis/thesis.tex +++ b/thesis/thesis.tex @@ -47,6 +47,8 @@ \nonzeroparskip % Create space between paragraphs (optional). \setlength{\parindent}{0pt} % Remove paragraph identation (optional). +\setcounter{tocdepth}{3} + \makeindex % Use an optional index. \makeglossaries % Use an optional glossary. %\glstocfalse % Remove the glossaries from the table of contents. @@ -117,18 +119,45 @@ % Switch to arabic numbering and start the enumeration of chapters in the table of content. \mainmatter -% \chapter{Introduction} -% \todo{Enter your text here.} +\chapter{Introduction} +\label{chap:introduction} -\chapter{Evaluation} +\section{Motivation and Problem Statement} +\label{sec:motivation} + +\section{Thesis Structure} +\label{sec:structure} + +\chapter{Theoretical Background} +\label{chap:background} + +\section{Object Detection} +\label{sec:background-detection} + +\section{Classification} +\label{sec:background-classification} + +\section{Related Work} +\label{sec:related-work} + +\chapter{Prototype Development} +\label{chap:development} + +\section{Object Detection} +\label{sec:development-detection} + +\section{Classification} +\label{sec:Classification} + +\chapter{Results} +\label{chap:results} The following sections contain a detailed evaluation of the model in various scenarios. First, we present metrics from the training phases of the constituent models. Second, we employ methods from the field of \gls{xai} such as \gls{grad-cam} to get a better understanding of the models' abstractions. 
Finally, we turn to the models' aggregate -performance on the test set and discuss whether the initial goals set -by the problem description have been met or not. +performance on the test set. \section{Object Detection} \label{sec:yolo-eval} @@ -149,7 +178,7 @@ consists of 91479 images with a roughly 85/5/10 split for training, validation and testing, respectively. \subsection{Training Phase} -\label{ssec:yolo-training-phase} +\label{ssec:yolo-training} The object detection model was trained for 300 epochs on 79204 images with 284130 ground truth labels. The weights from the best-performing @@ -240,7 +269,7 @@ before overfitting occurs. \end{figure} \subsection{Test Phase} -\label{ssec:yolo-test-phase} +\label{ssec:yolo-test} Of the 91479 images around 10\% were used for the test phase. These images contain a total of 12238 ground truth @@ -337,11 +366,10 @@ Figure~\ref{fig:hyp-opt-fitness} shows the model's fitness during training for each epoch. After the highest fitness of 0.6172 at epoch 27, the performance quickly declines and shows that further training would likely not yield improved results. The model converges to its -highest fitness much earlier than the non-optimized version discussed -in section~\ref{ssec:yolo-training-phase}, which indicates that the -adjusted parameters provide a better starting point in general. -Furthermore, the maximum fitness is 0.74\% higher than in the -non-optimized version. +highest fitness much earlier than the non-optimized version, which +indicates that the adjusted parameters provide a better starting point +in general. Furthermore, the maximum fitness is 0.74\% higher than in +the non-optimized version. \begin{figure} \centering @@ -426,7 +454,7 @@ is lower by 1.8\%. \end{figure} \section{Classification} -\label{sec:resnet-eval} +\label{sec:classifier-eval} The classifier receives cutouts from the object detection model and determines whether the image shows a stressed plant or not. To achieve @@ -448,7 +476,7 @@ regarding training and inference time as well as required space. The 50 layer architecture (\gls{resnet}50) is adequate for our use case. \subsection{Training Phase} -\label{ssec:resnet-training-phase} +\label{ssec:classifier-training} The dataset was split 85/15 into training and validation sets. The images in the training set were augmented with a random crop to arrive @@ -481,15 +509,15 @@ feature extraction capabilities. \end{figure} \subsection{Hyper-parameter Optimization} -\label{ssec:resnet-hyp-opt} +\label{ssec:classifier-hyp-opt} In order to improve the aforementioned accuracy values, we perform hyper-parameter optimization across a wide range of -parameters. Table~\ref{tab:resnet-hyps} lists the hyper-parameters and -their possible values. Since the number of all combinations of values -is 11520 and each combination is trained for 10 epochs with a training -time of approximately six minutes per combination, exhausting the -search space would take 48 days. Due to time limitations, we have +parameters. Table~\ref{tab:classifier-hyps} lists the hyper-parameters +and their possible values. Since the number of all combinations of +values is 11520 and each combination is trained for 10 epochs with a +training time of approximately six minutes per combination, exhausting +the search space would take 48 days. Due to time limitations, we have chosen to not search exhaustively but to pick random combinations instead. 
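The search-budget figures above, and the coverage claim a few paragraphs below (138 random iterations for a 75% chance of landing within the top 1% of configurations), can be sanity-checked with a few lines of Python. This is an illustrative back-of-the-envelope calculation, not code from the repository; the 11520 combinations and six minutes per trial are the numbers quoted in the text.

```python
from math import log

# Exhaustive grid search: 11520 combinations, roughly 6 minutes of training each.
combinations = 11520
minutes_per_trial = 6
print(combinations * minutes_per_trial / 60 / 24)        # -> 48.0 days

# Random search: probability that at least one of n uniformly drawn trials
# falls into the top 1% of the search space is 1 - 0.99**n.
top_fraction = 0.01
n_trials = 138
print(round(1 - (1 - top_fraction) ** n_trials, 2))      # -> 0.75

# Equivalently, the number of trials needed for a 75% success probability:
print(round(log(1 - 0.75) / log(1 - top_fraction)))      # -> 138
```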
Random search works surprisingly well---especially compared to grid search---in a number of domains, one of which is hyper-parameter @@ -513,13 +541,13 @@ optimization~\cite{bergstra2012}. \end{tabular} \caption{Hyper-parameters and their possible values during optimization.} - \label{tab:resnet-hyps} + \label{tab:classifier-hyps} \end{table} The random search was run for 138 iterations which equates to a 75\% probability that the best solution lies within 1\% of the theoretical -maximum~\eqref{eq:opt-prob}. Figure~\ref{fig:resnet-hyp-results} shows -three of the eight parameters and their impact on a high +maximum~\eqref{eq:opt-prob}. Figure~\ref{fig:classifier-hyp-results} +shows three of the eight parameters and their impact on a high F1-score. \gls{sgd} has less variation in its results than Adam~\cite{kingma2017} and manages to provide eight out of the ten best results. The number of epochs to train for was chosen based on @@ -549,10 +577,10 @@ figure~\ref{fig:classifier-training-metrics}. produced the best iteration with an F1-score of 0.9783. Adam tends to require more customization of its parameters than \gls{sgd} to achieve good results.} - \label{fig:resnet-hyp-results} + \label{fig:classifier-hyp-results} \end{figure} -Table~\ref{tab:resnet-final-hyps} lists the final hyper-parameters +Table~\ref{tab:classifier-final-hyps} lists the final hyper-parameters which were chosen to train the improved model. In order to confirm that the model does not suffer from overfitting or is a product of chance due to a coincidentally advantageous train/test split, we @@ -579,10 +607,10 @@ is robust against variations in the training set. \end{tabular} \caption[Hyper-parameters for the optimized classifier.]{Chosen hyper-parameters for the final, improved model. The difference to - the parameters listed in Table~\ref{tab:resnet-hyps} comes as a - result of choosing \gls{sgd} over Adam. The missing four + the parameters listed in Table~\ref{tab:classifier-hyps} comes as + a result of choosing \gls{sgd} over Adam. The missing four parameters are only required for Adam and not \gls{sgd}.} - \label{tab:resnet-final-hyps} + \label{tab:classifier-final-hyps} \end{table} \begin{figure} @@ -636,7 +664,7 @@ F1-score of 1 on the training set. \subsection{Class Activation Maps} -\label{ssec:resnet-cam} +\label{ssec:classifier-cam} Neural networks are notorious for their black-box behavior, where it is possible to observe the inputs and the corresponding outputs, but @@ -666,7 +694,7 @@ become progressively worse as we move to earlier convolutional layers as they have smaller receptive fields and only focus on less semantic local features.''~\cite[p.5]{selvaraju2020} -Turning to our classifier, figure~\ref{fig:resnet-cam} shows the +Turning to our classifier, figure~\ref{fig:classifier-cam} shows the \glspl{cam} for \emph{healthy} and \emph{stressed}. While the regions of interest for the \emph{healthy} class lie on the healthy plant, the \emph{stressed} plant is barely considered and mostly rendered as @@ -675,8 +703,8 @@ inputs to the \emph{stressed} classification, the regions of interest predominantly stay on the thirsty as opposed to the healthy plant. In fact, the large hanging leaves play a significant role in determining the class the image belongs to. This is an additional data point -confirming that the model focuses on the \emph{right} parts of the -image during classification. +confirming that the model focuses on the semantically meaningful parts +of the image during classification. 
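The class activation maps discussed above follow the Grad-CAM recipe: the gradient of the target class score is averaged over the spatial dimensions of the last convolutional block and used to weight that block's feature maps. The sketch below shows a minimal version of this computation for a two-class ResNet-50; the chosen layer, the class index and the lack of preprocessing are assumptions for illustration and do not come from the thesis code, which may use a different implementation.

```python
import torch
import torch.nn.functional as F
from torchvision import models

# Two-class ResNet-50 head (healthy / stressed); weights would normally be
# loaded from the trained classifier checkpoint rather than left random.
model = models.resnet50(weights=None)
model.fc = torch.nn.Linear(model.fc.in_features, 2)
model.eval()

activations, gradients = {}, {}
layer = model.layer4[-1]  # last convolutional block: most class-discriminative features
layer.register_forward_hook(lambda m, i, o: activations.update(value=o.detach()))
layer.register_full_backward_hook(lambda m, gi, go: gradients.update(value=go[0].detach()))

def grad_cam(image, class_idx):
    """image: normalized (1, 3, H, W) tensor; returns an (H, W) heatmap in [0, 1]."""
    logits = model(image)
    model.zero_grad()
    logits[0, class_idx].backward()
    # Grad-CAM weights: spatial mean of the gradients of each feature map.
    weights = gradients["value"].mean(dim=(2, 3), keepdim=True)       # (1, C, 1, 1)
    cam = F.relu((weights * activations["value"]).sum(dim=1))         # (1, h, w)
    cam = F.interpolate(cam.unsqueeze(1), size=image.shape[2:],
                        mode="bilinear", align_corners=False)[0, 0]
    cam -= cam.min()
    return (cam / cam.max().clamp(min=1e-8)).cpu()

heatmap = grad_cam(torch.randn(1, 3, 224, 224), class_idx=1)  # class 1 = "stressed" (assumed)
```

Overlaying such a heatmap on the input image (for example with matplotlib's `imshow` and an `alpha` value) produces overlays like the class-activation-map figure that follows.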
\begin{figure} \centering @@ -691,7 +719,7 @@ image during classification. class. The classifier focuses on the hanging leaves of the thirsty plant. The image was classified as \emph{stressed} with a confidence of 70\%.} - \label{fig:resnet-cam} + \label{fig:classifier-cam} \end{figure} @@ -727,20 +755,23 @@ the labels allowed to include more images in the test set because they could be labeled more easily. Additionally, going over the detections and classifications provided a comprehensive view on how the models work and what their weaknesses and strengths are. After the labels -have been corrected, the ground truth of the test set contains 662 -bounding boxes of healthy plants and 488 of stressed plants. +have been corrected, the ground truth of the test set contains 766 +bounding boxes of healthy plants and 494 of stressed plants. + +\subsection{Non-optimized Model} +\label{ssec:model-non-optimized} \begin{table} \centering \begin{tabular}{lrrrr} \toprule - {} & Precision & Recall & F1-score & Support \\ + {} & precision & recall & f1-score & support \\ \midrule - Healthy & 0.824 & 0.745 & 0.783 & 662.0 \\ - Stressed & 0.707 & 0.783 & 0.743 & 488.0 \\ - micro avg & 0.769 & 0.761 & 0.765 & 1150.0 \\ - macro avg & 0.766 & 0.764 & 0.763 & 1150.0 \\ - weighted avg & 0.775 & 0.761 & 0.766 & 1150.0 \\ + Healthy & 0.665 & 0.554 & 0.604 & 766 \\ + Stressed & 0.639 & 0.502 & 0.562 & 494 \\ + micro avg & 0.655 & 0.533 & 0.588 & 1260 \\ + macro avg & 0.652 & 0.528 & 0.583 & 1260 \\ + weighted avg & 0.655 & 0.533 & 0.588 & 1260 \\ \bottomrule \end{tabular} \caption{Precision, recall and F1-score for the aggregate model.} @@ -748,41 +779,39 @@ bounding boxes of healthy plants and 488 of stressed plants. \end{table} Table~\ref{tab:model-metrics} shows precision, recall and the F1-score -for both classes \emph{Healthy} and \emph{Stressed}. Both precision -and recall are balanced and the F1-score is high. Unfortunately, these -values do not take the accuracy of bounding boxes into account and -thus have only limited expressive power. +for both classes \emph{Healthy} and \emph{Stressed}. Precision is +higher than recall for both classes and the F1-score is at +0.59. Unfortunately, these values do not take the accuracy of bounding +boxes into account and thus have only limited expressive power. Figure~\ref{fig:aggregate-ap} shows the precision and recall curves for both classes at different \gls{iou} thresholds. The left plot shows the \gls{ap} for each class at the threshold of 0.5 and the -right one at 0.95. The \gls{map} is 0.6226 and calculated across all +right one at 0.95. The \gls{map} is 0.3581 and calculated across all classes as the median of the \gls{iou} thresholds from 0.5 to 0.95 in -0.05 steps. The difference between \gls{map}@0.5 and \gls{map}@0.95 is -fairly small which indicates that the bounding boxes encapsulate the -objects of interest well. The cliffs at around 0.77 (left) and 0.7 -(right) happen at a detection threshold of 0.5. The classifier's last -layer is a softmax layer which necessarily transforms the input into a -probability of showing either a healthy or stressed plant. If the -probability of an image showing a healthy plant is below 0.5, it is no -longer classified as healthy but as stressed. The threshold for -discriminating the two classes lies at the 0.5 value and is therefore -the cutoff for either class. +0.05 steps. The cliffs at around 0.6 (left) and 0.3 (right) happen at +a detection threshold of 0.5. 
The classifier's last layer is a softmax +layer which necessarily transforms the input into a probability of +showing either a healthy or stressed plant. If the probability of an +image showing a healthy plant is below 0.5, it is no longer classified +as healthy but as stressed. The threshold for discriminating the two +classes lies at the 0.5 value and is therefore the cutoff for either +class. \begin{figure} \centering - \includegraphics{graphics/APmodel.pdf} + \includegraphics{graphics/APmodel-model-optimized-relabeled.pdf} \caption[Aggregate model AP@0.5 and AP@0.95.]{Precision-recall curves for \gls{iou} thresholds of 0.5 and 0.95. The \gls{ap} of a specific threshold is defined as the area under the precision-recall curve of that threshold. The \gls{map} across \gls{iou} thresholds from 0.5 to 0.95 in 0.05 steps - \textsf{mAP}@0.5:0.95 is 0.6226.} + \textsf{mAP}@0.5:0.95 is 0.3581.} \label{fig:aggregate-ap} \end{figure} -\subsection{Hyper-parameter Optimization} -\label{ssec:model-hyp-opt} +\subsection{Optimized Model} +\label{ssec:model-optimized} So far the metrics shown in table~\ref{tab:model-metrics} are obtained with the non-optimized versions of both the object detection and @@ -790,7 +819,7 @@ classification model. Hyper-parameter optimization of the classifier led to significant model improvements, while the object detector has improved precision but lower recall and slightly lower \gls{map} values. To evaluate the final aggregate model which consists of the -individual optimized models, we run the same test as in +individual optimized models, we run the same test described in section~\ref{sec:aggregate-model}. \begin{table} @@ -799,11 +828,11 @@ section~\ref{sec:aggregate-model}. \toprule {} & precision & recall & f1-score & support \\ \midrule - Healthy & 0.664 & 0.640 & 0.652 & 662.0 \\ - Stressed & 0.680 & 0.539 & 0.601 & 488.0 \\ - micro avg & 0.670 & 0.597 & 0.631 & 1150.0 \\ - macro avg & 0.672 & 0.590 & 0.626 & 1150.0 \\ - weighted avg & 0.670 & 0.597 & 0.630 & 1150.0 \\ + Healthy & 0.711 & 0.555 & 0.623 & 766 \\ + Stressed & 0.570 & 0.623 & 0.596 & 494 \\ + micro avg & 0.644 & 0.582 & 0.611 & 1260 \\ + macro avg & 0.641 & 0.589 & 0.609 & 1260 \\ + weighted avg & 0.656 & 0.582 & 0.612 & 1260 \\ \bottomrule \end{tabular} \caption{Precision, recall and F1-score for the optimized aggregate @@ -813,63 +842,38 @@ section~\ref{sec:aggregate-model}. Table~\ref{tab:model-metrics-hyp} shows precision, recall and F1-score for the optimized model on the same test dataset of 640 images. All of -the metrics are significantly worse than for the non-optimized -model. Considering that the optimized classifier performs better than -the non-optimized version this is a surprising result. There are -multiple possible explanations for this behavior: - -\begin{enumerate} -\item The optimized classifier has worse generalizability than the - non-optimized version. -\item The small difference in the \gls{map} values for the object - detection model result in significantly higher error rates - overall. This might be the case because a large number of plants is - not detected in the first place and/or those which are detected are - more often not classified correctly by the classifier. As mentioned - in section~\ref{ssec:yolo-hyp-opt}, running the evolution of the - hyper-parameters for more generations could better the performance - overall. -\item The test dataset is tailored to the non-optimized version and - does not provide an accurate measure of real-world performance. 
The
-  test dataset was labeled by running the individual models on the
-  images and taking the predicted bounding boxes and labels as a
-  starting point for the labeling process. If the labels were not
-  rigorously corrected, the dataset will allow the non-optimized model
-  to achieve high scores because the labels are already in line with
-  what it predicts. Conversely, the optimized model might get closer
-  to the actual ground truth, but that truth is not what is specified
-  by the labels to begin with. If that is the case, the evaluation of
-  the non-optimized model is too favorably and should be corrected
-  down.
-\end{enumerate}
-
-Of these three possibilities, the second and third points are the most
-likely culprits. The first scenario is unlikely because the optimized
-classifier has been evaluated in a cross validation setting and the
-results do not lend themselves easily to such an
-interpretation. Dealing with the second scenario could allow the
-object detection model to perform better on its own, but would
-probably not explain the big difference in performance. Scenario three
-is the most likely one because the process of creating the test
-dataset can lead to favorable labels for the non-optimized model.
+the F1-scores improve for the optimized model. In particular,
+precision for the healthy class improves significantly while recall
+remains at the same level, resulting in a better F1-score for the
+healthy class. Precision for the stressed class is lower with the
+optimized model, but recall is significantly higher (0.623
+vs. 0.502). The higher recall yields a gain of roughly three points
+in the stressed-class F1-score. Overall, precision stays roughly the
+same while recall improves significantly, which also lifts the
+average F1-score across both classes noticeably.

 \begin{figure}
   \centering
-  \includegraphics{graphics/APmodel-final.pdf}
+  \includegraphics{graphics/APModel-model-original-relabeled.pdf}
   \caption[Optimized aggregate model AP@0.5 and
   AP@0.95.]{Precision-recall curves for \gls{iou} thresholds of 0.5
   and 0.95. The \gls{ap} of a specific threshold is defined as the
   area under the precision-recall curve of that threshold. The
   \gls{map} across \gls{iou} thresholds from 0.5 to 0.95 in 0.05
-  steps \textsf{mAP}@0.5:0.95 is 0.4426.}
+  steps \textsf{mAP}@0.5:0.95 is 0.3838.}
   \label{fig:aggregate-ap-hyp}
 \end{figure}

-Figure~\ref{fig:aggregate-ap-hyp} confirms the suspicions raised by
-the lower metrics from table~\ref{tab:model-metrics-hyp}. More
-iterations for the evolution of the object detection model would
-likely have a significant effect on \gls{iou} and the confidence
-values associated with the bounding boxes.
+Figure~\ref{fig:aggregate-ap-hyp} confirms the performance increase
+established in table~\ref{tab:model-metrics-hyp}. The \textsf{mAP}@0.5
+is higher for both classes, indicating that the optimized model
+detects plants better in general. The \textsf{mAP}@0.95 is slightly
+lower for the healthy class, which means that the confidence scores
+for the healthy class are slightly lower than with the non-optimized
+model. The result is that more plants are correctly detected and
+classified overall, but the confidence scores tend to be lower with
+the optimized model. The \textsf{mAP}@0.5:0.95 improves by about
+0.026.

 \backmatter

@@ -898,4 +902,5 @@ values associated with the bounding boxes.
 %%% Local Variables:
 %%% mode: latex
 %%% TeX-master: t
+%%% TeX-master: "thesis"
 %%% End: