From 4a7e3147c5de58d21c41e38586e34c5f17595793 Mon Sep 17 00:00:00 2001
From: hannandarryl <hannandarryl@gmail.com>
Date: Mon, 24 Oct 2022 16:16:26 +0000
Subject: [PATCH] Overdue push

---
 notebooks/Untitled.ipynb                      |   6 +
 notebooks/exploring_onsd.ipynb                | 261 +++++++----
 notebooks/exploring_pnb.ipynb                 | 375 ++++++++++++----
 sparse_coding_torch/onsd/classifier_model.py  | 104 ++++-
 .../onsd/generate_images_to_label.py          |  97 ++++
 sparse_coding_torch/onsd/generate_tflite.py   |  18 +-
 .../onsd/generate_tflite_valid.py             |  43 ++
 sparse_coding_torch/onsd/load_data.py         |  34 +-
 sparse_coding_torch/onsd/run_tflite.py        |  92 ++++
 sparse_coding_torch/onsd/train_classifier.py  | 229 ++++++++--
 .../onsd/train_sparse_model.py                |  31 +-
 .../onsd/train_valid_classifier.py            | 144 ++++++
 sparse_coding_torch/onsd/video_loader.py      | 421 +++++++++++++++---
 sparse_coding_torch/pnb/pnb_regression.py     | 200 ++++++++-
 sparse_coding_torch/sparse_model.py           |  35 +-
 15 files changed, 1750 insertions(+), 340 deletions(-)
 create mode 100644 notebooks/Untitled.ipynb
 create mode 100644 sparse_coding_torch/onsd/generate_images_to_label.py
 create mode 100644 sparse_coding_torch/onsd/generate_tflite_valid.py
 create mode 100644 sparse_coding_torch/onsd/run_tflite.py
 create mode 100644 sparse_coding_torch/onsd/train_valid_classifier.py

diff --git a/notebooks/Untitled.ipynb b/notebooks/Untitled.ipynb
new file mode 100644
index 0000000..363fcab
--- /dev/null
+++ b/notebooks/Untitled.ipynb
@@ -0,0 +1,6 @@
+{
+ "cells": [],
+ "metadata": {},
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/notebooks/exploring_onsd.ipynb b/notebooks/exploring_onsd.ipynb
index 78a1a43..0a4f69d 100644
--- a/notebooks/exploring_onsd.ipynb
+++ b/notebooks/exploring_onsd.ipynb
@@ -10,48 +10,48 @@
      "name": "stderr",
      "output_type": "stream",
      "text": [
-      "2022-07-22 19:18:15.582352: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:15.584225: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:15.585984: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:15.587061: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:15.600518: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:15.602470: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:15.604264: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:15.605003: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:15.606801: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:15.608539: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:15.610332: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:15.611042: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:15.618607: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations:  AVX2 FMA\n",
+      "2022-09-20 19:04:53.559420: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:53.561532: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:53.563471: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:53.565520: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:53.575543: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:53.577596: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:53.579563: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:53.581562: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:53.583495: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:53.585358: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:53.587191: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:53.588994: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:53.591695: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations:  AVX2 FMA\n",
       "To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.\n",
-      "2022-07-22 19:18:16.048633: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:16.050425: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:16.052115: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:16.052779: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:16.054531: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:16.056236: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:16.057927: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:16.058562: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:16.060228: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:16.061956: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:16.063637: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:16.064268: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:17.753924: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:17.755787: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:17.757590: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:17.758279: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:17.760024: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:17.761768: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:17.763449: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:17.764100: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:17.765835: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:17.767551: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:0 with 43667 MB memory:  -> device: 0, name: NVIDIA A40, pci bus id: 0000:01:00.0, compute capability: 8.6\n",
-      "2022-07-22 19:18:17.768085: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:17.769772: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:1 with 43667 MB memory:  -> device: 1, name: NVIDIA A40, pci bus id: 0000:02:00.0, compute capability: 8.6\n",
-      "2022-07-22 19:18:17.770274: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:17.770894: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:2 with 1311 MB memory:  -> device: 2, name: NVIDIA A40, pci bus id: 0000:03:00.0, compute capability: 8.6\n",
-      "2022-07-22 19:18:17.771266: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:17.772925: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:3 with 43667 MB memory:  -> device: 3, name: NVIDIA A40, pci bus id: 0000:04:00.0, compute capability: 8.6\n"
+      "2022-09-20 19:04:53.974795: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:53.976628: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:53.978416: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:53.980218: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:53.981984: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:53.983693: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:53.985422: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:53.987131: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:53.988849: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:53.990563: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:53.992274: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:53.994006: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:55.411878: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:55.413947: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:55.415877: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:55.417829: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:55.419726: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:55.421648: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:55.423528: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:55.425643: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:55.427547: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:55.429427: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:0 with 43665 MB memory:  -> device: 0, name: NVIDIA A40, pci bus id: 0000:01:00.0, compute capability: 8.6\n",
+      "2022-09-20 19:04:55.430005: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:55.431711: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:1 with 43665 MB memory:  -> device: 1, name: NVIDIA A40, pci bus id: 0000:02:00.0, compute capability: 8.6\n",
+      "2022-09-20 19:04:55.432157: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:55.433879: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:2 with 43665 MB memory:  -> device: 2, name: NVIDIA A40, pci bus id: 0000:03:00.0, compute capability: 8.6\n",
+      "2022-09-20 19:04:55.434341: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-09-20 19:04:55.436030: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:3 with 43665 MB memory:  -> device: 3, name: NVIDIA A40, pci bus id: 0000:04:00.0, compute capability: 8.6\n"
      ]
     }
    ],
@@ -64,67 +64,69 @@
     "from yolov4.get_bounding_boxes import YoloModel\n",
     "from sparse_coding_torch.utils import VideoGrayScaler, MinMaxScaler\n",
     "import torchvision\n",
-    "from sparse_coding_torch.utils import plot_video"
+    "from sparse_coding_torch.utils import plot_video\n",
+    "import tensorflow.keras as keras"
    ]
   },
   {
    "cell_type": "code",
    "execution_count": 2,
-   "id": "6f1b2c8a-8d98-43a0-a96f-d1ea0a2b4720",
+   "id": "a86670d5-8a91-4385-b1ef-d2da339fb251",
    "metadata": {},
    "outputs": [
     {
-     "name": "stderr",
+     "name": "stdout",
      "output_type": "stream",
      "text": [
-      "2022-07-22 19:18:19.462568: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:19.463520: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:19.465541: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:19.466311: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:19.468121: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:19.468851: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:19.470647: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:19.471384: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:19.473172: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:19.473929: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:19.475708: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:19.476443: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:19.485103: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:19.485924: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:19.487761: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:19.488493: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:19.490344: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:19.491064: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:0 with 43667 MB memory:  -> device: 0, name: NVIDIA A40, pci bus id: 0000:01:00.0, compute capability: 8.6\n",
-      "2022-07-22 19:18:19.491196: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:19.492918: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:1 with 43667 MB memory:  -> device: 1, name: NVIDIA A40, pci bus id: 0000:02:00.0, compute capability: 8.6\n",
-      "2022-07-22 19:18:19.493060: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:19.493691: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:2 with 1311 MB memory:  -> device: 2, name: NVIDIA A40, pci bus id: 0000:03:00.0, compute capability: 8.6\n",
-      "2022-07-22 19:18:19.493823: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-07-22 19:18:19.495576: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:3 with 43667 MB memory:  -> device: 3, name: NVIDIA A40, pci bus id: 0000:04:00.0, compute capability: 8.6\n"
+      "Loaded 1078 positive examples.\n",
+      "Loaded 2860 negative examples.\n"
      ]
     }
    ],
    "source": [
-    "yolo_model = YoloModel('onsd')\n",
-    "video_path = \"/shared_data/bamc_onsd_data/revised_extended_onsd_data/\""
+    "from sparse_coding_torch.onsd.video_loader import FrameLoader\n",
+    "\n",
+    "video_path = \"/shared_data/bamc_onsd_data/revised_extended_onsd_data/\"\n",
+    "transforms = torchvision.transforms.Compose(\n",
+    "    [\n",
+    "#      MinMaxScaler(0, 255),\n",
+    "#      torchvision.transforms.Resize((1000, 1000))\n",
+    "    ])\n",
+    "\n",
+    "dataset = FrameLoader(video_path, 224, 224, transform=None, yolo_model=None)"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 9,
-   "id": "3b05ae07-1df0-4e26-9083-86ab5225fab6",
+   "execution_count": 150,
+   "id": "0b3e2771-0ffd-4fc6-b2b9-a1ebf56581a6",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import random\n",
+    "\n",
+    "sample_idx = random.choice(range(len(dataset.get_frames())))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 151,
+   "id": "ca4d0f7a-a8c3-42c8-a79c-797b04dae194",
    "metadata": {},
    "outputs": [
     {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "100%|██████████| 37/37 [06:48<00:00, 11.04s/it]\n"
-     ]
+     "data": {
+      "text/plain": [
+       "<matplotlib.image.AxesImage at 0x7f6c4c01bdf0>"
+      ]
+     },
+     "execution_count": 151,
+     "metadata": {},
+     "output_type": "execute_result"
     },
     {
      "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAD4CAYAAADiry33AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/MnkTPAAAACXBIWXMAAAsTAAALEwEAmpwYAAAln0lEQVR4nO3deXyU1b3H8c8v+0zCTlgUERdAEQUkchVwKVRR3LAqFbUu7dWqta1KW1Fvb6239tq6VW2tpWrF1g13rlor7qJVGhAQxArKpgSIAmL27dw/zkQTMiH7PLN8369XXpmcmcl8n079MXnOeX7HnHOIiEjySgs6gIiIdC0VehGRJKdCLyKS5FToRUSSnAq9iEiSy4jli/Xt29cNGTIkli8pIpLwFi1a9JlzLr+9z49poR8yZAiFhYWxfEkRkYRnZus68nyduhERSXIq9CIiSU6FXkQkyanQi4gkORV6kbZyDpYuhTffhIqKoNOItKjFQm9mOWa20MyWmtkKM/tlZPw+M1tjZksiX6O7PK1I0D78EIYNgwkTYOpU6NcPHnkk6FQiu9Sa5ZWVwCTnXImZZQILzOzvkft+6px7rOviicSR2lqYNAk2bvSf6uudfz6MHAkHHBBcNpFdaPETvfNKIj9mRr7U21hSz2uvwY4djYs8QFUV/OlPwWQSaYVWnaM3s3QzWwJsAeY7596J3HW9mS0zs1vNLLuZ515oZoVmVlhcXNw5qUWC8Nln0cdra6GoKLZZRNqgVYXeOVfrnBsNDALGmdlI4CpgP+AQoDdwZTPPne2cK3DOFeTnt/sKXpHgTZwI1dVNx3Nz4YQTYp9HpJXatOrGObcdeAU41jlXFDmtUwn8BRjXBflE4sduu8Fll/nCXi8UgqFD4YwzAovVJs7BG2/AxRfDJZf4lUOS9FqcjDWzfKDaObfdzELA0cBvzGygc67IzAyYBizv2qgiceDXv4bx4+HOO+GLL+Db34YLLoDsqGcu48/ll8Pdd0NZmf95zhxf8G+8Mdhc0qWspT1jzewgYA6Qjv8LYK5z7jozexnIBwxYAlzUYNI2qoKCAqemZiIBWbrU/yNVX+TrhUJQWAgjRgSTS1pkZouccwXtfX6Ln+idc8uAMVHGJ7X3RUUkAM88A5WVTcdrauDZZ1Xok5iujBVJFaEQZET5bJeR4e+TpKVCL5Iqpk+HtGb+kz/ttNhmkZhSoZfE5hzcdRcMHgw5OTBuHCxYEHSq+DRokJ+IDYWgWzf/FQr5CdkBA4JOJ12oxcnYzqTJWOl011/vV8I0nGAMh+HVV+GQQwKLFde2bYPnnwczOO446NEj6ETSgo5OxqrQS+KqrIS+faEkymKvKVN8MRNJAh0t9Dp1I4lr5+ZiDS1bFtssInFMhV4SV//+UFcX/b5hw2KbRSSOqdBL4gqH4dJL/fedx3/5y2AydVRFBcyfDy++6LtiinSC1vSjF4lfN9zge8/ccos/Vz9kCNx2Gxx5ZNDJ2u6553zPHDP/sxk8/jhMnhxsLkl4moyV5OCc7yyZlRV0kvbZtAn22adpe4LcXFi/Hnr3DiaXxAVNxoqA//SbqEUe/HaE0eYbnIPHtImbdIwKvUg82LYteh+a6mrfJVOkA1ToReLBMcc0nVQGyMyEo4+OfR5JKir0Im312Wf+itwTToBZs/w59I467DA48cTGm5rk5vrJ2dGjO/77JaVpMlakLdauhYICKC31SyGzsvzXK6/48Y6oq4Onn4b77oP0dDjvPF/861fhSMrq8n70ItLAT37iz6fXT5xWVfmvCy6Ad9/t2O9OS4NTTvFfIp1Ip25E2uKFF6KvjnnvvaZLI0XihAq9SFtEmzAFf6olMzO2WURaSYVeUpdzzTdFa85FFzXdjSk722/coUIvcarFQm9mOWa20MyWmtkKM/tlZHwvM3vHzFab2SNmlsBXq0hK2bwZTj3164nUk06CTz9t3XOvucb3cA+FoHt3/wm
/oADuvLNrM4t0QIurbszMgFznXImZZQILgB8DVwBPOOceNrO7gKXOuT/u6ndp1Y0Erroahg+HDRv8ptjgT7sMHAirV/tP562xapU/L7/PPjBqVNflFSEGLRCcV7+zQ2bkywGTgPprs+cA09obQiRmnn3Wr4OvL/IAtbWwfTs88UTrf8/QofCtb6nIS0Jo1Tl6M0s3syXAFmA+8BGw3TlX/1/LJ8DuzTz3QjMrNLPC4uLiTogs0gEffBB9dUxJCaxcGfs8IjHQqkLvnKt1zo0GBgHjgP1a+wLOudnOuQLnXEF+fn77Uop0lgMOiL5yJi8PRo6MfR6RGGjTBVPOue1m9gpwGNDTzDIin+oHAa2czUoBn3wCb7zhW8tOngwZui4tbhx3nD8fX1Hhz9eDf3/69oVp0wKNJtJVWrPqJt/MekZuh4CjgZXAK8BpkYedCzzdRRkTh3Nw5ZX+/O2FF8Lpp8OgQfD++0Enk3oZGfDWW/Dtb/uVM6GQXxr59tuJ3eZYZBdas+rmIPxkazr+H4a5zrnrzGxv4GGgN/AucLZzLkqf1a8l/aqbZ5/1BaS0tPH4nnvCmjXqWSIi7dLlvW6cc8uAMVHGP8afr5d6d97ZtMgDfP45LF4MY8fGPpOIpDxdGduZduyIPp6WFv0fABGRGFCh70wzZkRf0eEcjNMfPyISDBX6zvTd7/rle/WbR2Rk+Mm+P/8ZcnKCzZYKtm6FX/0KJk6Es86Cf/0r6EQicUHr/jpTTg4sWACPPuonZgcM8H3K998/6GTJr7gYxozx8yEVFX5lzVNPwT33+F2aRFKYdpiS5PDTn8Ltt/tNQBrq2RO2bFFnSUloXd7rRiQhPPNM0yIPvo/NBx/EPo9IHFGhl+TQp0/08epq6NUrtllE4kxiFPrKSvjyy6BTSDy74oqvJ8HrZWbCIYf4q5NFUlh8F/ovvvBXmnbv7vvGHHggLFwYdCqJR6ec4jfuzsmBHj38MtdRo+Cxx1p+rkiSi+/J2AkToLCw8bnXvDxYvty3FRDZ2bZt/irkgQNhxIig04h0iuSdjF26FJYsaTrBVlWlbdukeb16+Y6hKvIiX4nfQv/RR9Hb+1ZVwYoVsc8jIpKg4rfQH3RQ9OVyoRAcdljs84iIJKj4LfT77gsnnugLe720NL+y4vvfDy5Xoikrg0cegbvugg8/DDqNiAQgfgs9wAMPwDXXwG67+ZUUp53mJ2f79g06WWJYuND/b3fBBTBzJoweDT/8oW+yJiIpI75X3Uj71db6Ir9lS+Px3Fx46CH/15KIJITkXXUjHfP221Be3nS8tNR30xSRlKFCn6yqqprfurCiIrZZRCRQKvTJ6rDDop+Lz82Fs8+OfR4RCYwKfbxavBhOP923ffje92D16rY9PycH/vpXv2opK8uP5eXB+PFw5pmdn1dE4laLG4+Y2R7A/UB/wAGznXO3mdm1wAVAceShVzvnnuuqoCll/nyYNs2fY3cOVq6EuXP9ZhoHHtj633Pyyf65c+b4jTmmToUpU/wyVRFJGS2uujGzgcBA59xiM+sGLAKmAdOBEufcTa19Ma26aQXnYOhQf2XwzqZMgeefj30mEQlUR1fdtPiJ3jlXBBRFbn9pZiuB3dv7gtKCkhJYty76fW++GdssIpIU2vQ3vJkNAcYA70SGLjWzZWZ2r5lF3d3BzC40s0IzKywuLo72EGkoFGp+27vmNtcQEdmFVhd6M8sDHgcuc87tAP4I7AOMxn/ivzna85xzs51zBc65gvz8/I4nTnYZGXD++Y1bP4Dvrz5zZjCZOsvWrbBxo67MFYmxVhV6M8vEF/kHnHNPADjnNjvnap1zdcCfgXFdFzPF3HKLn0jNzvabruTkwMUXw6WXBp2sfTZt8q2DBw6EffbxfYwWLAg6lUjKaM2qGwPuAVY6525pMD4wcv4e4BRgeddETEHZ2b5NwebNsGGDL4w9ewadqn2cg0mTYNUqqKnxYx9/DMceC++/D4MHB5tPJAW
05hP9BOA7wCQzWxL5mgr81szeM7NlwDeAy7syaErq3x8KChK3yINfErphw9dFvl51Nfzxj8FkEkkxrVl1swCIdi291sxLy9avj96Koaqq7ReBiUi76MoZ6VoFBU0/zYOfXD7yyNjnEUlBKvQNVVT4TTpuuAH+8Q+oqws6UeIbOtRf5RsOfz2WmemXip57bmCxRFJJi6duUsaaNb4PTGmp35UpFIJhw+C113yPGGm/v/4V7rjDb+peVgannAL//d/QrVvQyURSgjYeqXf44X7isOGn+OxsvyPTjTcGl0tEUp42HukMO3bAO+80PVVTWQl/+1swmUREOokKPez6Sk1dxSkiCU6FHvzG42PHNl0GmJ0NM2YEk0lEpJOo0Ne7/37o2/fride8PH9F6rXXBhpLUlRREVxyCey1l1+i+tBD+utS2k2rbuoNHepX3jz6KKxdC2PGwPHH+yZjIrFUXAyjR/smcDU1/v+PF1wAy5fD9dcHnU4SkKpYQ7m5cN55QaeQVHf77fDFF40vNCst9c3urrhC7aqlzXTqRiTevPyyX/G1s+xsWLYs9nkk4anQi8SbIUOi7+tbVQW7a3M3abvEK/Qffwz//rcmpiR5XXGF34OgoawsP280bFgwmSShJU6h/+ADOOAAGDnSL4Xcc09/JatIshk71q8Cy8/380bZ2fCNb8C8eUEnkwSVGC0QKiv9BhXFxY0/yeflwUcfQb9+nRdSJF7U1vq/YHv29EVfUlZqtEB45hkoL296uqamxjfMEklG6el+2a+KvHRQYhT6oiK/I9HOKir8xhYiItKsxCj048dHX4WQlwdHHRXzOCIiiSQxCv3BB8MxxzTevCIUguHD4cQTg8slrVdZ6S/j/9nP4J57oKQk6EQiKaPFK2PNbA/gfqA/4IDZzrnbzKw38AgwBFgLTHfObeuypI8+CrNn+6+qKjj7bLjsMrUoSASffQbjxvnJ9JISv5Lk6qvhn/+EvfcOOp1I0mtx1Y2ZDQQGOucWm1k3YBEwDTgP2Oqcu8HMZgG9nHNX7up3xfXGI9J1zj8fHnig8TxLWhoccQS88kpwuUQSRJevunHOFTnnFkdufwmsBHYHTgbmRB42B1/8RZp64ommk+l1dfDGG9Ev9ReRTtWmc/RmNgQYA7wD9HfOFUXu2oQ/tRPtOReaWaGZFRYXF3ckqySq9PTo42ZN9wAQkU7X6kJvZnnA48BlzrkdDe9z/vxP1HNAzrnZzrkC51xBvtYDp6YZM/zVnQ1lZPgJ9qysYDKJpJBWFXozy8QX+Qecc09EhjdHzt/Xn8ff0jURZZecgzlzYMQIf2HNt77l20XEk//9X9h/f78cNisLunWDQYPg7ruDTiaSElqz6saAe4CVzrlbGtw1DzgXuCHy/ekuSSi7dt11cOONvl85wFNPwYsvwrvvwj77BBrtK927w6JFvv3usmV+566pU7ViSiRGWrPqZiLwBvAeUBcZvhp/nn4uMBhYh19euXVXv0urbjpZSYnv81Ne3ng8PR3OPdevVxeRhNfRVTctfqRyzi0Ampsxm9zeF5ZOsGoVZGY2LfS1tersKSJfSYwrYyW6QYOaX564776xzSIicUuFvrM4B++/D2+/Hbu14fn5cNJJTTepCIfhqqtik0FE4p4KfWf4+GO/Kcohh8CUKb4AP/hgbF57zpyvly9mZ8PAgb518/jxsXl9EYl7ibHxSDyrq/OnSdat87frhcP+PPmoUbHJUV4OO3b4f2SidfoUkYSVGhuPxLN//tM37WpY5MGfvrnzztjlCIWgf38VeRFpQlWho4qLo1/GX1sLn34a+zwiIjtRoe+o8eN92+SdhcNwwgmxzyMishMV+o7q1w9mzvQ91uuFQrDnnnDOOcHlimbTJt+O4Lvfhfvua7r+XkSSkiZjO8v//R/ccQds3w6nnQaXXOJ7u8SLhQth8mS/oXpFhc/Wv78f79076HQisgtdfmWstNKJJ8bvtobOwXe+03j7vpISf8r
pf/4Hbr01uGwi0uV06iYVbNrkl3/urKrKb9EoIklNhT4VZGX5T/XR7HxVrYgkHRX6VNCnj9+ce+ednkIh+P73g8kkIjGjQp8qHnoIBg/2m36Ew77If/ObcNllQScLxmuv+ZYV4bC/svmvfw06kUiX0WRsqhg0yLc1fukl2LABCgpi154h3rzxht/4pKzM//zRR3DRRb6FxA9+EGw2kS6g5ZWSeg4/HBYsaDreq5e/0rm5zcxFAqJeNyJttWJF9PGyMti6y03SRBKSCr2knr32ij6emQk9e8Y0ikgsqNBL6rnuOj8J21A4DD/5iS/2Ca64tJi/r/o7SzYtIZanZiV+tVjozexeM9tiZssbjF1rZp+a2ZLI19SujSnSiY4/3m+cPmiQPx/fowdcfTX8/OdBJ+sQ5xyzXpzF4N8NZsbjM5h470TG/GkMm0o2BR1NAtbiZKyZHQGUAPc750ZGxq4FSpxzN7XlxTQZK3HFOd/3Jzs7Kfr4z10xl/OfPp+y6rKvxjIsg3GDxvHmd98MMJl0VJdPxjrnXgc0QyXJx8xfT5AERR7g1n/e2qjIA9S4GhYXLeaTHZ8ElEriQUf+H36pmS2LnNrp1dyDzOxCMys0s8Li4uIOvJyI7Mq2im1RxzPSMthesT22YSSutLfQ/xHYBxgNFAE3N/dA59xs51yBc64gPz+/nS8nIi05afhJZKVnNRnPSs9iv777BZBI4kW7Cr1zbrNzrtY5Vwf8GRjXubFEgvfa2tc496lzOeOxM3hy5ZPUubqWnxSgKydcSf/c/oQyQgCkWRrhzDCzT5hNRpougk9l7Xr3zWygc64o8uMpwPJdPV4k0fz85Z9zy9u3UF5djsPxzIfPcMw+x/D49MexaHsEx4E+4T4su3gZsxfN5h+r/8GePffkR//xI0YPGB10NAlYa1bdPAQcBfQFNgO/iPw8GnDAWuD7DQp/s7TqRhLB2u1r2f8P+1NRU9FoPDczlye//SRH73N0QMkkVXX5DlPOuRlRhu9p7wuKxLv5H80nzZqe1SytLmXeh/NU6CXhJMe6MpFO1C27G+nWtLFZRloGPbJ7BJBIpGNU6KURXTIPJww7Iep4Zlom54w6J8ZpRDpOhV4AeGPdG4z50xjSr0un9296c91r11FbVxt0rEDkZeXxzJnP0CO7B92zu9M9uzuhjBB3nXAXw/oMCzqeSJupH72wZNMSJtw7odFVleHMMOePPp/fT/19gMmCVVlTyUtrXqKqtopJe02ie3b3oCNJilI/eumwX73+K8qryxuNlVWXcc+796T0FZXZGdlMHTqVaftNU5GXhKZCL7y3+T0cTf+yy0rPYt32dQEkEpHOpEIvHNT/IIymFwFV1VYxpOeQ2AcSkU6lQi/81xH/RSgz1GgsnBnmgoMvoEeOlhPGqzpXx6trX+WR5Y+w/ov1QceROKYGGMKoAaN44ewX+PHzP2bJpiX0zOnJ5YdezqyJs4KOJs1Ys20N35jzDbaW+w7i1XXV/OfB/8ntx94ety0aJDhadSOSgA7640GsKF7RqNFabmYud590N2eMPCPAZNIVtOpGJMWs3rqa1VtXN+mmWVpdyu8Xpu5yWGmeCr1IgimpKmm27fCXlV/GOI0kAhV6kQQzst9IMtMzm4znZORw+gGnB5BI4p0KvaSMd4ve5dynzuWo+47i+tevZ1t59K334l1GWgZzps0hnBn+6pN9bmYuQ3oO4cf/8eOA00k80mSspITH33+cc546h4qaCupcHTkZOfQO9ebd779Lv9x+Qcdrlw8//5C7Cu9iwxcbmLLvFM468Kwmy2QlOXR0MlaFXpJeTV0NA24awOflnzcaz0rL4tJxl3LzlGa3PBaJC1p1I9KCVZ+vorK2ssl4VV0V8z6cF0AikdhSoZek1zOnJzW1NVHv6x3qHeM0IrGnQi9JzTlH9+zujNt9HJlpjVeq5GbmcsWhVwSUTCR2Wiz0ZnavmW0xs+UNxnqb2XwzWxX53qtrY4q
03ZMrn2TI74bQ84aeFG4spHeoN+HMMD2ye5Cdns0Px/2Q6QdMDzqmSJdrTa+b+4DfA/c3GJsFvOScu8HMZkV+vrLz44m0z6trX+XsJ86mrMZvplJTUwMGJw8/mfNGn8eYAWPIz80POKVIbLT4id459zqwdafhk4E5kdtzgGmdG0ukY6599dqviny9suoynvrgKSbsMUFFXlJKe8/R93fOFUVubwL6N/dAM7vQzArNrLC4uLidLyfSNqu3ro46np6WzqaSTTFOIxKsDk/GOr8Qv9nF+M652c65AudcQX6+PkVJbIwZOCbqZioAg7oPinEakWC1t9BvNrOBAJHvWzovkkjHXXfUdVE3U7lq4lVkZ2QHlCo5lFWXMXfFXO5efDdrt68NOo60Qns3HpkHnAvcEPn+dKclEukEYwaO4eVzXuZn83/GoqJF9M/rz1UTr+J7Y74XdLSE9taGtzjugeNwzlHraqlzdVx+6OX8evKvg44mu9BiCwQzewg4CugLbAZ+ATwFzAUGA+uA6c65nSdsm1ALBJHEVVVbxYCbBrCtonEzuHBmmHlnzGPy3pMDSpb8OtoCocVP9M65Gc3cpXdVJIW8vu51autqm4yXVZdxz7v3qNDHMV0ZKyKtUllTSTPz25RVl0W/Q+KCCr2ItMoRex4RtWdQbmYuZx54ZgCJpLVU6EUSyKKNi5j+6HRG3TWKi5+5OKarXrpld2P2SbMJZYS+2vAkLzOPI4ccyan7nxqzHNJ26kcvkiCeX/08p849lfLqchyOjLQMwplhFv7nQob3HR6zHKs+X8V9S+5jW8U2Thx2IlP2nUKa6TNjV9LGIyIpwDnHXrftxbov1jUaN4wTh5/I02dohXMy08YjIilga/lWikqKmow7HK+vez2ARJJIVOhFEkBuVm6zLR36hPrEOI0kGhX6FLWtfBtbStW5IlHkZORw5oFnkpOR02g8nBlm5viZAaWSRKFCn2I+2fEJR953JANuHsDgWwdzwJ0HsLhocdCxpBX+MPUPHLfvceRk5NAjuwc56TlccsglXDT2oqCjSZzTZGwKqa2rZd/b92XDjg3Uuq+vcOye3Z3VP1ytHu0JYuOXG1n/xXqG9xlOr5A2d0sFmoyVVpv/8Xw+L/+8UZEHqK6t5r4l9wUTStpst267ceigQ1XkpdVU6FPIuu3rmhR5gPKa8mY36hCRxKdCn0IKdov+l19uZi4TBk+IcRoRiRUV+hQydrexHD74cEIZX2/IkZWexYC8AUw/YHqXvW5FTQWPLH+EG9+8kVfWvEIs54VEpP0bj0iCmjdjHje+eSN3L76bytpKTh9xOr846hdNlu11ltVbVzPh3gmUV5dTUVNBVnoWoweMZv535jfZAUpEuoZW3UiXGvfncSwqWkSdq/tqLJQR4soJV/KLo34RYDKRxKFVNxK3ikuLWbp5aaMiD37y9y9L/hJQKpHUo0IvXabO1TV72f7OxV9Euo4KvXSZ/nn9Gd5neJNin52erY0qRGJIhV661IOnPkjPnJ7kZuYCkJeVx3599+Oaw68JOFn7rPp8FWc/cTZ737Y3k+dM5uU1LwcdSaRFHZqMNbO1wJdALVDT0mSBJmNT047KHTy8/GHWbV/HuN3Hcfyw47/aoSiRfPDZB4z78zjKqsu+uvAsnBlm9omzOevAswJOJ8ks0I1HIoW+wDn3WWser0Iviey0uafx5AdPNplf6Bvuy6aZm0hPSw8omSQ7rboRiZEF6xdEnUQurSqNuimISLzoaKF3wAtmtsjMLoz2ADO70MwKzaywuLi4gy8nEpwBeQOijjscvXLUYEziV0cL/UTn3MHAccAPzOyInR/gnJvtnCtwzhXk56sNrjTmnGPt9rWs2bYm7lsjXH341V9NKtcLZYSYMXIGuVm5zTxLJHgdmhFzzn0a+b7FzJ4ExgHawFJa5b3N73H6o6ez/ov1AAzqPoi5p89l9IDRwQZrxvQDprPhiw1c+9q1GEZVbRWn7H8Kdx5
/Z9DRRHap3ZOxZpYLpDnnvozcng9c55x7vrnnaDJW6pVWlbLHrXuwrWJbo/Ee2T1Yf/l6umd3DyhZy8qry1mzfQ0D8gbQO9Q76DiSAoKcjO0PLDCzpcBC4NldFXmRhh57/zGq66qbjNfU1TB3xdwAErVeKDPEiPwRKvKSMNp96sY59zEwqhOzSArZ+OVGyqvLm4yXVpey8cuNASQSSV5aXimBOGyPw6K2Kc7LyuPQQYcGkEgkeSXe5YmSFI7c80jGDhzLwk8XUl7jP9mHMkKM6j+Kb+79zYDTJb8129Ywd8VcymvKOXn4yYwZOCboSNKF1I9eAlNZU8kdC+/g3nfvxeE4f/T5/Og/ftRlm6CI95d3/8IPnvsBNXU11LpacjJyuODgC/jdsb8LOpo0I9AWCG2lQi8SrOLSYgb/bjAVNRWNxsOZYeZ/Zz7j9xgfUDLZFbVAEJFWe27Vc1EbypVXl/Pw8ocDSCSxoEIvkkKaa7xmmJqyJTEVepEUcvzQ46mtq20ynpOZo1bLSUyFXiSF9Ar1Ys60OYQyQoQzw2SnZ5OTkcNPx/+Ugt3afQpYmvHWhreYNGcS/W7sx8R7Jwa2UY0mY0VS0OaSzTyx8gkqaio4YdgJDO0zNOhISefVta9y/IPHU1Zd9tVYOCPMg6c+yMn7ndym36VVNyIicWjsn8ayeNPiJuN799qbj370UZt+l1bdiIjEofe2vBd1fM22NVTXNu3z1JVU6EVEukBzG9X0zOkZ8z2TVeglKZRVl/HOJ++wZtuaoKOIAHDN4dcQzgw3GgtnhvnJ+J9gZjHNol43kvDueOcOZr00i4y0DKprqxk7cCxPnvEkfcN9g44mKezCsReyvXI7179+PbWuFsO4/NDLmTVxVsyzaDJWEtr8j+Yz7ZFpjVY2ZKZlcuigQ3n9fG12JsGrqq1iS+kW8sP5ZGdkt+t3aDJWUtpNb93UqMgDVNdV86+N/2Ld9nUBpRL5WlZ6FoO6D2p3ke8MKvSS0IpKiqKOZ6VnsaV0S4zTiMQnFXpJaMftexxZ6VlNxutcHSP7jQwgkUj8UaGXmKqureax9x/jomcu4lev/4pPdnzSod83c/xMeod6Nyr24cwwvz36t1F3sBJJRR2ajDWzY4HbgHTgbufcDbt6vCZjU1t5dTlH3HcEH3z2ASVVJWSnZ5Oels68M+Yxee/J7f69xaXF3PzPm3l+9fPs3m13Zo6fyaS9JnVicpFgBdYCwczSgQ+Bo4FPgH8BM5xz7zf3HBX61HbzWzfz81d+/tXWgfX65fZj4xUb1SZXpBlBrroZB6x2zn3snKsCHgba1qlHUsrf3vtbkyIP/mKn5i4XF5GO60ih3x3Y0ODnTyJjjZjZhWZWaGaFxcXFHXg5SXQ56dH3gq1zddonVqQLdflkrHNutnOuwDlXkJ+f39UvJ3Hs4kMuJjczt9GYYezebXeG9xkeUCqR5NeRQv8psEeDnwdFxkSiOvugs/nW/t/ym15khOmW1Y38cD5Pn/F0zHt/iKSSjkzGZuAnYyfjC/y/gDOdcyuae44mYwXg/eL3WbB+Af1z+3Pc0Ojr4EXkax2djG13UzPnXI2ZXQr8A7+88t5dFXmReiPyRzAif0TQMURSRoe6VzrnngOe66QsIiLSBXRlrIhIklOhFxFJcir0IiJJToVeRCTJxXSHKTMrBhJxN4i+wGdBh4gBHWfySIVjhNQ4zr5ArnOu3VecxrTQJyozK+zIGtZEoeNMHqlwjJAax9kZx6hTNyIiSU6FXkQkyanQt87soAPEiI4zeaTCMUJqHGeHj1Hn6EVEkpw+0YuIJDkVehGRJKdCD5jZvWa2xcyWNxi70cw+MLNlZvakmfVscN9VZrbazP5tZlMCCd0O0Y6zwX0zzcyZWd/Iz2Zmt0eOc5mZHRz7xG3X3DGa2Q8j7+cKM/ttg/GkeS/NbLSZvW1mSyK7uo2LjCfqe7mHmb1iZu9H3rcfR8Z7m9l8M1sV+d4rMp5sx9l
5Ncg5l/JfwBHAwcDyBmPHABmR278BfhO5PQJYCmQDewEfAelBH0N7jzMyvge+3fQ6oG9kbCrwd8CAQ4F3gs7fgffyG8CLQHbk537J+F4CLwDHNXj/Xk3w93IgcHDkdjf8/hcjgN8CsyLjsxr8t5lsx9lpNUif6AHn3OvA1p3GXnDO1UR+fBu/gxb4DdAfds5VOufWAKvxG6XHvWjHGXEr8DOg4cz8ycD9znsb6GlmA2MQs0OaOcaLgRucc5WRx2yJjCfbe+mA7pHbPYCNkduJ+l4WOecWR25/CazE70t9MjAn8rA5wLTI7aQ6zs6sQSr0rfNd/CcFaOWm6InCzE4GPnXOLd3prmQ6zmHA4Wb2jpm9ZmaHRMaT6RgBLgNuNLMNwE3AVZHxhD9OMxsCjAHeAfo754oid20C+kduJ9txNtShGqRC3wIzuwaoAR4IOktnM7MwcDXw30Fn6WIZQG/8n/M/BeZacm5SezFwuXNuD+By4J6A83QKM8sDHgcuc87taHif8+cykmKNeHPH2Rk1SIV+F8zsPOAE4KzI/6EguTZF3wd/jm+pma3FH8tiMxtAch3nJ8ATkT/pFwJ1+EZRyXSMAOcCT0RuP8rXf84n7HGaWSa++D3gnKs/ts31p2Qi3+tPxSXbcXZaDVKhb4aZHYs/b32Sc66swV3zgDPMLNvM9gKGAguDyNhRzrn3nHP9nHNDnHND8AXxYOfcJvxxnhNZyXAo8EWDP5cTzVP4CVnMbBiQhe94mDTvZcRG4MjI7UnAqsjthHwvI3913QOsdM7d0uCuefh/1Ih8f7rBeNIcZ6fWoKBnnOPhC3gIKAKq8cXue/gJjg3AksjXXQ0efw1+pvvfRFY5JMJXtOPc6f61fL3qxoA/RI7zPaAg6PwdeC+zgL8By4HFwKRkfC+BicAi/IqMd4CxCf5eTsSfllnW4L/DqUAf4CX8P2QvAr2T9Dg7rQapBYKISJLTqRsRkSSnQi8ikuRU6EVEkpwKvYhIklOhFxFJcir0IiJJToVeRCTJ/T+QHR89dBCeHgAAAABJRU5ErkJggg==\n",
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQEAAAD8CAYAAAB3lxGOAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/MnkTPAAAACXBIWXMAAAsTAAALEwEAmpwYAABYIklEQVR4nO29eZBsV30m+J3MrNwrq97Te3p6iCckYUEbQVuADDRtC8ZbI7VnZOgZxsSEWexomRiYMTaOQWA8MG7GdvfIeAztcDQdhsYdbmFs2m2CMN1gNz12OADzhDGLhTBaQOvT22rJfTvzR+Z36ru/urnUmlkv7xdRUVV3Pffe89u+3++c47z3SJAgweIiNesGJEiQYLZIlECCBAuORAkkSLDgSJRAggQLjkQJJEiw4EiUQIIEC44DUwLOuVc55x5wzn3bOXf3Qd0nQYIEe4M7iDoB51wawLcA/CiAxwB8CcDrvPd/t+83S5AgwZ5wUJ7ASwB823v/kPe+DeBjAO48oHslSJBgD8gc0HWvBfCo/P8YgJeOOtg5l5QtXuFwzmEnXudOj08wFS5470/ajQelBCbCOXcXgLtmdf8rDYVCATfeeCPa7TZ+/ud/Hvfddx8+8pGPYHl5GUtLS3j5y1+O5z73ufjYxz6GkydPYm1tDZlMBqdPn8bXvvY1XL58Gd57/Oqv/io++MEP4p577sHS0hLuuecenD17FqdOncKFCxfw7ne/G+95z3vw1re+FaVSCc1mEx/84Afxghe8AF/96lexsrKCu+++G7/2a7+G22+/Hddeey2y2Sye8Yxn4Pz58/jud7+L5zznOcjn8/jyl7+MXC6Hc+fO4Ytf/CLuuOMOnDx5EmfPnsXx48dxww03YH19HX/4h3+I9fX1Wb/iKwHfidt4UErgcQBn5P9nDrcFeO8/BOBDQOIJ7Af6/T663S56vR4uXbqEb33rWyiXyyiVSjh//jy898jn81heXkar1UKv10O/38fGxgZ6vV6wvPfeey/e/va3Y3l5GbVaDY888gj6/T6azSZyuRw2NjawurqK5eVllEol/NEf/RFWV1exsbEBYGDBy+UyAOD8+fO47rrrcOLECTz00EP4+Mc/jrW1Ndx66614xStegRe84AV44okn8KUvfQnpdBqf+MQn4L3HnXfeiZMnT6LdbuOpp55Cp9OZ5au98uG93/cfDJTLQwBuAJAF8LcAbh5zvE9+ZvezsrLis9msHyrjXf0457adXyqV/C233DLz50t+ws/ZOPk7kOwAADjn7gDw/wJIA/iw9/7/HnPswTQiQYIEivu897fajQemBHaCRAkkSHAoiFUCScVgggQHgjTgcgDcrBsyETPLDiRIcGXCIVt5FvpL1wP5M3DVr6FXfwT9zgaA/qwbF4tECSRIsC9wADyyy9fhuW++HydO5OB7wLm1Dh77b+/H5hffNbe1D0k4kCDBvsADcPie2/8vtH0OH/8F4Jde3UHBXcDxH/w5AMVZN3AkEk8gQYJ9g0d9cwNF7/HgYw380f+3gXy/Ad/OAS4968aNRJIdSJBgv+AckD6BZ7z247h2dR1Zv4rHqzmsP/43uPxf/9d5CAeSFGGCBIeBFLIo3vgylArPwrmHPgM0LmAQLsycGEyUQIIEC46kTiBBggTbkSiBBAkWHIkSSJBgwZEogQQJFhyJEkiQYMGRKIEECRYciRJIkGDBsWsl4Jw745z7nHPu75xz33DO/dxw+3udc487574y/Llj/5qbIEGC/cZexg50Abzde/9l59wygPucc58d7vtN7/09e29eggQJDhq7VgLe+ycBPDn8e9M5dz8GU40nSJDgCGFfOAHn3PUAXgjgi8NNb3XOfdU592Hn3LH9uEeCBAkOBntWAs65MoBPAHib934DwO8AeDaAWzDwFH5jxHl3OefOOufO7rUNCRIk2D32NIDIObcE4FMA/ov3/v0x+68H8Cnv/fMnXCcZQJQgwcE
jdgDRrjkB55wD8LsA7lcF4Jw7PeQLAODVAL4+xbWQzWZnPdZ6G3a6bNZ+Xm/e4JwLPwpZO2LkefsB3mPUvWzbtF3TtFufT/fF3W/UM3G7vW6/3x973mHAez9yEZe9ZAf+MYCfAvA159xXhtveBeB1zrlbMBhA/QiAn52mge12ew9N2T1SqRSy2Wz4QEtLS5HJH0Z18n6/bxdQ2XbdUZ1y3pBOp0d20G63G47R3wQ7ODBaQL33sYK4E+xUyfL7EPo99NvF3ScsymG+H/enUtujaG5XRcA+TUXQ6/XCPtvHZomFnk9gaWkJN954I2666SY861nPwjOf+UycPn0aJ06cQLFYRLVaxcbGBtrtdli2q9vtot1uhyW/Go0GOp1ORNvzRz98JpNBJpOBcy4idJ1OB91uF/1+H845LC0toVAoIJ1Oh+vyp9frodfrIZPJhA7HawIIbSK0s/JvtjOVSoXOzrZlMpmwTe+rz8W/2RZVomy/Ra/Xg/ce6XQa6XQ6tIW/4xRunJDyHbB9XFqN7aZg8ThaPi6zxnfPbXxfPLbf78cKOK+ZTqextLSETCaDpaUlLC0thetlMhnk83nkcjlkMhmsr6/ju9/9Lr7zne/g4sWLePzxx/HQQw+h1WqF680A+xsOXAngunkUykwmg0qlgkqlEvbl83lUq1XU63W0221kMhmk02k0m030ej2k02l0u93QwewHpoXgDztOKpUKnVAVhPc+dExgy2qkUqlwfSoYHs9zs9lseBZth16X7eSPWiRVUHQfVRF474PyABBpp96HxwKI3IdKRJWgWmx9BgqyKi8KLoWbilAVmt6b75yhpirNbrcbtvV6PTSbzbBGY6fTiTwX28F26/fkc/B/tt17j2KxiJMnTyKTyWBzczMojFmGBXFYaCXATlypVFAsFiNWjBp/Y2MjCFCj0QiC2+12gwUHBh2v1WpFXE31CjqdTrCU+Xw+3CuXy0UEglaNSoLCzGNoCdUiUalks1mkUqnQvkwmE65LK0ah7Pf7QcitcPZ6PdTrdTQajYhHwGfjeUtLS8hms+EevC/fRzqdDkJnPRQVbhVetcT6XuiFUQnw3fAdqjejz5bNZkMbs9kslpaWIgKcTqeRzWZRKBTQ6/XQbrfRarUiz6EKoNlsotFooN1uo1gsIpfLRfbzfabTaZRKJTQaDayvryObzaJYLKJWq83c/bdYaCVAIfHeI5fLhQ6icV+lUgnb+v0+Go1G6NQ8Hhh0bAo0Oy0FU49TK62dkEqBQtxut5FKpSJtJNSqUQGoN8FrWeufSqXQ7/fRbrfR6XQiHZzHV6tVtFqtIETqUajrTGVQr9cjQs3r8Z1pG/S3to/KlEqNgkyBt0rIKrBqtYpmsxkJF9iWfD6PYrGIQqEQuT69MioyKqx0Oh36AttIpQMgKHENwXg+r8Hv3G630Ww2ASCijBIlMEfI5XI4efIkyuUyMpkMstlsxP3l39lsFisrKyiVSmg2m8EadDqdYDXYUSicuVwudA51VzXeVrc8k8kE3iGfz6NUKgXuAUBEifA3wxG1amw3FYPGyBRgWlQKH4W43++j1WoF15idtdvtBk9ChVTdYbZdPSJaa2sl1UPx3od2sK1UUhRsKka66RqW8LoqtLw230m328Xm5mYIN/L5PAqFQmh3s9kMz6fKi39TKdBjSKfTkfZYL4jKplAooFKp4OLFi2g2m8GrnDcstBJYXl5GqVQK5BIFpNFohM4ORDV3LpdDOp1GoVCIuIYUFBVWZYqVPeYPLY8NIaiE8vl85JraFrWy6lpT8NhJVehVETSbzSBs2nkzmUx4B9oOCh2Pb7VaaLfbaLfbQanpM/HeStjZMInCoyGBhhAam1MR0NPp9XpB2dg4XwlHa4GVxGUmiM9PBUEXn8qdvEs+n0ej0QjvqVAooFQqhfvz/TebTVy6dAmXLl3ChQsXsLGxgV6vh9XVVVSr1UhoNA9YaCVAIabbSKFTV4+dgJ2XnYoxdy6XC96
AEki0asr80+1Xt7Hdbgerrky/utij0lU8jp2fwt9sNiNWle4zn1ePBwbCQOVGy93v91Gv10OIQIFUxUbr12q1UKvVIgJJxagKkG64Kj11+YEtT4dWWZWxHqehkJKVGkpoqKHXJQHIe/PdM2xgX2AooZ6Ltf5099kufttCoYCVlRV0Oh2cP38ezWYTTz311NwpAGCBlQAtzcWLF/HEE0+g1+uFeJiucFxaje4oU4Wa4qMC0M4JbC90YQ5Zj+F2dd81i6A5Z/5YthyICgpJLiXWAARLpzE0LXen0wnnsbNr29S606rZ57Lxu2Y8+CyqMKmEKPj0yhhT833zfFpmTfPlcjkUi8UgvORVOp1O4GvoymtaUUMRhngbGxtwzqFQKAQlpKEawwkaDL0GvZ5GoxFSg0888UR4V5pBOSy85S1vwbFjx/C+970vdv/CKgG6ir1eD5cvXw4fWq1+qVQKQg0MPAMlwOgV2JSX5vV5LwqEMtNqsWg52ZmUceZ1mZa0QqWCai22KgVaVmCgIDSUobDQ1VZFQ6JQeQQ+Dz0L5RA0hqYrrsKv71OVnabqGGrQw6C1Vy9Kz6/VaiGFS8+D9+cP28dra7hHfofvluw+PQdNA5PMZX0EuQuSiqlUCp1OB08//TQuXryItbU1rK2tBQVw2Irgt3/7t8fuX1glAADVahW1Wg0XLlwIhBY/UrfbDcqBAqVFKfyQmjtW4aTl0EyBxvs8VoVe430AESaeAqTkGDukeg9sX1y72u12pI0UBhVOHqMpOZKhPE7dfQolSVU+U61Ww8bGRlBcSmhms9kIaameCpVsrVYL4QjJPV5fuQ4AQYnQNS8WiygWi0GB1ev1kJblt9O8vvIJ+r+GDcCAM2DYVK/Xt9U68B1kMhk0m00451AsFnHq1ClkMhk89thj4dvPExZaCbTbbdTrdSwvLwdSq91u4+LFi8FSaFxPYVXFAESrz+LCAtvZ1S0k+0wXuNVqRUp0NaRg7MqYnddQS269B1pVClihUEAulwt1CyS8lPBrNBrBKtOCk19gG1Qg+YxUIGwvY2hbj0ByjxaT5/EZAUTqCxjuqBegsTn3UcBbrRaq1SrK5XJ4t/ru+F75njXsYzaIdSPkfvjdbSqQ35/eAT0D9XjoKbB/zRsWWgkAwOXLlwMhaHP6FAztxDbeB6JuuFpoEoeqCCgU7ITcX6/Xwz6CFknd23K5HPgMtospKUu60b1XAavX66HNzAJQ6NVLUGvLzp3P5yP8BIWLwsh7AVsCZdl6QlOBFFCt7iNRB2xxG8BAYNvtdrDKqkh4Xd6rWq2i0WhEainYnlwuF+FGeE8St/RyCoVC8FxU6fI3sJVVoCdAT4HHFYvF0Ga+/3nCQiuBTCaDlZWV4CaSiGJH1LhSWWYqAVu5xn3Mp5NU0hQSr0VijPdmB1TW3saOWg/AtvH+tPi0pBpS6HYNHbRwSUlO9Wx4vtbGF4vFiJCry16v14Ml5DnKs9jCIgq/ko4AglekfIR6VLwG6zEo0OQWlNxVwpFVlRR2flO+C3pNLArT78r3z3P5LUhE0rtietV7j2aziUcffRSXL1+eSy8AWHAloFZbC1/oFbAoRGNpdbVpUeKKZpThp5BrOEGh13w5rbHmzTX00JifnZnhCjuqhi68Bi05O3mj0QjxLgVeCUYtOmJHJoHKd0BWv1gsBoEkS1+pVMLzadEUlY0OwOLzEBTgZrMZ3hPTuFqHQAVRrVaDF5LNZiMeGb8x28xr05tQ3oTf2tY/UMHQG+A3035TrVYBbCk1JT7b7TZKpRIA4Pz58+EdzxMWWgnQOjL2q9frwYIzFqSFY8fk8bYakNZEwwnrgrPzUCgZp6dSKZTL5dDRGMvTGmrmgC54u92OKBEbbqjnopkKtqPT6QTmnQLDY5Rs5PNS2VAhskSaKTxgqwxbsyB8N1axaVillpZluZVKJcJ38BlopfV+bLMWKanyVPafz8/t6nXRcyNPwzYxrFNiWIWd19J6CCr
cer2OS5cuYXNzM4QP84aFVgIU3G63G1w8nU9AtTY/PK28Dq6hS1goFIJ1pjJRaIEJvQh2OlpmzV3zp1AoBEtmO65mCKx1p1W0BTmaktQfhgS8Nt8BrbgScQwxtPiIz8jOzmvQ4yLnwnetHgIFiOdQkSq/ol4SawLUG+K7Y2ah1+ttywhoUZZVTFTgNAhsBzMPrBvgdXTwFkM55XU0ozKPHgCx0ErAe4/Nzc0wnJjWTfPkmtZT66apOFoETTHSUpOAYocHEOkY1Wo1WBreg0qBMWSr1Yp4KFQOhHVj1eUm20+XXp9By2zt6Dtg++g9/mh7NSWq6UVbGWcVAoWO34HvUVOv6iVonp/7WW2plllDo2azGSFadQCRxvRa5ciMiCrhdDoduZZyDVQM/M70RjTssor1sKFpzzjsWQk45x4BsAmgB6Drvb/VOXccwB8AuB6D2YVe672/vNd77Tes28fOQwutaTFaK2XDKfCdTmfbsFoVVLr3OlCIlXx6vLrW3W43UnTDYzSVyApHtktddq3KY7t4Tz4DoRWC6j3ocVQavB5/K1/RarVCHK8kmypMCqEl+TTbYsdLUDj5vRgy5HK5iBfDNjI/XyqVIkJI4aXnxGfkPhKTyusoVGFqhSOFTD05Ph8LsqxSPExMuvd+eQL/nff+gvx/N4A/997/unPu7uH/79ine+0bUqlUSHvVarWI9VfCil6CCoZ6AfrxaVWUiNPxB0qi0aIBW2P+1VrwfuzEtNIs4FGFpOMX2HnpxSinQcWg6S4VerrpFAa2m21Vd5jvRsdcqJAp8afDnCnAmqfnPdheLWOmZVXikmk5VYCqBPlO2U5NMyqXo++X5+s2hgf8turRaV9g2MH3zvtoX5qFFzANDiocuBPAK4d/fxTAf8McKgG6lBxrTredwqtxsbqOmqrT4acUCC2VVTfdsvAaX/M8Xp/7GeurQNdqtYiFsYw4hYL7LPmnFlprGDR1qR1an1WP4ztUAlIJNq2CVGuk3oIKiHpY+lxab6DFWvqN+L70fHoQWu+h7QQQeT8az7ONTAWrwux0OpE5CKgg1Vtkm3QKsll6A+OwH0rAA/iMG8wT+G+89x8CcMpvzTj8FIBT9iTn3F0A7tqH++8KdBnL5XIYOUbrQgtjO2mn0wnpKh0RSKFl3EWLurS0hHK5DGDLalgSSUtlga1BSuQXtPMrwUVCTON9TQ3q/wStZLPZjIz647W1foDX130q/MAgR68KhNaall89DvUSVNEBW9kC9R74DrRcWMMK5QFsZSffNc9XopLKQ7kPIu57a/xPz0/5I4aMzFTogCMex/EY84r9UAI/4L1/3Dl3NYDPOue+qTu9997FTCQ6VBYfAmYz0ShjeQoCR5nRVc3lchG3UMkqWmw77p1uNQVD48dcLhcyCLSotOS8j1ohlvYqSWXjbGvh7WAi/Q0gKCZaRN6XpKMKrYYatJpWmGhtlRhUD4PHKbRGgtfSsQQaCqgHpVkRWlYWI6mHos+vStQqIwqmfV/aZg1R9F1oxoGhyaiMg3pV84o9KwHv/ePD30875/4YwEsAnHPD9Qecc6cBPL3X++w32Pl0IgnNcwNbk07ww1oiSFNv7GxaWKIdkzGlssYks1Tw1L1Vt5dCoRaX+9kO5TFsAZPG1c4NhslSqSjHoak6taoqQCokykPQ2imBqtv4W6+vStWScqpUeS6vzffCd6hKhP/zeCovtfwacum1ge3FUtpntC3qGdDqE3yWdDqNcrkc0qnziD0pAedcCUDKDxYkLQH4MQC/AuCTAN4A4NeHv/9krw3db9DdowUjcaTlvoztOQBEy0VVaDU3rlZU2WgAEY+h0WhE6sv1erTY2qnYaXVqM3ohdI01BaTCaYUV2Kp7oKWzxTQa22q1nvIkGh5Yl12tn27jdivoKmA2xFLFpqy9WnqNw3l9fQa9vrr99h6qSOw+vgN6MhoO8jsr8at9giXemrI8LHz/938/stks/uqv/ip2/149gVM
A/nj4kjIA/oP3/j87574E4OPOuZ8B8B0Ar93jffYd7Dx0y1utVhg5xhQYOzuAMKAln88DQMRN7fW2RtypBeTHVxJMC4TUndYYlLE2oVkBWnWdv147qs3p875KSqowqQuu17J5ewsqDmsp9f0qtPZCq/lUKdlzVRB5rMbWSggqX8BzVPmo0PNcqwj425J7wFaGSL+fKiNVlPwuqiSo2DXjcljg1PmjsCcl4L1/CMD3xWy/COCH93Ltg4Zzg3UFKpVKmGkGiFoodiqN7WzdgHYGYIsApLDxXhpXapWZlgTzOgwVKKwUeB3gw7bqYBVlxdXNp6LTkIFtoVXTgT7q+mvIoF6HCqgVWiu8BIVD31ccdxH3rYg4953nx3EAqiji2sbn1DSlfS4qPCVolTDUIiRga8KZbrcbUQKc0JYZi8NSBJ/73OfG7l/YisF0ejBzTLFYDJYfQBhYQiKQnoF2Mi2vZQex7Dw7iS3lBRARPCoHnRdQO7im+ezgIBV83c7QQI+nQiNZqe+BSo41D+oZKDGqz28VxjQd2gokMP3yYuOubwU3jom352u7+d71t95XU530rlT4eW9NX/Jcci98p1okNi9YWCXAyjum7QqFAsrlcpiIggKhqSR1kVXQgK2CFxV2Wg0lztjRer2tGnYqInYWtXYMKXhd1h6w7XQ59bocQEOr65wL7iA7M2NbbtPUHJWcPru6ycQ0Vjxu/7RKw54z6bqjzovjKca1hQINbI0M1AFDyl2oAtVUJ8+lIfHeo16vBx5onrCwSqBUKoUhnpqioxuvVlHHx5Mw1BBB42gKclxemwSWEpLczroDkn2qMLTslEKpLqwWyJBkYzpSyUBgu1XVgiZNPSrGpR3jXH7dHidg/L1bd3ga72En11GhB6JEpq3RoLDbdSWoIK2iUA9Mr3OYnMAkLKwSqFQqWF1dRblcDh2f8TXXHQAQiaX5IXV8Pl1DqwCUaGPsyI5m425gq/yWOW8d888hqRoW8D7OudAereLTNKadKFQnEFGFoz/A6KW2rYscF2fr8cQot34coTgKk64T9/+okECzD8rjUNHbYiGGcmoMmM1hmbEqTlXc+6XA9hMLqwRyuRyWl5exuroamRhDR3ypS0jXmj+W9QUQ1u9T152KQONxdgStKKNl0dmFOGqwUChgeXk51MRrGKJEGMMBHUHIYzlZqM43wPOJOHKN7bQxc5xATcI0FlCJ1r0KzLgwQO+nNQOa7tNhzDpJiiUDNT1IZW8Lr7SIat6wsEpAP4jG5ewUyuar223z7Dq3H+eS0/w6j7XKg/t1UArbwLJT5RIo3PQ2tICI8wTqdN1adaexvcaz/F+5A27TcAaIFs7oc00StDgrPypUGLUt7phJ3oP1XkZ5BRpW2bYpeUhX36ZG40q6LdlIb42KnkVb8xISLKwSaLcHc9qTIOx0OiiXyygWi/DeR+rDVasrmQZER6TZabltBwGiA3LUDdWViZitoGvJYiVdOqzTGSySsbm5GZZOp3uvIYnyE6nU1qhJtVA2HODzqQIcJcDjXO4491y3aXHNKFZ/N7Dejd1GaPUnvy0VuvIxahi63W6YR4C8kSoH/q/ho37zfD6Per1+qBkC5SzisLBKoFar4fLly+h0OhHBUJcQ2Ko8c85FqvhUcHQMv5J4tupN3UZLKGrWQeNNCjuLkTiPvk5YoYuI2LHrNuPAbdoeW+cwrlyW54+C5QXUU1BSLM6DUM9CsROPYdSxcZ6KpkC16pD9wFpwO1OU9UbUe6KxYAYHQIQ7OkxMUjgLqwQ4tzw1Oofo2tJbuvBqtWkxtHpP2XVgixVWDoDna0pJFYW2jSMZSQpyFl+6/pqrVy4grmaB19djqSi83yqfpgejoDCMEtxpOvS4qkCF5Rqs4piEcV6IIq4dqvioILUOgAqD340eFAu+dEQj071aTq7vYl7CAGJhlQDdXH4kLjmmQkR2XnPq1nrQzaJQqRvOY5RYooDr4B/rpnGBT16LS2zpICC
tMuR2dX3p6qsQcx+Fkr/tsFhb367CP84jiBO8aTyIaa6zW5IwToHou9BwBIh+L35zDvdWAjaOO7LhhC3H5hL3DN/mRRksrBKgMJHkWVtbC7G4WkbG67p8uXNbq8wAUeJPBU95Aa5tr5NMjBMu3lvn86NryjkB+MPOph2a17VZDfId6hnEzSto2zRKMOM6clyK0HIi+mNTp2zDJI7AXmNU9mKSIrDfCkBQrFQGdq5CJf3iGH87nwI9LiWO5wULqwRYhUfyDRiM0LODZvr9wRLdHGHHbXbADbcD0ZlrAUQ6Ka27ZZcZdtClJNiRGLsrd0HronyApilp6XRWIB3cZEuQgWia0LrNe7HQWoSls/bwnahC1eIaO+DGknncpu9f2xZHYI7iHVQwNZQbp0D4WydFUW/S3pthqHqLs8bCKgF2JJI4dIUpOPrhWIVH953uoRaLKNOu7mFcBRmZaOaZNaZXYlKtiHII5CBqtVrI/fOe7FhUKHpvFRZbnqyYFE9PsrL2OhQ6DTc0MxBXr2/fWRw/wOsqKTfufN2m4DYVXPJEVErcr/fQOg9+m1KpFPYpQazfXec6mAePYCGVAF8+KwR1Vh1ge0Uf/89kMiiXy6GgR2v5tYAHwDZOgB0prjRXY03en+6nuo5KNmm6j52V91Jh02o1JTgV42LycZ00LhPA3+NCBR5vn22a8EK9IPIx1rJrXK7XnvTceqxeV+9JXofhnaZ09V3r/IT6LQFEhqrPAxZSCXjvwwhCtZga2ym5QyG21ovpOitsmiIqFAooFosRkom8gnoYej+dZ0Dr0dn2brcbmWiUIY0WF9E7sPH5JAukWRB9D9ZdZlss9B3GxeY2/rcegLaR99dQwXpUce3fqXDZjIe2n21m6KVegS5Prsf0+1trTqr1p/Ho9Xool8ux8xzuB2677TZ8/vOfR7fbxW/+5m8GBfW2t70t9vhdKwHn3HMxWFuAuBHA/wlgFcA/B3B+uP1d3vs/3e19DgK0iqrpWW3H1YEYRzu3NRcht3E8vZ0DUF09KgxmGJS44vXYwVVp6PWYmVB3Vev+eYwtT9Y59+xzsw3qFVg32CooYLpagUkCqDwEj7fn63HjMOo+NpzTVKi95ziPR1O5BL1Gvh/rYZC3YXgIIDKhDL+L9qmDwMMPPxw8mV/4hV+YeLzbD5fEOZcG8DiAlwJ4E4Cq9/6eHZx/qH5RKpXC85///CCwmi7UajEl0WyoQMHhRJNk8K3g8Bx+fBYCqQVkJSDbxm3aodUr8N4H/kBrAjRmpTcAbC8A0jjaWl5ttyU8CbXg07rZo2DdeL3+NNex3sGoNo+D9VaALU6FP6qYVcnwb13GrlgsRrweFhvx+u12G+fOncPTTz992CHBfd77W+3G/QoHfhjAg97770zLFs8SSpRpfK+ur67Kw2NVONRNZMGQxocUPB7LklHGkXZOfM0p093XYcvq3jN80I6uQ6HZWVWYVFA0z61eiy1kiXP/pwkJpoXtK3HsvlVOo8g/+/ckXmOUR2Dvy7Jtkn0aEqZSqTAVnBoFegxU4vT+6AloTcF+lUrvBfulBH4SwL3y/1udc68HcBbA2/2cLUGmVlBH3WntPT8gNbzOK8B55jT+5Qet1+sRl1q9hVQqhVqtFqy5jli0M9YwhQkMOiLHBmh2gh0N2G7tNb1GWAvPd2FHFFqhiBO0UWz7TozANApEvaE4ws/eN84riGvbpHurstGQIJ1OhwViqazJBQAIYSWPYak5OQE+BwlGXXpuVthzOOCcywJ4AsDN3vtzzrlTAC4A8AD+BYDT3vufjjlPFx958Z4asQt87/d+b2QtPWprrcqjS10qlSIfnnPH0QuwbiiVDLDlQah7qJVmyk/YsEPnAWBnJH/QarXCVFX8hkqijROUcUI+CnHXi8M0SmCUApnm/ru9t43fx52v3yru3lrjoOQhFbidiYjeA/tQo9HA+fPnsb6+PhfhwH4ogTsBvMV7/2Mx+64H8Cnv/fMnXON
Q30Q6ncb3fd/3hRpw1uRzn44L4DYgajXVHdfqMN1HS8AUJM/XakQtVtJOxU5ERcNUpCoGdZ/jmHe2gdtGIc6Kjjpn1HUOKwzcyf0nhQHjeI24UESLmCzHwu2aHnTOhRmsGcrQ8Fy+fBnnz58PXNAh4cA4gddBQgE3XHRk+O+rAXx9H+6xr1BXj2Qby3F1gk1afH58HZqrA0V08kibj2e4wXtwAhMlHmkd2Ca9L0MRXapcwwUd6sr72dgeGO8K2/9HFRFNeqc7Pcdit8Qe7z/uGZVPmHSuvcYoHoZCz+MYEjDTw4IhKhCGnzbrMGvsx+IjPwrgZ2Xzv3LO3YJBOPCI2TcXoJDQslrXXfPSFNRmsxm2UbsrociPzY6mpbE2Y8ABO+QgGPfqzMT1ej3SJpsJsIjLAIwSCmXUDxv6juLCkmmzAnHQa9pnnPaacYShGgvr8fH7dzqdUHZOHoD9R0PGfD6PVCoVGfA1a+x13YEagKvMtp/aU4sOCXSX1SJz8g7u43FA1BqQ/NN6Al10gr+1alCLb6rVaoSZZ+cAECH++DeVxKh6erVQ2ol3m4cepzzijt1pbD8tv7AXTBP+TAs+I7+tHTCkxWa6DB23MSykB0flwHUhZ42FrBh0zoVSX5tvBxCp7NJ0m048qfXgALZt4/laZ6ALVyorrEQiPQ12FCom6yprVaBWBlpycTfu+SjB3glHcJgYpZwmnTPpeajk9f3bc9S1p+DbyWXS6XSY2ZrX6vV6YZahWWMhlUAul4vM06cEGq09sLWenHoBlhVmrT4QnYVIOQNN1Wn2gKGAKqFerxeKj5Q70OOsJ2CxU5d6VCptHgR8GkwiAO1xKszjuBOrYAlNWQLRVavYL7jfVp7qrMXWo5gVFlIJ2AlC9EPTrdfyXB3Bp4JIgdbJRjXMUO9AY0mdmdYu8MEOw5hRy4IJFfJRAjzOTZ9kBceFANMK3DjsRMnshb8YpySnRdzzU9lT4C33oN/bjuTkbyV6Z42FVAKM17WmPy7FxviPH54ED0MDu059HGGkgjwqpcSSYw1PtKR5XJHMOPJrEus96v9p980b9jtNqX1Crx/nGWh2RvsPDQIVB5U5RyFubGzsa5t3g4VUAhrvU7DjxqNraacqCn5UJYni2HneK879VMWgws5jtEgojuCzAj7OQsdZ070I927DhUkk46RzJrVnJ+dMe29N/epvC/Xm9Bvr9OJaf6JZo1kr2oVWArZIB4hOAMEUHqGkn03Vab6YH1fjQhv72+vpdRhu2DZbb2WUErDXHKUgDsPN3m/rfFAYZeUJ/e4Atgmxeo5WEWhYoNfTsGKWWEgl0O9vzSKkH9Gm5GzH0JJcjff1GFaM6foE2nn0njq7kXUV2c64OQu106nAKznFe43CYVmhaa8/a2GwHpv15Kz3Y0M77Qc60AvYXnFqh2kfNG6++WYsLS3hK1/5Suz+hVQCfPl21mA7LyCw5eZRYPlbj9NyUu4jZ8CqMV2YREcr8v6qPLS6TMuMdV0B7bR6PesFjMJeBW4vHXi/O/9+uv9AfKGRZpBsylANgP5Wb8D+rRPIHjRe8pKXoFwuj1QC+zKfwF7hDnnsQCaTCSsNaVyvU4BpFSCnk7LxoFpxwrqLdjy6vm9VCNY1VIsPIAi/Dhoal+qy7SR2871HWelpuIFZW/jdIC7johZ/FEk76ho8n94hr7W0tIS1tbXDfD8HOp/AkYKm8FSja75fPzQtuS45BmyfSkvP07hfr8/OoOcyzKC1t0N79Z6qGHZDmu2G1NsJB7CbY+YJ47wUNRbK7ZAsHvWugS0eSg3DvIwfWEglYF05knE2H68kD1ldfmy67zohh5bvagqQsSCACCHJQhKtSrPlx9p55tnizvr+Bwl+Bw7WIpQHsCEA/7bkIbC1mImdOn1WWEglAGzFZrTCVvgBRBSFJQB5rh2/z7/VfbSZB1UkPF5jSy1bVg9jFGbdiabBPHR2RVyINA1XMSqLoIJuQ0Qr8Aw1Odv
1rLGQSoAuGfkAtbrWfVeBtp2YZb9k+pUFttZclYJmAyy7bMnKnbj+cRgXy+8nJrVv3hVA3DF2H7+VKnNeg984zrrbwV+7Gap9kFhYJRCXZtNUW1zsbbkA9SR0v95D91lGXyvJlHm214xLWy0C9kpojsJuuIw48leP428aAlUSdr3JRAnMAdSVt5bcEoR6joW17pYM1NQhr2GzAUC0/mBcB7VKZS/PfxSgIdJh35eII/e4PU7RxxkFfQ4NA+chFACAqYYxOec+7Jx72jn3ddl23Dn3Wefc3w9/Hxtud865Dzjnvu2c+6pz7kUH1fjdQsk2IDodmM4PoOMB1MXX1J6mj3g8j9GJQXWUYNwEoJZs1E4Tx0csCiYpxlHYjfKIu9c479D+bT08e4wKvfaZWWPasYz/DsCrzLa7Afy59/4mAH8+/B8Abgdw0/DnLgC/s/dm7i+oBNRNYw5ef1R4Cc0WqFLQY7UzMKvAUYk6AlEVhd5HFyU9KJf4SsdhvCsbRo7brnUCNuSbNaZSAt77vwBwyWy+E8BHh39/FMBPyPbf8wN8AcCqc+70PrR132CJP1pgTekB0YUyldCzpB2wPXaPs9pKEPL61nLwfGJeXMZFgmX37TbrAYzzOuI8SFtLMmvshRM45bcmFH0KwKnh39cCeFSOe2y47UnMCSiIcR9DBVzz//zQ6sJZL2Gc9dF6BO8HcxAA0anB6GHYMQCWSJq3dNuVhLg6gDiScNT7n8Tp2HUl5+E77gsx6L33boelvy667sChQoVOtznnQiFPXGfgORwAEufyKSno3NZEpXpfbrfb+D/nElDPQL0ILUOOe5YEu8du3uO0aUYlj+P2zwp7UQLn3HB68aG7//Rw++MAzshxzxxui8B7/yEAHwIOf+yAFS673R7Dab+53bK9Ck356ayz/X4/rBmgqca4QSR2qLGGEHH1BfMUXy4iRgnzKJJx1P+zwl4mOfskgDcM/34DgD+R7a8fZgleBmBdwoa5wqSUGwWcw4tHMcL6P/8m0Viv18My4jq7kL0vhXppaSksm04iSdvD8EVHF85LZ7qSoV6exV5SmfPw7abyBJxz9wJ4JYATzrnHALwHwK8D+Lhz7mcAfAfAa4eH/ymAOwB8G0Adg1WK5wpxBJxuj4Md1cfzRymPuPSjvacdZ0DFkc1mw9Jn9Xo9rIFoO5sNGSY9Q4Lx2EtNQhwBrNec5XeZFHYs5FDiUYK0H9ebhjG2+3TuAbr7JAcZMni/tVKy3stiXsimKwWjLD+ASOk533sqlUI2mw2TyHJ6MT0v7loHiQ984ANYXV3F61//+oNZi3A/MAslYAfv7PV6cZjmuuQOuHIx5zng/HQMD7LZLPr9wcIna2trYaZaW3bMbfOSfiLmhQTbKcZ5izqJDKeX14KwuG8zCyXNdSxqtVoynwDBjzUu5bZTN85WFo66b9w5jO85vfnKykoQ+lqthnq9jlqtFrFAXExV72urDOdJ6OapLbuBrRHhuhJcIwIYTCNeq9W2LTGmXNEsMGmBk4VUAsQ4lt/ioCr39Fq9Xg+bm5tIpVK45pprUKlUcObMGTSbTWxubuLixYtot9soFArI5/OhxJhKjUNTaYlGTVh61AVyVmDYppPCpNNp1Go1NBqNSOYHODrVngutBCbBphLHEYpUKJMKhiZ1hn6/j42NDXQ6HayuruLEiRM4fvw4Tpw4gRtuuAG1Wg0XLlxArVaLZAs44YmmF7nsuk57PW07EiBka/L5PLLZbFC4BFeJ0hJvloYfJSy8EpgkDNbFnuacUcIWpzz0+jovYafTwaVLl9But7G5uYlKpYJyuYx+v49SqYRUKoVqtRrSjr1eL4yE1HayTkHXMUgwHrYGo9lsotlswrmt9SRzuRz6/X7E9Y8rIAPm2wsAFlgJjLLqo46zx8Ztn1Y5xG3TUYzeD8pLC4VC6Fi06Py/2+0im80GK0UOgWsl1Ov10GmXl5fRbrfDNXSy0gQDqDLWtSBVUWcyGeT
zeeRyOeRyubAc+ebmZmQBUlsZOO9YWCUwCXH592kJPz1vJ/dSRtl7j2q1ikwmg1arheXlZVQqFWSz2WCVisUilpeXsby8HBZGrdVq2NzcBDBYv6DX6wUSq9FooFqtotVqodFoRKa3suHMlaog7Heh1aeAc80HCjSXmuMQ80KhEOFivPeRBWo4CelRQqIEYjCpknC3GBeLW2+i3++HdRC63S7W1tbQarWwsrKCSqWCVCqFUqmE5eXlSDahUCigVCqh1Wphc3MT58+fD2FDv99HNpvFVVddhUwmg2q1ikuXLqFWq6Hf70fGO7AtcUphXO58J888azDmz+VywfpnMpmQnqVirFQqqFQqkTEdXEqMaV0OGW82mzN9ppWVFdx111344Ac/OHVbEiUQg72wupPSc6OubeNKpgzpftLy1Ot15HI5XH311SgUCsjlcqEjkxfI5/NotVoolUqoVCqo1+uoVquo1+vw3iOfz2NpaSnUJ+RyuUjdQavV2jaBqn2mo0gy8ttwxie2na58p9MJ9Rr5fB75fB6lUil8A+cGq0U3Gg1sbm6GrADPt9OIjWsHsd/vbnNzE+fOnQtjXabBQhYLmXuP3bcXkieOGJyWiCTJl8/nUalUkM/nAWwVqNB6VSoVXHPNNThz5gwKhUKI/XUyk3q9HgYvsbaASqXZbOLy5cu4ePEiLl++HFxb5xzy+Ty89yH/HRfn7qT0eifHTjp/J/Ub+t4zmUxQjvl8PnYqeVZpFgoFLC0tRcZq8FqtVgvNZjOEVboy1DhPcide1G4woY8lxUKK3daJT1sdGFc7vpOOS2Z/eXkZxWIRqVQKnU4nuKHsuBcuXMCjjz6Kb37zmzh58iROnjyJq666CqVSCc65kG60E13aVZZyuRyOHTsWhiqzGKlarYZ0pE2R6QAny2fsFHEeFN3xOOLVbov7Ljr3AyszV1dXsbKyAgDBotPjOn78OFZWVsKzU4FynQBgMBt0o9GIrGqthOA4TmWvSnAa7OrdL6InsFsFMA6TNP6071ndVArn6uoqTp06FXLVOudBJpMJxUO0YoVCAceOHcOpU6dQKpWCMDebzVA7YEc3ap0BLT8FRNOO6g3kcjnk8/mQziT3oM+soxztjErTZFPiVmHWak++B/1fMy30psrlclCeFGwqBhKCOrsU04CFQgHpdDooBHIF/X4/eALqedm5BRVzEDolYwfG3H/P19jv98g2MU2locHVV1+NlZWVMDsRU4Z6XKlUCkLKlJYOdCH5xUo3LobR6XQioQMFMZvNhtSXzsPYbrfRbreRzWZx/PhxlMvl0C6WO6+vr2N9fT0oIbty8jgvSd8DeQxVWgBCDE8FScWVy+VQLBbDsGwKKrkTCjyPXV5eRqlUQiaTiaRU1dXvdrshu0JvgYqU7dpJFumQkYQDimm1snXx9tOLGBfj6j6WqzJd1Wg0Qsctl8sol8tYXV0NGQIb++uCGCrUxWIRq6urAAbuPIWWnbrZbEYUAoWY99bimXw+H2LoZrOJWq2GjY2NYDmXl5cDZ0EBpquuw611Sjfv/bZl3mmtmbLj/enZ6HyRZPPJA6gi6/f7IfYvFouBZGWlJdOn/OZsN5UDSUGdN1CPnwOBnxoLqQRs/Dku/WWP2c+PO8plZF6fQquzEjF1yDULKdS0voznqQDYuTm8NZ/PY2VlBcvLy4H0IrNdr9dx6dIlXL58GfV6HZubmyGrwKq5ZrMZESKGLLlcLuJmZzIZXHPNNZHJW+0MzTrOgRZal3XXGn3lWPhcdNV7vR7W19fDYCtdV5Lkp/c+zNHAIiwqR8b5fMe6cCjfGRUYt+VyuUAKkquhsjxqWEglMG3hz6w0OoWCgmDjbLqcFK6NjQ3UarXgLbCCkLEu2fByuRwKjAqFQlA0tVoNly5dQqPRAIBQgHT8+HFsbm4GAWHqkILMd0QXnRwD22fXdqCnwfuwpmF5eTmyRBefTQWf74L7u90uNjY2IpPF8rnpObA6koJLi0/
lQReeyojbqXyoTDj9PJUPh3dzPwnGjY2N4C3NE6677jqk02k8/PDDsfsnKgHn3IcB/DiAp733zx9u+38A/PcA2gAeBPAm7/2ac+56APcDeGB4+he892/e81McAvaaDtwv8J60KktLS1heXkY2m42Qd+12G+VyOUxFxhp3CiA7orqxFAi64RRKHf7KkIOhAK267qPbruWx3MbZkXRKNmYjVlZWAo8AICgsDQuArfQoC6VY5qxhAZUMvSTeh14QgEgqkJwB20L+hApR2854H9jiZNRboPfSarVQrVaxsbER2mhnkpoHvPGNb8TKygre/va3x+6fSAw6524DUMVgLQEqgR8D8F+9913n3L8EAO/9O4ZK4FM8blrMmhicV2jMypQfBYDMNSsGy+Vy6OQAIh6BzlzEeJ7lr2TL6U1wxKG6w7SOcR4UiUSm0XgvxvZUZuQAWHqbz+cjrD4rFVXYqMAIXpvKR3kPvi9aeO998IJUmWgdBq9Hj4hl1bVaLTIpLAk/ekMaGpFgJeGpi9QScSnOGWF3xKD3/i+Gwq3bPiP/fgHA/7jn5iXYBhaltFotrK2tBeafnZpxKC1ipVIJbjMtIo9rNpuh4zMroCWy6gkUCgWsrq6GQTKMt9V1Zlhga+qt8KvV1Ric22zlnlYrKutPxUIPgNCwh+Rlp9MJIQTfQTqdxrFjx4Llp7JgNSVTff1+P8zVwPfKLADfO8MAKohJWYF5x35wAj8N4A/k/xucc38DYAPAu733fxl3kpvhugNHCXYxUxJyGq9ubm6GdCALelRQKPwUQnZkWjNaMioZzYvXajVcvnw5En+TAde8Ol1uegx0p3lMoVBAsVgMA3SoeCiw9FA4nyIzAfRE6JJT4JiZoLW3szOTMOQAKpJ7PIaxPFOjJGPpkdgRgTo3g9ZpcFYhVQwW864c9qQEnHO/BKAL4PeHm54EcJ33/qJz7sUA/pNz7mbv/YY9189w3YGjAAoGy4V1wRPG33SD2+021tbW0Ov1UCqVgtDQGtLlpaDqwCTGtrVaLdTO05OgRSfrD2wV8LAtTDeSdwC23HKdiouhhy74qq4zQwMVfLXkDCeoLEj00QspFAool8sh10/iU9d7pLu/vr6OjY2NEM/Te1DPis/TaDRCmKRCzywEPYiF9AScc2/EgDD8YT98eu99C0Br+Pd9zrkHATwHwNm9N3WxoPEyq9dozSkEtHa06Gtra9sGFdH6qtvKSUvYgTXvzdieKUcKQrvdDtWJFDC2gZyAruas3ACFql6vh9F6tLgMQZiKoydBAdMU6OrqKorFYgiJgOhIQJZXUxnRc+D6D1RmvV4vKA+GJc1mM7IOpaZZOXMwKy3VS7IkqH6/o6IUdqUEnHOvAvB/AHiF974u208CuOS97znnbsRgZeKH9qWlCwYlpCj4dK0Zv2s5L4Uhm82iXC4HZWCHv9LattttVKtVrK2thTkGqBgY/6oiYn0C+QkKEivy6MpTkIEtV1rDB9YmxK3HwNACQCQlSAtcLpdDCGDb573H5uZm4Cx0+nZdIp7eDa03n7vdbqNWq0XKq+0yb81mM3gR6h2N+n5HBdOkCOMWHnkngByAzw4/AlOBtwH4FedcB0AfwJu993Y14wRTQDsRq/noYjMMoMXV+e80NuWwWACBYOx0OkGZsFQ2m82G/ST2eB0Akeo+KhJyFFqwRKXFCj0l17R6UfkJhgq2ApDX4zPR6rLdVBYEPQt17fkeNZXJZ+Mx5CaYKtUl6bXoSMcG2HkXRhWYHRUkYweOAOKsJQW8UCigUqkEK8zBMjobsVpzpvG896EqsNFoYGNjI1QI0hJqdR/vr6XCtNb0CDjisVKpYGVlZZv7rgOIWEjEa6kgab5eF+/g85CDoFByO1OkDGGYu7elvVQKSjSSpNSsBtugioHts9WB4wYOzRoSmiRjB44iVDi0Ug9ApAMzVmdJsM0UsOMSOmSYAkQGnKky3pMdWwuC9BrAVv0/PQnGzyx2YpkyU5I
6JkE9BbXwmhGg4DIc0kk8VPDotmvaTj0bsvsqtLZ8mcfpMGGOQGQJMs+boxqAkZjUtkQJzClGVZwpI6/xN9lvAGg2m4EtZ7xOt5hsN4VdJ8ukO2wFS+9Nq8qYnDE6Lb4WJQEDYnF9fR2dTieQiVrgw2e1pJoWRtG6a5aA3oweZ/P6FGJe2w5fVuHnM5XL5UCs6ntluKCzCcVZ/6NECBKJEjgiUGtMV1ZTcMyDM22mhUAAIpNpamqLaTIqgWknyWRxD69bLBYBIOJ2s36AGQoqCBsWqPBSgLWGQVl+WyWo6VIbp/NeVBa27VzXke+RGQpemwrXTijCe3LmJX2Go4hECRwAbIfbDWzHpXVUK8nMAevngYFgFQqFSH27Zg50qC2ACAk2TVspXFQopVIpMoeADkFmoVG5XEalUomEKTyWAq6KQOv36aIzTUoS01YkAluFSSrE9CR07D+vr7wERxZSyKkA9B3pkGS+c10x+qgiUQJHENZC1ev10NFZ/cbxAbTAWmTEWFZLi+3EogpVRsz9U9A4aYnO2gNEU5Ystul0OtsmOaEV1hg/btIRTrPObZpu1JGLLLXmdWzptA7C4vHkShiyaGpVLTwrCpeWllCtVoNCGVUpeFSQKIE5hu1Y/N+601pQo/X1FHbW62tn5/Rhm5ubgQjUGJuwg3yArfkEe70eqtUqgC0BsW1jTYMKF91xJey0Jl/DAgqZlurabEk6nQ4r76qlp2LUIc18fzrvAoBIipKKk3UKtPhUGlpkpcTmUUWiBPaAWX34VCoVhsnmcrngQler1TADMQWOTD2AECPbabMnrUikfAQrA+lpsKyZQm2LfZxzqNfrQfEwfGB9gs56xEwHrbiO16f11xQpsx+qVFQh6BBpDaXofWi1Ia+lmQ0tVSYZS4VyJXgARKIEjiBogbvdbmQBEmCLnaZ1V6HX/D9jd1bXWWINiHoiJMJYE0BrqfMHsJBHmX8AgRBU699utyODg7QwR111Wmi69ABC+TKVEvkQPj+tfyqVQrFYjBQ2AYh4RSr8FHDWWpTL5UCw6sQhdvnxec8I/PIv/zJWVlbwi7/4i7H7k2KhHWC3lv+g3rHWtpOF1wVF+KPFQhQaS5LZghjbbiqBEydOhNl1eB2ds1AVgE4CooVKDA2YviRPoVadiotTm3EfxxnokGlb0ESXnZObsn0cQqzjFrScWJ+b16TXwbBAZw+Kq6WYR1x99dVIp9N48sknk2KhvWBeYz473h3Ysm50a5VF1w6rhTtWEEgcUlhIuJ0/fz4yGQfrAzS3zio+yxPwuhQ+TqGmJb8k9egRaNaBikTDmlqtFuFANJTo9/sho2AXb2HRE4k+3oMjDUkiMiXJNur7UPJynvH000+P3Z8ogSMMdk7GwRxFB2wx3RwpR4vP1YjYmXm+1uNr+k2VH+fY0yG8XKeP19JhuDopqZKUwEAYNzY2ghdDUlPnT9DxCzZfT55BS4c1z6+hBD0SVZj9/tachAAiZcYsTebowbjCo3m3/jtBogSOKDRdxsU/Ll++vG3uABYVAQiCqyP5lNzS9KFaPwp4tztYxIT1CSTr6AUwnLD1/jooiYJNUo71Bhp7x61PYNvE8IRjD1QJ9Pv9UOXHEY9awMRn5bk6TJg8CY/X9RcUV4oCABIlMBLz6v4rbEpPx/EzVtYRgCyMoRLgiEAW31Sr1dhUob4LehcsACLbzwIlDR3ofWgtv87aw4FAtVotUv6s7jzbQk9BlQotN7BVv8D7ank1JwQBEPEeVBGpguJ+VQZxOArjBqZBogSOIGyaS2cJoruvMwMzXGBcrJafQqVCNM7t5VyDzrlQKszsgs7aSy8km82GyTuV8VfOgLG9knQ6TyIFkhOCAoiw+3aEItN8Wt/Pd8WZl7rdbqiVIPGofMWVkv6bBokSOMJQplwFngJNsIR3fX19W6YgjugaB1r5CxcuoFqt4qqrrkKtVgt1CTrPH2csYu5fB/+wmpFhBq22PocdRMR2q8C
yYlCPYyEP03266IjOmKRLpHHVZRKimq6MI1XnPS24E+x23YH3AvjnAM4PD3uX9/5Ph/veCeBnAPQA/O/e+/9yAO1eeLDzq6vLmF8r3tLpNMrlcqgt2NjYiCyxNWnQix0HQTc7nU5jZWUlsiaizjMIIExeyjSlDh7qdrthqi4KH8lAvbeOldBUo1b4UWHwHpzOnApHpzOj0HMkoM4RqPwHgG0jEK9U7HbdgfcCqHrv7zHHPg/AvQBeAuAZAP4MwHO892OHps1jncB+cgL7bTE0/04o624n/VDCzQ5/ZbxtF/awxJytGdDz+T8FVCc9YfpQy3RpYS1hp8ul6fyEDH30OW3ZL1OddhEQfV5NjZIbsBOoaJyvdQBXCPZv3YExuBPAx/xgwtGHnXPfxkAhfH4nLU0QD7WKlpSiIDGetUtts3qOhKDOkEuPIg5xAqCW2yoiFR5ekwShpiSV0WfRkJ16jFwFwx22k89Ar4HhAJUWPY7l5eVwnrr2VD7Mcmjb47IBbJe+3ytIMeyJE3irc+71GMwk/Hbv/WUA12KwGAnx2HDbNrgFWndglFex047E47PZLK655hocO3YMx48fx/Lycuzc/Ex76ao5TIPFzZA7bXvU61Bykj86vx+HAXOFJJ1NiIpLhZqzGuv6BSxtprfCZ2Pcr0OIWQDErAfDEYYIOvkKawY0E8HnU++D7TgqxUE7xW6VwO8A+BcA/PD3b2CwCMnU8Mm6A7tGp9PBxYsXI5Nlcr79XC4XsgQ6apCDhexQ2r3AknbAVqVhoVAAgGA5GYpwvy4pxmOYMQAGCkZnTOaEHhzwxFWDdHQhayNY+EThbbfbOH/+fGTiUr6HUTMGU6HxGlcyJ7ArJeC9P8e/nXP/FsCnhv8+DuCMHPrM4bYE+wQKHoWC5bOXLl0KVpXCpoJeqVRQLBYj6w1Y19bWBtiQw/7W8ETJOpJwVAYcw6BrI2pKj4qEeXqGD6wf0OnDWUrMSUp4DR1WTX6A70FToVQCDAO0nkJDqytZ6C12u+7Aae/9k8N/Xw3g68O/PwngPzjn3o8BMXgTgL/ecysTRMA0Hefv46g3DqFVUoxWj8fbeHZUZ58mZUhSkUOXOXMRz6dgUQjZblblMaanoNtzdJ1BZkN0klDNLFDAaeHpGagCoAK0IcmV5t7vFNNkB8K6AwDOYbDuwCsB3IJBOPAIgJ+lUnCDpcl+GoPlyd7mvf/0xEbMYThwGBWDu+l8liXX31oERAHlqEIAIfa2k2ROWyMwqj1KVqr3oHUL+XwelUolsrwZhdo5F7wFhgrqiuvgIOU9dEIPxv06voCTgsbNVrTb938UwDEPMYjNDiRDiUfgKCoBdZsJJe9UQDUvvhtycJr22XQeRx2yaIf3tvl5fRa2m0qART6s8tPxAFomrV4JRyja9QLjlN+VUAr8mte8Bp/85CfR7XZDyAQAm5ubyVDiKwGjYvZR1lzZcp5nXXZu1996r53A3kvJQ53IhILN8ERnC9KViUkc6ngC5xzK5XKkrawnYL2BWvter4dcLhdGUdqCJPv3UVYAAPDjP/7j+PSnP41er4d3vOMdQZG+853vjD0+8QRGYF49AUKr6Xgt22at0rNz/8V5AaPIwJ20k8LMewOIkIFK9Kmrb8/VuQb5bDqeQKcvU6+BSkKVBkc/WgVgMQ+ysB8olUqo1WpxuxJP4EqDWkvr9tMCknG3brDNBOg+vfZOobX9vC+n/6JCUkVjrTL3U+h16jN9Lh09CGwtXUbiUSdKiXt2qzDtOznKGKEARiJRAkcY2pk1LWg7uC7CQdfcZgV4LTupx05SZVaIuY3Xs0LP7fq3ThOm04jb9qVSKVSr1eDlUIDV07El0uPuu8hIlMARxajOTUGxK/0AiGQJbAGMuuoAtq1DMI2gKFGny5NrCk89FPUaNMMQx3EwBOCxljhkKpTPYrkO+yxXitXfDyRK4AqDZdtVKWgprgoet+l0YrsNB1TIgfiVk+LSmTxWfyvPYRW
ahkEs/Z1k8afZvohIlMAVCsuO2/kCRwnMQQiHjcGZDbBWnfvoIcQN5dUaCM4CzN/2ORKXfzokSuAKhQqBWvy444j9dJG1RiAubqd3wjZo/D5qMA/DFf7Pv0keUmloJWDcdGn2Wa+U1OAoTHq+RAksAKbt3AcpBFYpAdEhyaPG7tNb6Pe3Fl214QJThzrK0N5zFH8yat+VBOVj4pAogSkwbSc5jNqCowIKqGYi4gqcdHvc+gQ2LNDjlWeg4LMk+UoX7J1gXG0EkCiBBIeAcdZYyb9RndVyCnHM/6SBQOMU9KJ4BKOQKIEEB4ZJQqUTkdiYnfl+C0swximYae6rswVx9mE70/KiIFECCQ4VmgYcV/asZcDcr7/jSqSBeC9BSUWWG2vNAQuTAGxboXkRFEKiBPYRcaTWokOF28byekwccaikYNx2qziYeuQxWkrNocpc8ozWnyXG9AL0PjspKpqUfp1nJErgADHKai0aVJjihD+ujl/3A9F5Dflb04pWseiAJU47XigUwpwDWj/B6ccnZRMmPeNRxW7XHfgDAM8dHrIKYM17f4sbzEp8P4AHhvu+4L1/8343+rCwXx/2KHeQ/Yad1luhFt5u18wBr6PH6SpK3M5hyVyLgbMFczITTrrK2YiIRfte03gC/w7Avwbwe9zgvf+f+bdz7jcArMvxD3rvb9mn9s0Mi9YRDgM2zaeCHVflp8daIVWrr5Op6FRm/X4/WHmubuS9D2XGFP44pXNQRVSzwGte8xrkcjnce++9sfv3tO6AG3yJ1wL4ob00MsHh47DTYpa91/DADjEGsG0UoM7+q0qBws5BRFoxqEx/s9ncNm3ZqFGGcQrgKCuCs2fPhurMOOyVE/hBAOe8938v225wzv0NgA0A7/be/+Ue75FgnzErjsLG7cDWnP66L66kV6cZs+XAOokJLb0VWluarLMU7aSs2m5TqGczT7MVf/e73x27f69K4HUYLDtGPAngOu/9RefciwH8J+fczd77DXuiW6DFR+YFthPPQhnYbIGGAfp/nLKI20eBBqIjDK2g6sjGUeTfKOGe1msaVRk579i1EnDOZQC8BsCLuc0Plh9rDf++zzn3IIDnYLBKUQQ+WXxkT9jJABgVOj3uMDvouBSaDQV4DK133HlaamynK99J4dCotsa5/7aoaZwyOUoZob14Aj8C4Jve+8e4wTl3EsAl733POXcjBusOPLTHNiaIwU46+rj9VjgPKvaNc6vtPp3VSMOAOGWnNQDA6LJhjYX1/Dh3XWsMeE1t304UzFHwAIhpUoRh3QHn3GMA3uO9/10AP4loKAAAtwH4FedcB0AfwJu995f2t8kJLOIs/aT41p4LICzfpbG3MujjrNs4IdcCIVbs6aShACL3iyvc0Xto2KBxPfdxyXJmCbQugGsWqHKx15+FpzRLJLMNX4EYx2iP4gVG8QTjQo04paDXoaXm8mM6Q7DG+CQHmb+PWx+Av1XYddwBiUFmAHQKMyA65ZglF3fKEUy7fw6RzDa8KLBCY4WHv+Osd1xF3ygFMI07TAHXLACH+06zBNg4a61EnFYSeu+xtLSEUqkUZh1idSBnJVYycRwZad/JtJgnYpAeHldjtkiUAObrg+0WcR1VLaSuPUALaWcdpjDRjbYrAwGIuNbTvC/eo9PphOurcFlrzmuPKuLh83AwkFYJ6rkUeIYGPA4ACoVCGDXI8ADYnkacJPxHpd9wMpZRSJQA5v8jjsIo192CAkuLPMq6a5GNEnN06Z3bWjBklCegbZgULpAXoPDp0uE8Ji4LwspALlLKZ+T2QqEQWeqMSoKhCOcl5H4AaLfbaDabYQxB3DNZxbfT6djjYJXNLPriQiqBo6LBR2GaDmWPmWZ4rBVgFTrrSloPIu5ao7bTIjP+t8dZ0i7OTVdFkU6nUSwWw8rILB5Si87tdrWier2OjY2NUErM6/EZ7XyHVCL8f5TCGPVOx3Evs+qPC6kEiHGWat6grjDHwpNNV8sdxwf
w/DjY57UDdXjMTlKSeoy2K0752jbaxUVt23RNBV3cVIk/fSdse71eD+9LV0SOa7MlDEcpNLsv7vlGvas4riNRAoeAUZZplNWZJ4Wgws0JMFQxKFHGffytFtEKo33+adNjcW7/uI6tacJx3orG54StHyAX0O12ceHCBXS73ciSZQDCO+E5zBoo/6DDkQlmGGjxdYzBKCLT9p9xCnce+9rCKIFRCmAU5vFjMa4Hxrc9TrBHua6j9o2z1vZnknDEncNr6rk6SEjBFYd5fK1Wi6QDi8Vi8BCWlpZiBxDFrcmoyihu1SblPwBEwgx9PltUNI4vmZe+pFgYJTAuBQRMN3x0nGt9EB90tx0mzvWeFLtO2xY9L25ugFHvhsrLrjhkB92oMJPZz+VyQeB5HOsAOM24rnasy6AxM8GYn+3hdp1PgNdVLoDX0/JknfuQbR5HuMa9y7hwLQkH5gB70dL7/RHHEUxx26Z13y1nMC6etbDj/yfde5QHofdTIk7DGxb8cOIPZhKY91dFl8vlsLS0hGKxGBRHNpuNPCfDqFarhUajEaYUy2QyaLfbIVVoQy1ga/BRt9sNIQeVgQq+ji2w32bUO9JSZatg9gt2CTeLhVMCuxGcUf/HHb9XRTDJy7DtHxVf67ZRnXCc8I57ViXh+H+c10QrqjH4qHtxCjDnthYXtUuPUUhpmWntuY5io9GILGNu3xWvSYHm9UgkKq/C9uq74/5JCjPum0zymA7SC3jta1+LXC6Hj370o7H7F6ZsOK4DAtOTOvbYaVjfnSqFacKMcYpJhS2VSm2bNmunGNUe+574t75jS7qphbfCxuO9H+TyWdyjJB9/KwFIr0DHD/A+usSZnXqs3W6j0WgEL6PdbkfCBbZLpz1XK62cBhBvYa0S1O32tx53wAunLHbZcJyLNkobTxMvW7dv1LnTuNmT7jMOcYrpIFxKtsWOtLPViHwvul5gnEdCa6xt5zUolEq4cZpwzhqcy+WQTqeD2295Br03/4+beYizDVsBB7bWJ1ClFSf4ViFO4p80a8H/7VwHh4mFUQLA7gbBxEHLb6f9aAfp/o1TRNOEPOO8mlGurQoLR+WphVQBVw+FSoMWP64wh79VmHkc1wVotVphpKAShxRahg/6d7vdRqvVCoKvoYAtF9ZMQdy7oKdg98W9V+thjAvvZoGFUQKjBNuOSye0xHbUOPM44TvsD2k75m7CjzjCz3ZsGyvHubkU7rgJRPm3TuvFfLxek/fSAT8AtoUJnD2Y7decvrrxFG5ez2YAeL6Se6qsrPfD59ExB/a9a8jkvY8snc522SHb+u4PGwvDCewF1hpOIoR4zCSm3R4zjkiato08Z5yF0WurQMeFFur+WkVJQVZhYexN91utu22fdbv1Wioc6nHx2DhOQN3quPkE+TfvTaWgioj3UCXHb2ULh1SwJ33vOcHuOAHn3BkMphs/BcAD+JD3/recc8cB/AGA6wE8AuC13vvLbvDmfgvAHQDqAN7ovf/yfj3FLDCtpt5JJxhFJgHb41IeH9fR1DrpNnsdy2xrO/T5rFtvBcOGQNbSk723hGAulwvHqtutwqZtIXuvSkjbpYLn3NbYAFp8En3Wk7Pv/QgI7oFjoifgnDsN4LT3/svOuWUA9wH4CQBvxGAqsV93zt0N4Jj3/h3OuTsA/G8YKIGXAvgt7/1LJ9wj+RKHgLj43io4jfeVybfWW/dbMg5AxDUn1OXnPfljB+sk2D3OnDmDxx57LIQiRK/Xi/UEtmnnST8A/gTAj2KwytDp4bbTAB4Y/v1vALxOjg/HjbmmT36Sn+Rn7z/FYtG/733v89ls1jvnfLFY9KVSyZdKJQ/gbJz87YgYdINFSF4I4IsATnnvnxzuegqDcAEArgXwqJz22HDbk0iQIMGBolKp4MEHHwwCXq/XJ54ztRJwzpUBfALA27z3G8al9Dt16V2y7kCCBPuOp556Ch/5yEd2dE58GZ2Bc24JAwXw+977/zjcfG7IF5A
3eHq4/XEAZ+T0Zw63ReC9/5D3/tbYGCVBggSHholKYMj2/y6A+73375ddnwTwhuHfb8CAK+D217sBXgZgXcKGBAkSzBmmyQ78AIC/BPA1DNYSAIB3YcALfBzAdQC+g0GK8NJQafxrAK/CIEX4Ju/9thWIzD12FEokSJBgV4jNDiTFQgkSLA5ilcBUnECCKw/5cgbFkyn8439WwqlnZ7B0DHAVAGng2I0Od77pJHLFFHJlhxf/Uwc3GKeDn/q5ZwPLwOt//jl46e1ZuCzwylcXkFlN4X/4X/4Bfuh/SgMOyFSA7ApQvjaF5/6jHJAFztyUw8tfVYGDA+DwvJcu4XkvWQKKwMr3APmTSHrkDLAwYwcSROH7KSDjcfLaMi49tYR2qorbfvQ07j97ERcuNdFJdYCcR7+dwolrUnCZDnwnhTvfdArZ0zVUcm288Ooi7n+gjVbP45+98Xq86ieegUfOPYz80mm0MxfwhT9rolhawrNvPoYHvvQUTl+fxQtfUcbn/3MV3/MPl3HjrSl885ubSJcciqsevgM0L8z6zSweEr27sHB4+Y8s45prjqPfTeOFtx7H7a8+iaVMCv/oh47hef/wKiwfy8Cn+zj/SAq+CwAe6X4FD51tolgsAC6NXhP4/tuuw003l/Dlv34QQA5PPlbH+Uf7SHcd0Egh1S4Cw0GFp5+9jJfefhrXPjeLC0/2cM3VJdxySxndKtBvYlDykuBQkXACC4rMUho3PH8JhUIBDz9Qw1VXZ3DNM3P46hc2cN0/yOHEVQV8+QtraDZ6qKyksHa+D3jgB//JGfzlZx7FS155NS5eWsND32jj5T9yAuefbmHjUhNXXePwjb9uI5UB0g5IpdO46uoCnnikiuOnMrjh5jIuPNFHo9FEq9HDsauW0Gn1ceF8G7020B2/WE6CvSEhBhNMA4fEHF+xSIjBBNMgUQCLhkQJJEiw4EiUQIIEC45ECSRIsOBIlECCBAuORAkkSLDgSJRAggQLjkQJJEiw4EiUQIIEC45ECSRIsOBIlECCBAuORAkkSLDgSJRAggQLjnmZVOQCgNrw91HFCRzt9gNH/xmOevuBg32GZ8VtnIuhxADgnDt7lKcfP+rtB47+Mxz19gOzeYYkHEiQYMGRKIEECRYc86QEPjTrBuwRR739wNF/hqPefmAGzzA3nECCBAlmg3nyBBIkSDADzFwJOOde5Zx7wDn3befc3bNuz7Rwzj3inPuac+4rzrmzw23HnXOfdc79/fD3sVm3U+Gc+7Bz7mnn3NdlW2ybh2tJfmD4Xb7qnHvR7Foe2hrX/vc65x4ffoevOOfukH3vHLb/AefcP5lNq7fgnDvjnPucc+7vnHPfcM793HD7bL8B1zGfxQ+ANIAHAdwIIAvgbwE8b5Zt2kHbHwFwwmz7VwDuHv59N4B/Oet2mvbdBuBFAL4+qc0A7gDwaQymH34ZgC/OafvfC+AXY4593rA/5QDcMOxn6Rm3/zSAFw3/XgbwrWE7Z/oNZu0JvATAt733D3nv2wA+BuDOGbdpL7gTwEeHf38UwE/Mrinb4b3/CwCXzOZRbb4TwO/5Ab4AYJVL0c8KI9o/CncC+Jj3vuW9fxjAtzHobzOD9/5J7/2Xh39vArgfwLWY8TeYtRK4FsCj8v9jw21HAR7AZ5xz9znn7hpuO+W3lmF/CsCp2TRtRxjV5qP0bd46dJc/LCHYXLffOXc9gBdisLr3TL/BrJXAUcYPeO9fBOB2AG9xzt2mO/3AnztSqZej2GYAvwPg2QBuAfAkgN+YaWumgHOuDOATAN7mvd/QfbP4BrNWAo8DOCP/P3O4be7hvX98+PtpAH+Mgat5ju7a8PfTs2vh1BjV5iPxbbz357z3Pe99H8C/xZbLP5ftd84tYaAAft97/x+Hm2f6DWatBL4E4Cbn3A3OuSyAnwTwyRm3aSKccyXn3DL/BvBjAL6OQdvfMDzsDQD+ZDYt3BFGtfmTAF4/ZKhfBmBdXNa5gYmRX43
BdwAG7f9J51zOOXcDgJsA/PVht0/hnHMAfhfA/d7798uu2X6DWbKlwoB+CwP29pdm3Z4p23wjBszz3wL4BtsN4CoAfw7g7wH8GYDjs26rafe9GLjMHQziy58Z1WYMGOnfHn6XrwG4dU7b/++H7fvqUGhOy/G/NGz/AwBun4P2/wAGrv5XAXxl+HPHrL9BUjGYIMGCY9bhQIIECWaMRAkkSLDgSJRAggQLjkQJJEiw4EiUQIIEC45ECSRIsOBIlECCBAuORAkkSLDg+P8B9rKHC2+GeS8AAAAASUVORK5CYII=\n",
       "text/plain": [
        "<Figure size 432x288 with 1 Axes>"
       ]
@@ -135,6 +137,57 @@
      "output_type": "display_data"
     }
    ],
+   "source": [
+    "from matplotlib.pyplot import imshow\n",
+    "\n",
+    "frame = dataset.get_frames()[sample_idx].swapaxes(0, 2).swapaxes(0, 1)\n",
+    "label = dataset.get_labels()[sample_idx]\n",
+    "\n",
+    "imshow(frame)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 152,
+   "id": "7f65d8a9-7332-453f-adf6-4f4361e03b80",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "0.0\n"
+     ]
+    }
+   ],
+   "source": [
+    "print(label)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "6f1b2c8a-8d98-43a0-a96f-d1ea0a2b4720",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# yolo_model = YoloModel('onsd')\n",
+    "video_path = \"/shared_data/bamc_onsd_data/revised_extended_onsd_data/\"\n",
+    "classifier_model = keras.models.load_model('sparse_coding_torch/onsd/valid_frame_model/best_classifier.pt/')\n",
+    "\n",
+    "transforms = torchvision.transforms.Compose(\n",
+    "    [\n",
+    "#      MinMaxScaler(0, 255),\n",
+    "     torchvision.transforms.Resize((224, 224))\n",
+    "    ])"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "3b05ae07-1df0-4e26-9083-86ab5225fab6",
+   "metadata": {},
+   "outputs": [],
    "source": [
     "from matplotlib.pyplot import imshow\n",
     "from matplotlib import pyplot as plt\n",
@@ -193,6 +246,58 @@
    "id": "51427400-238d-4d5a-b139-5d28b2084f9c",
    "metadata": {},
    "outputs": [],
+   "source": [
+    "from matplotlib.pyplot import imshow\n",
+    "from matplotlib import pyplot as plt\n",
+    "from matplotlib import cm\n",
+    "import math\n",
+    "from tqdm import tqdm\n",
+    "import glob\n",
+    "from os.path import join, abspath\n",
+    "\n",
+    "labels = [name for name in os.listdir(video_path) if os.path.isdir(os.path.join(video_path, name))]\n",
+    "\n",
+    "videos = []\n",
+    "for label in labels:\n",
+    "    videos.extend([(label, abspath(join(video_path, label, f)), f) for f in glob.glob(join(video_path, label, '*', '*.mp4'))])\n",
+    "\n",
+    "best_frames = {}\n",
+    "for label, path, vid_f in tqdm(videos):\n",
+    "    vc = torchvision.io.read_video(path)[0].permute(3, 0, 1, 2)\n",
+    "    \n",
+    "    all_conf = [0] * vc.size(1)\n",
+    "    \n",
+    "    for i in range(0, vc.size(1)):\n",
+    "        frame = vc[:, i, :, :]\n",
+    "        \n",
+    "        frame = transforms(frame).swapaxes(0, 2).swapaxes(0, 1).numpy()\n",
+    "        \n",
+    "        frame = np.expand_dims(frame, axis=0)\n",
+    "\n",
+    "        prepro_frame = tf.keras.applications.densenet.preprocess_input(frame)\n",
+    "\n",
+    "        pred = classifier_model(prepro_frame)\n",
+    "        \n",
+    "        pred = tf.math.sigmoid(pred)\n",
+    "        \n",
+    "        all_conf[i] = pred\n",
+    "        \n",
+    "    max_idx = np.argmax(np.array(all_conf))\n",
+    "    \n",
+    "    best_frames[vid_f] = max_idx\n",
+    "    print(vid_f)\n",
+    "    print(max_idx)\n",
+    "    print('----------------------')\n",
+    "    \n",
+    "print(best_frames)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "f6fa46ac-afc6-41c4-b8e2-2283ed02a4b3",
+   "metadata": {},
+   "outputs": [],
    "source": []
   }
  ],
diff --git a/notebooks/exploring_pnb.ipynb b/notebooks/exploring_pnb.ipynb
index 5baa638..a76795f 100644
--- a/notebooks/exploring_pnb.ipynb
+++ b/notebooks/exploring_pnb.ipynb
@@ -2,7 +2,7 @@
  "cells": [
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 1,
    "id": "40fe0f6e-aa6a-4d7a-9175-6b6e6aa02412",
    "metadata": {},
    "outputs": [
@@ -10,48 +10,48 @@
      "name": "stderr",
      "output_type": "stream",
      "text": [
-      "2022-08-12 01:31:29.832438: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:29.834371: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:29.836208: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:29.838047: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:29.849348: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:29.851260: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:29.853111: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:29.855336: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:29.857171: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:29.858973: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:29.860793: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:29.862614: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:29.866676: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations:  AVX2 FMA\n",
+      "2022-08-25 15:04:46.181898: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:04:46.183851: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:04:46.185760: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:04:46.187608: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:04:46.197704: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:04:46.199631: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:04:46.201476: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:04:46.203366: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:04:46.205246: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:04:46.207062: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:04:46.208942: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:04:46.210731: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:04:46.213814: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations:  AVX2 FMA\n",
       "To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.\n",
-      "2022-08-12 01:31:30.290806: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:30.292756: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:30.294584: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:30.296398: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:30.298162: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:30.299945: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:30.301681: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:30.303459: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:30.305247: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:30.307002: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:30.308785: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:30.310617: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:42.727334: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:42.729222: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:42.731200: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:42.732942: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:42.734672: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:42.736372: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:42.738076: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:42.739774: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:42.741476: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:42.743183: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:0 with 42277 MB memory:  -> device: 0, name: NVIDIA A40, pci bus id: 0000:01:00.0, compute capability: 8.6\n",
-      "2022-08-12 01:31:42.744358: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:42.746217: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:1 with 42277 MB memory:  -> device: 1, name: NVIDIA A40, pci bus id: 0000:02:00.0, compute capability: 8.6\n",
-      "2022-08-12 01:31:42.746913: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:42.748617: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:2 with 42277 MB memory:  -> device: 2, name: NVIDIA A40, pci bus id: 0000:03:00.0, compute capability: 8.6\n",
-      "2022-08-12 01:31:42.749184: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:42.750883: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:3 with 42277 MB memory:  -> device: 3, name: NVIDIA A40, pci bus id: 0000:04:00.0, compute capability: 8.6\n"
+      "2022-08-25 15:04:46.709187: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:04:46.711102: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:04:46.712960: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:04:46.714695: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:04:46.716435: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:04:46.718164: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:04:46.719883: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:04:46.721612: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:04:46.723368: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:04:46.725137: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:04:46.726845: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:04:46.728585: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:01.769070: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:01.770989: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:01.772817: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:01.774559: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:01.776291: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:01.778016: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:01.779711: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:01.781417: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:01.783104: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:01.784812: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:0 with 42277 MB memory:  -> device: 0, name: NVIDIA A40, pci bus id: 0000:01:00.0, compute capability: 8.6\n",
+      "2022-08-25 15:05:01.785487: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:01.787150: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:1 with 42277 MB memory:  -> device: 1, name: NVIDIA A40, pci bus id: 0000:02:00.0, compute capability: 8.6\n",
+      "2022-08-25 15:05:01.787639: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:01.789341: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:2 with 42277 MB memory:  -> device: 2, name: NVIDIA A40, pci bus id: 0000:03:00.0, compute capability: 8.6\n",
+      "2022-08-25 15:05:01.789948: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:01.791606: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:3 with 42277 MB memory:  -> device: 3, name: NVIDIA A40, pci bus id: 0000:04:00.0, compute capability: 8.6\n"
      ]
     }
    ],
@@ -70,7 +70,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 2,
    "id": "a9ea96d9-6ef6-4ee6-82ac-c6dc45f7caa5",
    "metadata": {},
    "outputs": [
@@ -78,30 +78,30 @@
      "name": "stderr",
      "output_type": "stream",
      "text": [
-      "2022-08-12 01:31:43.687293: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:43.688181: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:43.689973: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:43.691706: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:43.693507: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:43.694194: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:43.695976: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:43.697709: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:43.699416: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:43.700118: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:43.701987: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:43.703849: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:43.705839: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:43.706615: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:43.708415: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:43.710193: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:43.711997: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:43.712708: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:0 with 42277 MB memory:  -> device: 0, name: NVIDIA A40, pci bus id: 0000:01:00.0, compute capability: 8.6\n",
-      "2022-08-12 01:31:43.712835: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:43.714563: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:1 with 42277 MB memory:  -> device: 1, name: NVIDIA A40, pci bus id: 0000:02:00.0, compute capability: 8.6\n",
-      "2022-08-12 01:31:43.714702: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:43.716440: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:2 with 42277 MB memory:  -> device: 2, name: NVIDIA A40, pci bus id: 0000:03:00.0, compute capability: 8.6\n",
-      "2022-08-12 01:31:43.716576: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
-      "2022-08-12 01:31:43.718309: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:3 with 42277 MB memory:  -> device: 3, name: NVIDIA A40, pci bus id: 0000:04:00.0, compute capability: 8.6\n"
+      "2022-08-25 15:05:02.757072: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:02.757966: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:02.759668: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:02.761552: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:02.763405: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:02.764154: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:02.765948: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:02.767705: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:02.769737: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:02.770511: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:02.772338: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:02.774146: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:02.776115: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:02.776921: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:02.778745: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:02.780502: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:02.782336: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:02.783053: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:0 with 42277 MB memory:  -> device: 0, name: NVIDIA A40, pci bus id: 0000:01:00.0, compute capability: 8.6\n",
+      "2022-08-25 15:05:02.783186: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:02.784964: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:1 with 42277 MB memory:  -> device: 1, name: NVIDIA A40, pci bus id: 0000:02:00.0, compute capability: 8.6\n",
+      "2022-08-25 15:05:02.785155: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:02.787003: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:2 with 42277 MB memory:  -> device: 2, name: NVIDIA A40, pci bus id: 0000:03:00.0, compute capability: 8.6\n",
+      "2022-08-25 15:05:02.787163: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
+      "2022-08-25 15:05:02.789023: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1532] Created device /job:localhost/replica:0/task:0/device:GPU:3 with 42277 MB memory:  -> device: 3, name: NVIDIA A40, pci bus id: 0000:04:00.0, compute capability: 8.6\n"
      ]
     }
    ],
@@ -228,10 +228,35 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 33,
    "id": "7af3ea06-6173-4cef-9e40-9750dd8d8d4d",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAOYAAAD8CAYAAABjJ9hGAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/MnkTPAAAACXBIWXMAAAsTAAALEwEAmpwYAABem0lEQVR4nO29e4ys6X3X+X3qXl3dXX3vPtcZz/gWj5117CSEkCCUEDYYC2elQAwoOCHI0i7LBtgVOLt/wEogkRWCBIESRgRwWMAJJlpbWXaTEAdlkcgkcezYiWOPx55z5tz7Xl1Vfanbu390fZ761nvOmTNzzunTNdPvTyp1V9Vb7/u8z/v8bt/f5QlJkiijjDKaLMqd9gAyyiijuyljzIwymkDKGDOjjCaQMsbMKKMJpIwxM8poAiljzIwymkB64owZQvjeEMJXQggvhRA+9qSvn1FGbwQKTzKOGULIS3pR0vdIui7ptyT9uSRJvvTEBpFRRm8AetIa81slvZQkydeTJOlI+oSkDz3hMWSU0cRT4Qlf74Kka/b+uqQ/5AeEED4q6aPDt+9/QuN6w1KhUFAul9NgMFCSJMrlcup2u/E7Ser1eioWi+r3+8rlcur1evH7fr+vfD6vwWCgwWAgSSqVSvEcuVwunr/f7yuEEF/9fv8U7vhNRZtJkizf64snzZgPpCRJnpf0vCSFELJ8wQfQ2tqa3vve9+rw8FAzMzNqtVr68pe/rNu3b+t7vud7FELQb/7mb+r973+/QgiqVqv65V/+ZbXbbX34wx/Wiy++qOeee07Xrl3TF77wBVWrVb3zne/Uzs6ONjY29O53v1ulUkn9fl+f+tSndPnyZX3Xd32XNjc3tb+/r8PDQ5XLZe3t7Smfz2t5eVmf//zndf369dOemjcCXb3fF0/alL0h6ZK9vzj8LKOHpN3dXa2vr+vg4EC3b9/WwsKCisWipGNNmSSJ5ufnlc/n1Wq1dP36dS0tLSmXy+nmzZt66qmndOvWrahFO52OyuWySqWS9vb2tLOzo/39ffX7fc3Pz6vX6+lrX/uaFhcXtby8rHq9rq2tLdVqNdVqNc3NzalcLp/mlLwp6EmDPwUdgz/frWOG/C1Jfz5Jkt+/z/GZxnwAhRBUKBQUQlAul1OhUFCn01Gn01GlUokmZ6lU0tHRkSTp/PnzunbtmvL5vEII8TyDwUDdblezs7M6PDyMpm+pVFKxWFSpVNLm5qZyuZzy+Xw0gXu9nkIISpJElUpFBwcHmZn72uizSZJ8872+eKKMKUkhhA9I+glJeUn/IkmSv/cqx2aMmdGbmSaHMV8PZYyZ0Zuc7suYWebPXZRTNi0ZnTZNHCp7epRTae5tmn/nfyeFoN0vf1pHO1+SlCntjJ48ZYw5pELtkp760L/SW9/zdh10C7r2Dd+nVz75YXX3Xj7toWV0Bimz2YY0ff79qpx/Th/5owP94Hf0VF15l6YvfvtpDyujM0oZYw6pf9RU6PfU6UpJkqiY66h/uHvaw8rojFJmyg6pffM3tP25/1PP5/+Ueipo63P/Ua1r/+W0h5XRGaUsXOLXy9dUnn+7ciHoYOdFJb3Wk7x8RmeP7hsuyTSmUdJv63Dzc6c9jIwyynzMjDKaRMoYM6OMJpAyxswoowmkjDEzymgCKWPMjDKaQMoYM6OMJpAyxswoowmkjDEzymgCKWPMjDKaQMoYM6OMJpAyxswoowmkjDEzymgCKWPMjDKaQMoYM6OMJpAyxswoowmkjDEzymgCKWPMjDKaQMoYM6OMJpAyxswoowmkjDEzymgCKWPMjDKaQMoYM6OMJpAyxswoowmkh2bMEMKlEMKvhRC+FEL4/RDCjw4/Xwgh/EoI4avDv/PDz0MI4R+HEF4KIXwhhPC+x3UTGWX0ZqNH0Zg9Sf9zkiTvkvRtkv5KCOFdkj4m6VeTJHmbpF8dvpekPynpbcPXRyX91CNcO6OM3tT00IyZJMmtJEl
+Z/h/U9IfSLog6UOSPj487OOSvm/4/4ck/WxyTL8haS6EcO5hr59RRm9meiw+ZgjhaUnfJOkFSatJktwafnVb0urw/wuSrtnPrg8/yyijjFL0yHuXhBCmJf0HSX8tSZK9EEL8LkmS5PVuDBRC+KiOTd2MMjqz9EgaM4RQ1DFT/pskSX5h+PEdTNTh3/Xh5zckXbKfXxx+NkZJkjyfJMk3328XpIwyOgv0KKhskPQzkv4gSZJ/aF99WtJHhv9/RNKn7PO/OERnv01Sw0zejDLKyClJkod6SfoOSYmkL0j6/PD1AUmLOkZjvyrpP0laGB4fJP1TSV+T9EVJ3/warpFkr+z1Jn799v3WfrZxbUYZnR7dd+PaLPMno4wmkDLGzCijCaSJ3uq9UChobm7uiV4zhKBcblxeJUkiwkD3M/1DCGPfpd/7Oe7123sdw/sHXTtNSZLEe3itv0mP9/Uew+f3u8f7nc9/n77Pe71PX/+1jPte14MGg4EGg8FrHvPjol6vp93d3ft+P9GM+aDBP26qVCpaXV3V4uKiFhcXVavVdHBwoIODAyVJosFgoP39ffV6PUmKi/9ezAxD8btcLqdCoTB23GAwUAhB+Xxeg8FA/X4/MhW/LxaLyufzcVGxkFjInLPX642BB/67fr+vbrerfr+vwWCgfD6vQqGgJEniNQuFQhxHr9eLY+PeuBdnAsZRKBTi+CHGk8/nVSwW430zJs7r1+acvV4vjtWPk5QGB8fmhvvpdDpxfvL5vMrlsgqFgnq9nnK5nGZmZlQsFtVqtbSxsaFr165pfX19bPwnTQ8SJhPNmJIiEzwpKpfLCiGMLQoWZS6XU6lUiuPq9/txkSF1OY6Fz+cwCIumUCjEBelSn/e+gAuFQvyf88AU6YXpzJieOxb4YDBQt9uNnxeLRRUKhcik+XxepVIpnpdx9/t95fN55fN55XI5DQYDdToddTqdeF0YkO/7/f6YAGNeYZxcLqdutxsZ2S0EZ3xn/lwuN3ZOngPvi8VivA8EA58zB91uV0mSaGZmRtPT07pz584TX2uvRhPPmE+KQgiqVCqq1+s6d+6cpqen40I6ODhQt9sdY7J8Ph8/Y2GzYGAkjocBkyRRr9eLixwG6/V68fpojlKpdE8tzPH8hr/SiBlyudyYgHEhUigcP3LGAqMwFklj2hINzXkh7sfv281CPnOB40IE7VUqlSIjufBBwzvBbGkNzu/5jZu+zBFUKpXGxgLDThpljDmkfD6vmZkZlctlVSoVzc7Oji2iJEnU6XTisdJIu+ZyORWLxchMabPLtUSpVFK5XI7MIilqYRZ2sViM/7Pw0R6+4N0vc20kKS5QZxY3C2FSZzwWqWt/jiuXy9FU9PcwN+NjfjCf8/m8arVa1FbcC4yJRnOBgxApl8tjc+NaluM7nU68Z+7n6OgoamHum3MhDMrlchx3pVJRqVSKz3cSKGNMIzSNJLXbbUnS/v6+Dg8PxxZkPp/X/v6+2u32mMaBkOKuOdE2nU5n7D3H8Hv38TiGhQQjhBBUKpWiWYfZyaJGI7i5y7h8cSNQnKnL5fKYj9vpdHR0dBSFBMQ5+ev3nhYg3CPfu5bye3czGaaFwTmWMcOcmOCuRavV6phFwt9cLqdqtTrG7J1OZww3mBTKGHNI5XJZ09PTYxqzWCzGz9rtdmSKEIKmpqaiyeV+JQwpjfxFTFeYpFarqVarqdvtRp9TUlxER0dH6vV6KhQK0fRi0aGZWXhJksQxA3DA2Gg4X5jOlNIIhHAzFaZg4buv679xa4Dfu2ZzwAmBJikyW9p89ev7/4yb95itzB/n5OXn8nN3u13lcrk4b+12W1euXNHLL7+sw8PDx7KOHhdljDkktAULCEabnp6ODNHpdOJig3Gq1aokRX+TBQcayAuzDiCjXC6rWq1GaY2mgfndTPXFCdM4Yzk6i8/GGGEGX9yO/KZNWWmktbkPgCw/D4znJjYMlwavEBb8HsJPR2O6GYuQctAGpsR
UdWZMp7QxLub08PBQhUJB9Xpdc3NzSpJEW1tb2tjYmDhtKWWMGWl/f1+3bt2KYZLDw8MxJimXy3HBDwYD1ev1sUXAwi6VSnHxeFigUqmMoaUsfExkzKv9/f0xDeQoaT6fV6/Xi8zP733Re0gmrV1Z2B6OSMcMHcRhDiD3JQG0uJbf19HRkfL5vKrVapyzw8PDOG5nJCwGxuhmqmt+6dikL5VKUQAeHR1F60JSvHe+g+Fh6mq1qsFgoHa7rf39fbVarbvu8UkQwvnVtHTGmEMaDAZaX1/Xiy++qF6vp5WVleh/edgELYnGIFzg8D1axkMGXMMlvINCjvw6UMQDbLfb8XOAE0mRGVyToiUPDg60t7cXNRkvSRFw4ZqERCRFAYD564IBZvLwBkLl8PAwzodrce6VcbXbbbXb7XgcjOn3nhYY0jFjVqvVqGk5RxoI43cOmuFiYPUcHh5GpnxQTPFx0+zsrBYXF/XVr371vsdkjGnU6XR08+ZN9Xo9NZtNTU1NqVqtanZ2Ni4I6ZgZisWiut2uGo2GWq1WfLgeOnFTUTo2dzFd3WwE3HFzGe3qzMr/Du5g1nkMs1wuq1araTAYqNFo6ODgQNLxwna/Dy0mKTIcCxktks/nValU4otr4G92u121Wq04Lx6nRbu7me/CqVwuR1/d59AFhINDjCudsOBaOB3mQbAw361WK87Z0dGRDg8Pnzhjbm9va3t7+1WPyRjTCBN0Y2ND3W5XlUpF1WpVU1NTEfnELMW3bLVa2t7e1uHhYZToaDpfJEjog4ODMVMOBqhUKmOIqAfv7wW8SIqIqS/ctIDAtGTR+jXT2tvJUeTDw0PV63XVajVNTU1FcxDGhuEwb0ulUowDM752u61mszkWZ5RGYBFCgc98HH5ez05yEIlj3XIpFAqanZ2NoS+EEuh72jWYJMoY0wip2ul0tL29rWKxGBc4Ehgwh+/QGGS/TE1NaXp6WpVKRdKxFmFRYnY6QCIpahgW9PT0tDqdjnZ3d9XtdsfGkAY9JMXFRuiAa8Dk7qt5uCRtAjrayvgITwBWlctlNRqNMWZPMzgMJI1imsyvAzRYB1CaOdKakPc+fw5S4cdj2s/Ozmpubi7GLB2U6nQ60RR2QTkplDGmEeEO94+Ojo7UarXuCtJ7WhoaI5fLRWChWq3q4OBA7XZ7LGvIfSeX9CCbBwcHOjo6iovMU98wXT1+CgN4LJLF58AJxzAGTOL0MRxXLBY1NTWlQqGgqakpTU1N3WU2E8v10Ick7e3t6datW3f5iffy516NGVyIVKtV5XI57e/vR2GBycv8YR5LozTCdrutg4ODsdhoCCH630dHRxFRniTKCqWHhIlKeMS1zf7+flwEaZPPA+pTU1NRisN46Xidm2JpE4zEc2mUeFAoFHR0dBRNYBcQkGsij9s5WuxhCg9xpM1YBI6HfBgX5qQk7e7uan9/P14/Dd74/KQ1cnrcr0bMWbVaVaFQ0P7+/l25wmRgkbSA+e7+fT6fj9ZIkiRqNptjiQWnxAf3LZTONKZG5WVLS0vRl5yenlYul9POzo7W19fHNF/a5GOB7e/vjyVNu98kjcAMUN2Dg4MYa6xWq9F3zeVyEUr3hAVQTzcH0+ijM0F6sRE68TH5+PkdCxoTj7GgPWESzwRyU9Ov6yazU5op04LG7wEwLh3PlaRaraaVlRUVCgU1m03t7Ozo6OhIkmK4xgUhlUJJkqhSqYwl6E+SkjqzjAmDAKGTo7q8vBz9w06no8XFRdXrdXU6HbVarWj+AH6gedAm8/PzqlQqY4BCGsTBTC6Xy1ETEgQnQcDBimKxqKWlJR0dHWl3dzcySjp4z2fSCDRxbYo/h7/qjO0Akp8HwQGwBaCE6c4Y3YTm+pjTfr57PYf03zSA5daGMz/X2NzcHEOX/X494YK5LJVKmp2djch1q9XS7u6u2u32EzNpH2QtnFnGzOVyunDhgt7+9rer1WrpxRdf1Ne//nVtbW3pwoULWlpaiou
w3++P+X6k1O3s7Ojg4CBqmHw+r7m5Oc3Pz48F/fE1Dw4OIuDhOaiYt8QrQXXRoJiR1WpVSZJEPwqw5+joKMYWHYQBVKpUKnFxur/rGUfSvQu20VhoHfddPWzjjJ6u1sDE5Tuu51qKv16/CSA2NTUVEXNANo/VMnbmCH+Y62Gel0ol1et1zczMRCHTarXU7/e1v78fTfOTppWVFa2trekLX/jCfY85s4w5GAzUbDY1Ozur9773vXr22Wf1pS99SZubm9rZ2dHe3l5ctK1WS81mMyKdkmKiQbVaVbvdVpIkqtVqEYaH8Xq9nlqtVgRsMJ0II7imYTGywGu1mubm5sZCLmgvGB+NABDjWllSZFyS29N+rzQOHLkvCrnJ7hrWmT+Xy0XBAANjSTiK7ah2OqGCtEWSFmZmZlSpVKIp2+v1xsrher2earVaNOVhPp4PAnBqakqLi4uam5tTvV6PwFqn04nCj+oTNPJJ0vr6utbX11/1mDPLmMD1Ozs7Ojw81Dvf+U49++yzunbtmq5evarNzU3t7u5GjegxMpiL/Eu0x8HBgV5++eUoqWdmZiQd+4YwIPFMFp/7fC7VCbmUy+V4rU6no2KxqEqlEgEhxkcIwTUkYRPQVMg1iYc80torXREjjSpA0hUrHvIpl8uanZ3VzMyMarVa/Kxer0dN5RUeCBcHubgXtDogHOVchKg8lOLJEcx3pVLR8vKylpaWVK1WY5LHYDDQ7OysJGl6ejoKvP39fW1tbZ16/uyZZUxoMBhEM/Xy5ctaXl7W1NSUXnrppejH4B+SuN7tduMCSi9a97FardZY7ieLl+SEfD4fmQZtiUYplUpaWFjQ7Oysms1mZBYQR/xWSarX65ERuUav19PBwcFYWIexeVwRTcd7Z1CQ6qmpqbH7Y14YL3HLYrGoarUauwJ4vWpaIxUKBc3MzERzHBO1UqlEs3lvby9aI4wDjZnW3G4uM34sB1wDLJa5ublYewuzS4pZXJMQOjmzjIn5BZJHJcnKyore8573qFgs6vDwMPqALHQHa/w8+HjSeFwN7Zh29AFOYKRyuRxNNxIN5ufntbS0pNXVVc3Pz0efFuBpfn4+MgPF15wbMIlF5/4hfi73xngRMggPfFlPXcNyKBQK0dSenp4eA2pcC5P80O/3ozkKw4Nicyw+uDRCiykqIPPIzWPmHVPdM5oQYOkcYQTs5uam9vf3tbu7q93dXe3t7Smfz6ter6vZbJ56GdiZZcx6va53vvOdWlpaisDC3t6eqtWq5ubm9Nxzz6nf7+t3fud3tL6+HhciDApT4Tu51PaSJ+lukMXjhK5p0Aj4X9evX9f29nZc+IuLi9G/7Pf7KpVKqtVqmp6elqQxs9UFRdrvY2z4rc1mM2ongvTpOkxMa2KBnjcLcx8dHcU4o1/DhZIXZsPkCAS0bbVajYxIRUk6nOO1rLgTPMNutxvHj7mOcKHw3QUun2EGZxrzlAng55lnnpGkaKrip73jHe+QJP3u7/6urly5om63G8GdEMJYNYUvQK+kQAPBjBRfe2oYDO1ZRN1uV9VqNVbYT09Pq1araXl5WfV6PTI0+assMkI5LDYWGosY367dbseypxDCGHKLaV6pVKKfODc3FwEU0uxAa2ECwg6ONAM6eSMsTFvyWCXFmC7fYfqSYNFsNuM9IABgTn7rYRaYEv88hBBjsI1GQ5ubmxGcI9mAWHXGmKdI+/v72t7eVrvdVqFQ0Llz51StVuNig9BspVJJV69eVavVig857Y9B7gOhATENCbeEEOKCY0HyYkFWq1UtLS1peXk5+kTT09Oq1+tRi+CLhRBi+xHMSRaa59keHh7Ghd1qtaL5SNXG1NRUZCJivXNzc9FkdQHAfVFVghanTjLt77k/SrICgm5qaipqL4QJvjVgj/t/mKeunZmzWq2mTqcTAbx0FhBuxNbWVsz77fV6EZxqt9va3Nw81YSDM8uYSMo7d+7oxRdfVLPZ1NraWoxt0UpEki5evBh
9wJdeekm7u7tKkiQuLHwkZ2oPYtNOBGDJJb13N8C3XFhY0NLSkhYWFiIYAura6XR0586dsXpCSdH8BLVsNptqtVoxqZ7kBF+kVLt4pg8MygvN1Gq1dOfOnTHUs1qtanp6Wv1+P6Kk1FrCMG7yknPLmPA1XWtxPZiaa8zPz2tqaiq6ESTREw7CbWi1Wrp9+/ZdWpb7oxCBeZudndWFCxf01re+VefPn1en09HnP/95feYznxlDsh8nLS4uamlpSV/5ylfue8yZZsw7d+7o1q1bkVkajUb0a0hFA1woFot65plnVK1Wde3aNe3t7cXaPmm8kRS+GFpmdnZW9Xo9mm1UmxCqoS4Rbbq0tKTFxUWFELS+vq7Nzc2o2TxLiURuNKM00iReCdNqtSJzeWWIL1YAHRBnhATIKckPmJo0vLp9+3b0T93EJGZJNwO0tscz+/1+zGRqNBpxPrwImznB58SfRKhQYOBdIRBgvLB6SHwgb/bZZ5/V29/+dn3DN3yDarWa1tfXde3atSg0TooajUZs9nY/OrOMycOlA/fMzEzM2llYWNDCwkJE/vCNarWaZmdntba2FpkBlNBfLHKQSv6XFNPrMDWl8WLfUqmkSqWiRqOh27dva3t7W41GI2oTB5K8RQiaEp8PBqCTPb4WggSTEF/Y47KUS507d07Ly8sx/re3txfHgCuwt7cXhVer1Yp+OjFYzON6va7V1dXIYOl2mMwZcy2NUgcPDw9jXitoM/4xrTE9+ygNtCFIMHNnZmbia3Z2Vv1+X3fu3NHe3l5MRjhJxkynUd6LzixjShoraO50Omo0GjEnFu3pSdvE02ZnZ7W/v69msxkXhGfTkMUCQuoF0mgLFi6dAQgXHBwcRPSQgLtnDsFInoaHZidYDyM5akmOKBqQe2cBExpBSy0sLGhqakrtdlu3bt1So9GIYQQPP6SzgDgHGmp+fl5ra2taWFiI4R3Maq/sAPXO5/OxsJlw02AwiC1SPG8Wf9hNcXxjhGK1WtX8/Lzm5+djgQCvdrut3d3dscSOra0tbW5unjoAdKYZEz8E4GBqairG+I6OjjQ1NRWZE+k6PT0dH/DBwUEMNWxubkYpDkOyYNykgklAJ0FJPUUPbY7k7/f7UUAkSRIRRK7nfi2IJNoUtLfX62lqakrz8/MqFAoxqYLY7crKSkR79/b2dOfOHV29ejUKlf39/WhOpytUMFEx15eXl3XhwgUtLy9rbm4utg9By66vr+vOnTvRj3ft7SEmN0cHg0F8DtIozukhIISspKglAaN2d3fVarViFpKkKGRAmR3NPm16ZMYMIeQl/bakG0mSfDCE8BZJn9DxztKflfSDSZJ0QghlST8r6f2StiT9QJIkVx71+g9LPECKjz1ZvdlsRsQRDTc/P69z585FfxNtR+c1AIW9vT2tr69rY2MjwvgAIvheIJMgtfh1VHKQjeQBfTJg8PVoAJaulXQt4yYdKCcMRd4t9725uRnnhrQ1GBHGhpFgAMzEmZkZnTt3Tm95y1v0zDPPaHFxMWpjTODbt29ra2tL7XY7Ztig9R3d5jqeGOF5wZj6ZALNzs6OZfV4XSg+PmEerARJ8RqY8ISSCoXCiZuyr4Ueh8b8UUl/IGl2+P7HJf2jJEk+EUL4aUk/Iumnhn93kiR5awjhw8PjfuAxXP+hibxTJCamIg+s3+/H9Lm9vT31ej3dunXrLoCHWB8LmIWLGYp5ygKE6R1VxST0PNw0XO+ZNfzvwA0MDdKKOcd5YH7MaLYFuHnz5ljWD6VwHO9mKgt2ZmZGFy5c0FNPPaVnn31WFy5ciGEchB3xQl7kHsP0+OjT09Ox/Ir7JvMKgQTQxTiwauhowPxgul68eDGmWJK0TgJCo9GIc5/L5SK+AJNjKp8mPRJjhhAuSvpTkv6epL8RjtXQd0n688NDPi7p7+iYMT80/F+SPinpn4QQQnJKwSL8LKB5mMoTo0l/w68EnGGRgO7xOcnn0sjPglloKOW7Yzk
A4jFP9yM9Q8jzWkFcCa9ghjrai/+J9pU01g8IAMkzhbx8i+sSI8Wsn5+f1+XLl/XUU09pZWVFs7OzUfOjzUk22NjY0M7OTgzfENqYn5+PDbs4luwcwjHScTjDTU3KvLxHEmhtqVTShQsX9Nxzz+ny5cuq1Wo6OjrS5uZm1Na4KtzrzMyMFhcXtbKyoqmpqWjtvNF9zJ+Q9DclzQzfL0raTZIEyOm6pAvD/y9IuiZJSZL0QgiN4fEjG0pSCOGjkj76iON6IBWLxVgBwYMnMI7mhHlg4EajEc0n7y1DgTMm3cLCgi5evKirV6+OlYbh1wHukCvqFfbDOYh+IhqEdDXyRb1BFuAVjFqpVGJIBpMQf3cwGIylzTnqTJaOJwGAsC4tLenSpUtaXV2N8VmulSSJ9vb2xmKRzWZTjUYj+naDwSCeZ2ZmJv5mc3MzotPEKl1rgspieqPViYXOzs7G2Ojq6qouXbqk+fl5DQYD3b59OyLbrVYrJlXgjgAKlUol7e7u6urVq9ra2lKz2TzRtUc976uVfj00Y4YQPihpPUmSz4YQ/tjDnidNSZI8L+n54TVOTJtibhGzI8NFUkxrgzkcXACyR6t5NgoZMefPn495uLOzs7p69apu3Lih9fV1tVqtWMrlyCnaNF2fiPYCRALZ5f3R0ZFmZ2ejedrtdqPWBEX1ygvMZPdxqT4BSSWRfmZmRqurqzp//nxkSPfnms1m7OSHIACYIqxSKpV08eLFaFI2Gg2tr6+r0Wio3+9HPz1JRk2vuV98WubC+yoRhiHxn8SQ69evR0GAFuYF2FMqlbSysqLLly9rfn4+duE/PDxUsVjUwsLCifqYCMRXo0fRmH9E0p8OIXxAUkXHPuZPSpoLIRSGWvOipBvD429IuiTpegihIKmuYxDoVAjggwcFeZK1mzOYuSB9lIJ5apgjjCsrK1pYWNA73vGOseLfRqMRAY12ux0TFUgCQAgwRk+G9zaLaO7Z2dmo0a9duxbTDPFXHVhBADEWz0dl0S8sLGhxcVELCwtRQxPiYJykEUojoMh7IvEdiQGlUkk7OztjLTy8vy5am/gknzGn3uaSUIik2HEAlFxSLDjgnrAQ8EUXFhZ0/vx5Pf3001pZWdFgMIh5s8RdaWB9UsQ8vho9NGMmSfJjkn5MkoYa839JkuQvhBD+vaTv1zEy+xFJnxr+5NPD9/91+P1nTsu/DCFodXU1akiQUhDLmZkZzc/PR80J4yG5O51OLBOiATJS3HMy19bWtLy8HJHL69evR62J5pyZmYkxVLJgYHpqDwGb0G7SSOPv7OzEMZEDKo227gP8cfOPTCP8M9p3gGLyG5hiZ2dHu7u7sau7h4Tw15zpMYtBez2nFY2Ouep+rMdGMdcRGJjUjDfd98dRaCwLzkmLynq9rkuXLun8+fMqlUq6detW1PKETYrF4phPflp0EnHMvyXpEyGEvyvpc5J+Zvj5z0j61yGElyRtS/rwCVz7NVEIQe9617u0uroaQQmkPnmwLApAEfw5z+yRFBMBMDtZRGhGYP0LFy7E33iFidcjYraxqDF5qKhoNptj5qkDIJSGObOwuEErZ2dnNT8/H2Oy8/PzqtVq0YxEKOzu7kb/lJQ/ENODg4MILqU1Hoi0NL49vB9DDvHMzEwMf3CP3p8Xf5n+OBQNMBZPAySJgzpV5jGXy0UXpV6vR58yn89rZ2cnChrAJgrCvTD9tOixMGaSJP9Z0n8e/v91Sd96j2MOJf2Zx3G9R6VcLqf3vOc9+pZv+ZZYSXDz5k1dvXpV169fj3A60hh/ErOSxeZmJIncLJBGoxGTr1dWVrS8vKynnnpK1WpVt2/f1sbGxtgeHTC+Z6lQ8gWaiFZgXJ45hD/syCrmOIzqneWJjbKY8QkRFHNzc5HZms1m9K8PDw8jY5NKKCm238TvwxdkPlyQAGQhSEgTJPkCa4H0OboztNttbW9vx5I7b6sCuISAQrAiBBkz4S/G4V0fPESD+3BadGY
zfwaDgWq1mtbW1vTcc8/FjJdXXnlFX/7yl3XlypWxFDTPbfQCYYLRaEwYAqYqlUrRhwGNu3z5sgqFgtbX16OW8S3mCAlwLm85gl9I9hB+Leak+8hejHx0dKStra1YIubhob29vdiGpF6va3l5OYYyYLyZmRktLCxE8xCrAhSXzCQv7cLs9tQ7QDQP1yBsYA78S6wVzPyNjQ3dunUrzicCslKpaG1tLSYRgB2Q+FGr1WIOtFsVHitmLM1mM8Y0eQ6nQWeSMZMkUaPR0I0bN6J0J0H9fe97n971rnfp5s2bunXrlnZ3d2PXPMw3AAiKi723jccb0V4AO91uN7YKWVhYiI2kYUBJMWaaJIl2d3e1vb0d98z0RGx8W0/lkxRNT28YTfvHra2t2GjKgaUQQkyno4MAmhUAh3tjrG6GO+FDShozybE6AGQAuvB3vb1mp9OJLgaxZBLo9/b2YuI8zwG0udlsjlkeJIAsLS1pZWUlhmlAk3meoMp7e3txvtMtOJ80nVnGvH79egxnYN54+w0kKTGy5eXlqDm9JyzAAdoBAIGFSEYR+ambm5vqdDqq1WoRASUlr9FoqNFoaGtrSwcHB5ExWegAOmgBGCVJRhvrevyTBAXCOF4WRY3o/Px8ZEjKnfD3nIGZN0IPbiITA063W+HaMBDADJqc+4FZPeuJQoJmsxnBGIqq0YyYzXNzc9EaAakm5OPtKRFajOnw8DDOL0xKiiW+8uOgfD6vb//2b9cLL7wQAS+sqvvRmWTMEIK2trb0xS9+UZKi1vM0N0lRI8Jk+EMscnxD0vgAZUAPQTdpZYk22tvbi/m3gBEE0gEk2PRmb29POzs7MTRABpHvqenJAOmsHcZChQfIKcSWEDAj9wKo5a1JEEggmYSGYCzmBJ+OJIxut6vt7e3YTAzT3FuAINgQjDAw7U0cRQZBrtfrsVgdE5rniDVBsgPam1xoz8etVCrRykFLP6gs6/XQYHDcKpMY6dLSkvb397NC6TSVy+XYSBlN5R0BAFbwNbwHK7WI1B4i+VlcDmiQhwvIgFRGe4YQoqQHPSyVSmo2m7p165b29/fHajHxqWAkzFQHXQBW5ubmYl0pqYJkOAFwwHzecwitxj0Sc/SSNFBsTFqAH+mYsUhwID7spW8IPTQHTOoVPMSKEUCM31uegDRTkuddGfAxeWFVgCiDOHsKIfdN17zH2b0gSRL9+q//ug4ODnTt2jVdu3btgb85k4xZLBb19NNPx8A35iX+Ew+SHa1ICEfKe10kmgIfjEWBbwW6SuCcLBdJscaQEAaAC4XBZMsQQ3TGZFHjk5IUUK/XY7dxzFKYC+aDUQiPSIpMhE+HP0ehNsgt2kdSNKkRTmhf/G9JY/FB5pZjKUrGHwTJxdTFzwVB5fz47ySkw9wgx5jw3oUPrACUG+SVwne6Mezs7MQwz+Ok15vmdyYZU1LM21xeXtalS5fiYkRLoN1gOjdzvSDXzUavwvD8VxZTs9mMrTYwg9GG9Xo9dgvH98Vs3N7ejr8j7oe2xKzDN8LcpIUI2tCLmRm3j5fv6epAjqm3tYTp0GZkH5E3S3iDNpykyqGlDw4OVCwWYycDGBrwxjvUMUauJykCPpKiBdLr9WI7lnq9PrZVRAghhljo/+M9cwk19XrHfWZpk8KcnSadScZMkiRKTzqCz8/Pa3V1NYIULFpADi83Si9yz5JB26JB8UE9lMJv6ZjOItva2opmLaGDcrkcm3iRsI1m9nIy0FAWIXt6YgLD1Ji6dDLAnyQbaXNzUxsbG1HDI5i8PQdxRadyuazFxUWtrq5qamoqCi4ECZqPeeC6jUZjTNAQMsJc9QQMwidYIGh75oDyNWnUrgXzlXlgrr00DYFULpe1sLAQc3hPk84kY3pXN7RZv98f28OChSSNSrhgQgdLHMnlxTWkUWI75hXmmaRoirKQaZjFAvFyLGD+o6Oj6FORe7qzszOWtI7w8MyXWq0m6bhAeGNjQ+vr63ehwSxetCO
9ejy2CJMhPGZnZ7WwsKDV1VWtrq7GihMPuSCQDg8P4/6VXknjaDcxTsbmSfHpShnvq7u9vR2fEZgAzzVJkthJAe2N0CKsRFvQRqOh7e3tN0fmzxuNer2eXnnllTH/iE7odLQD7fMQhAfKPdWMtDjvIRRCiKYclReYXqClniiAX9Pv97W+vq5ms6mlpSXNzc3F1hggitKx5oYpASwckAHNBEyhKx3adXt7WxsbGzFGChPjuzpSjZYhTQ6zFabAX3RTH/OX88CYDv541wE3+6XRPiqSounsWtSZzzN3POHBt9wrlUo6ODjQ9evXY/vPVqsVUWSY1oXmadKZZMzBYBALZ0H7Go2GJMWkaaoiJI09fMw5mIkgfKPRiMgsjE58lJxO9ztZQJh6LFK+QzMCpNAkGs1GKiGmGCVanoDAeTBVvT/snTt3Ys4r/Xro+4MWQ3POzMzo6aefjuVfdBzAF0cIwQD4vviPCAOycohPwrjSqP8OHQVWVlZiHi8am/hpsViMBdqYxC5YEC6DwSCa94B8VMcQuikWi9rb24s+Mc/vzdBa5A1JSFsYkjigpPjg3Ox0SQ4jsWhIkMYvcW0qjUwvruFxUpLepVFVBb/H75ydndXKyooWFxdVLpe1tbWlbrcbGdczf6hYoT4RJnBzFaDFgRW0HIKgUCjEkAuIsbexBH1GOztTePYRPh9m5/T0dBQQCCxS4+jOR1cIz85BQ+O77u3taXd3N95LCEELCwtaWVmJ42w0Grp27Zpu3LgRu67jwqT7JXFe2r6cpMZcXl7W6uqqfu/3fu++x5xJxsTUZHHxQIgJAlIQOsAX9ZQ7Z8ydnR2tr6/HzgZoP/dN8c28kRVmKQyC7+oFy4Rb6PlKiRihAhYa6X9ofDr+0UGA+8AkZ2H6dej9CspLHNcT98n+8fYfMBc+KIIBM9FzZtGeXiuKSe/br7uVwZxhFZCOyDygUelXS7zw6tWrunnzZuyS4PnDlUpFc3NzcS8YXIpu93jrhJOkjY2NseZn96Izy5hs/oqvA/ggKaaAEeIg8RtAA0YiTujtDvGzCJp7jinMg9ZEMEijagc0GIsE5NVjnB5awGyTFEMSZA2RyYK/52gwRdHsS7K8vKxz585paWkpakxpfI8Q/keo4fMyXzC5J9yTbeNhGgQQ2UGk0GFJYM0wF4BMzEGn01GpVNLq6qrW1taiAO12u3rllVe0vr4e45GVSiV27eM5OpBHBwuSL0iJfJyZP/eiB4FLZ5YxCRU4cieNevgcHBxErbK4uDgWHCf+5TmhaDo0LulhMJgHrN3HRNoTCO92u9E/g3G99480asSFZj08PIyJ3by4J8YPoIWgwGSDKc+fP6+lpSUNBoOYBojfR3yU/FuEGDFCTGCAFnxUGM0rZDAdAdsQZAgQ74KABi6Xy9F0deFF5haCEVAOTe2ZVqQlMn7m4ejoSLdv3x5rOrawsKALFy7o61//+pNclmN0JhmzUCjEImmqEtAIoIiAP57B4i0eqbrAH/KeNOwaRVjG/VjvqSMpxtLwe9j1yrdr9/DOYDDqto5mw6/EjHNturCwoHPnzsWiaAAOaVT9kcvl4r6S+KweZD84OIgMiCYhNZGMoCRJtL29rX5/tM0DlgVgFKZmtVqNJVhUeiBQyFMlvoxAoaxMOsYAtra2onnPfRGO8eRwTFbSIj304hssFQqFOEckfJwmnUnGpJofTUE5FFqHBeo1gkh3JO309LSWlpZilzXMWtLuiIlijiKp6W1KPA9Nmq7JJAMFZkTDYSoSAvHu497nB3M1vRWDJ7qj2SRFNLLf78cEBfzRbrcbtb/nzrp/TnKFCw1vr7m4uBjbliCcyHqi7tHBNlISz58/P9aB3pFkR05huH6/P9akiznf3t6OSfGrq6uxUNzbWdIZwQsZTovOJGNSWV8qlbS8vKyLFy/GDJHNzc3YbMoBCRYC/iL/g44S80Szbm1txS58AEFeLuZ+IVrQi3gxWxEOmK++bYNXmgC
M0F8WrUWGC74p7Um8phJEltALPipaF81H/NKTx70bAtunu4BaXFyMXQigbrcbex+B7DpKignabre1tbU1FoahzYg3epZG5j1CCr8RS4Mx4X4gWDx7CiEjKWPM0yCC3Sze+fl5Xbp0Kfbq2djYiCEJDwW42cnnvV4vorKkjVGbiC/j5i1b/oFqIrHpIADDEu9kfNQZEr+DiWGEer0ewRxMa1p2EHMFxUxXleCbgWzC3LS5JLuHlic0xPLc3V6vF+tH8fmwMCTFrvRYADR+JgECpNvzYWFgEFnuHS2KUGDeSLrY3NyMZrWkeGyr1dL6+nq0jDyTyU1l8IXTpDPJmCCpMOje3p5u3boV07ZqtVqMg3lME1MQhuh0OtEPhBl5oN4DFvOVRQdk72YgviQASdpHq9frmpubU7FYvKsWksqNvb29aKZ6FhDoo5cyYbJ6cy9PTAdRBbn2LCXG3u/343UwUefm5sZqHSXFag7QVLb5q9frsYCZfUCZI9BT5pxre/0mTEXsFxfBQ0Q+DwgQUHOvwCGE4rHNkyLanHzta1+77zFnkjFZQJhL+BbsmEyKGA/Rkwu8ugJN51X5SH0+QxsQMGfBes9VAAh8TO/jA1qLOYoWckbmmHa7HatcMLFZdN5Fz01a31YvXTPKWMlZ3dzc1Pz8fKyCYX4YNxYDgsPjszA9KX1oZElRw9NzCHOTY72h1+7urra2tiKyTsogoBHJ/OQVc18wHffHM0BT0hGCTWWxNk6CmPdXozPJmNPT0/rGb/xG1ev1iNQRImm1WhHZ87xO7yQHSigphlGQ3jAUlSUeIqDK/l6FzrxHo3I9FhP+Fggkkh2mpQk1PVTRpIQbYATM2FwuF81fTHT8NnxcBAYJ7Zit1GVyn5SelUqlGEIBvPIuC/iiFArTm4cwzvLycuy+BzB08+bNWHrG3NBeslQqxSQIip65PypbELStVivOCy4EaZlYPWRIsQZOijzv+n50JhmzXC5rbW1Nly9fjmYbDwwEj6JZmJFsHh7a3t5elKrei4eF6EnV6cwZXkh9zCkPX/iOXOS9Sororqeoecc376IHWIRQmJ+fj9dFoxCi4IXQIJyDj7ywsKC1tbUY4gAZJV2PsitqGkGciUlinpLTyp4x+ObEENHsHjqhOgdfcm1tTefOnVOxWBxD0wuFgur1+lipF2Eo5gAgifPu7u7GLoEOiGVJ7KdAmFy+PwnV+PV6XRcvXtQ73vGOWBVCeKRUOt4EdX19Pe6yDHN63x3XimmT2AP1MAZxN9dQaE9JMXwDE+AbOXjjWTne5Io6y+3t7Xh/1WpVGxsbunPnTgQ66JKXbtgFYHV4eKiXX355LMZZLpdjaIV2HfwWU5S4aKfTiUJKUsxTBuCi0JvNZdGmmLUgrqDEaPl8Pq+LFy9G8548XO+QMD09PSYQd3d3deXKlQhUAWLhGzO3p0lnkjE7nY5u376tbrcbkwSICZJYDmhCMjdbHfT7/Ri0Zx8Oz1FlMdLvBzMUNNTrCz13FW1HAyu2Z6BUzGsbAWQ8bENMknImQCgWcrlcjguPZG8v8JY0ZvLirxYKhcgEHtdEC3oROIw8MzOj5eXl2HN3Y2MjlpjBiFgUpVIpxjlhTPxpEGkS0yVF83ZnZyf6pOQFuxuBLzw1NRVTB2mXsrW1FbeWACvADcAteNytRV4vnUnGlI79RhbB1tZWNCfz+XxcWPV6XYVCIZYLwbw80NXVVUkj89KZ2gt0l5eX4x4gGxsb2t7ejtrMgRn8VAeSWGyEN2A4NKWnvRH7RGt7TA4ghNjq8vKynn32WV26dCnWKBJzdb/UU9xIgaPzg/trpNIxJoRTv9+PPifmKPeFz4qL0Gw2x2KhlUpFN27ciG0p2VYBUI3qm42NjbFmavjgWAMkJhA3BWwC+GLMJPJXq1W98sorp7AqR3QmGRMGpK6QxchDR1PdvHkz+omYqTAQphwgEIzDghwMBndtOwCiSPNoAt8scnwcJLY3bUZTpiv
0uQZj4nhHdR1NBoCRjs3JlZUVfeM3fqP29/d148YN3bhxQ9vb23HhAkCxF+XKyopKpdJYW01MaLSzpLhZLIANfqtXmqDxMY19/8okSWINKn47cVTQ8CRJYiohIRtPZfRNdAG3mCPm2vv0+vYKL7/88hNdk2k6k4x5eHio69evq91uR83iD8kBGaQwZg4SFnQUUIZjvNwLhuY9iwXf0ouDYUiyjEgrw2wknQyAyrcI4HzOHHQ3BwySRl0BuCfMXnJpz507F/ve7uzs6Pbt22o2m/F3VNpIGgvOk0/swX6ykgjdYBmw+KmUQbjwIk+WTY9WVlZ08eJFPfXUUzEtknvI5/NxAyQEBOa2pydiVXjuLWmZjInyNM79wgsvnNj6w29/tfKyM8mY/X5fd+7ciY2XAUWkUbMtzEIWFyZguvcscUoY2TsReAU/iwMQh2Rpck/Rtn4uFj5AlZutaEFMTg9PMCY0kF8fM5XtA0IIunnzZjRvn3766VhxgXbx/RzRTDMzM7GbH1k/HOPJCPTXJXGda8JQIKJYAyDA586di+l3uBOY+RR9eyOz7e3tMVPcM5Omp6e1tramt7/97VpbW4vle1gTLvxAuS9duhRN+cdNjOvV6EwypocQkPbprQ48P5XqDTqlA/rs7u7GxeslYd6jBrMU8IeAPAxPTinj4Ly+m7KjtbwYO9p+dnY2amvGAfJJwN/7wLK35v7+vqanp2OIwbsVnDt3TiGEWFkijfrPDgaDGO+dn5/XhQsX7todGt+TmktiraQ9rq+vR80Hs3DtVqulK1eujGlhrBlJ0VLBrPXQDIKUGCvMd/36de3s7GhxcVGLi4txrN4MDCFEMsdJ0IluXPtGJ0ANT4GjltAlsXdclzQW5oBxvKgajUQ2CTA/Gos0Ng9rSKPibGm08xTX8VYl9B0CpfUxSIrILdk78/PzMcsJ85ffEJRHa5fLZd2+fTsWF8NwxWIxbhvovipCgcQMSZEJSW9kLvf393Xnzh1tbGzEbCs3XRk/DIUAAqjhGWAGep7s0tKSpNHu1rgbMDJtRWZnZ3XhwgXNzMyo2WxGwUA4B6FLju5p0iMxZghhTtI/l/RuSYmkvyTpK5J+TtLTkq5I+rNJkuyEY/Hzk5I+IGlf0g8lSfI7j3L9hyWq6kFFCWi32+3ok3kYwuOSmJoAPOkYJdKWHFw656H5nEmJ+fn/XsXCiywk1wpoQS83I4APsEE1SKFQ0OHhYaycuVc3Avxmyrs2NzfV6/V0/vx5Xb58WfPz87FnLfcMcIWwQAj1escNlD2LiS4DaLdCoaClpaWxZmTMMbm7/NYrcLw9CHOKf8r94CsCInG9fD4fM5OwAhCwjlhLOtUt+KRH15g/Ken/TZLk+0MIJUlTkv5XSb+aJMnfDyF8TNLHdLzL9J+U9Lbh6w9J+qnh3ydOaB4S1T07h+8xn/AHyBih5Mk7z3neprc/dKnP/26aeqmRawHOKY2SDjCzGRMAB+8BfbwtJr6fxxjxPdEqWA0hhBiywBRGw7Lx7uzsrDY2NmLbEubK24iQUcS9cQ+egsjfubm5qNHwkb2dSxpwA1HGFGcDJklRAPrzQ0iQN4ww2tzcjOirm8J8hqA5TXpoxgwh1CX9UUk/JElJknQkdUIIH5L0x4aHfVzHO03/LUkfkvSzybHN8hshhLkQwrkkSW499Ogfkliw0t0PVBo1YvbuBZ7/igl5dHQUTV1ptABZFIQOvDrC0VukOSEJrunIJtf3tiHezCqttX3nKuKrXtAsjfrN8FvGzzhBdzE/2+22FhYWop9YqVTGmIV5A1FmvxPuE6FAn1dJsaLEq1A8RQ7TOJ2KSD4v1gmgmKcgpjOu+A7BhiXh1TJueZA1dJr0KBrzLZI2JP3LEMJ/I+mzkn5U0qox221Jq8P/L0jybY6uDz8bY8wQwkclffQRxvVAcuDAaxFJtGYhe4cBLz7GL/S4m9c1ekEu2gztQfAfrQdzwOBoQJIVJMXzURfa6XQ
imEQMz5MIvFyKLBrAE8zGNKCE+cfnZBvBxMR1QTx9XxJQVQQT15Q0tinwwsKCer2e7ty5E+8XoUPclKwoLycjGQSrwat4pNE2epzHM6G4NxgQH9krX7hHzttut7WzsxM/Pw16FMYsSHqfpL+aJMkLIYSf1LHZGilJkiSE8LruLkmS5yU9L0mv97evlbwnTaVSiWVI+I7SaOs475HqNYswJ7/hwRNKgSldO2GSDgaDmAaIv4kgIJGca8PA6dgoqW/e5Aq/ytuFYNq6X4sfC7DEuNBs6d5AmM1ujmO+o/G73a52dnYig7vfzh4lN2/eHBMIbJrEnNCc2rf885pSt0jS+bd8TwgkbSEAHmG68nwQQpLi3Ofz+de9O9frIZSAWwtpehTGvC7pepIkRGI/qWPGvIOJGkI4Jwl464akS/b7i8PPnjglSRKZ0XNPyTDBXIURmEiSz7e2tuICwfxECpME4OEQaVRz6EnnxWIxhi/SZqVXRFC+hLag/pFtEMiQYZHD4GgyLAL8JwegsATQbGymi0mIv+ftNwCjXIils5wcBXamxh8FBeV3rnl5j2nJPHqTMYSTZxsxz973lucnKY4dVNjNWQQAFs2rMc2jEpvXfvnLX77vMQ/NmEmS3A4hXAshvCNJkq9I+m5JXxq+PiLp7w//fmr4k09L+h9DCJ/QMejTOA3/Uho1reKB4kNi3nj1vi8s/C8PX/g5QCl9MbBIiDtiWjGOXO64nQdF2155AlPDNPifjKHX62ljY0O3b98eyyRCK0kjsx1NWS6X41jJNPI2HRQdc9+EL9gWEO1ISZYjpx7ecGRVGiUdEAtOV9UAzDB+tL2HS7yA3EvJmBMXhJwHdJoYLscRdsFM9pAYGMJJ0ebm5ok3fP6rkv7NEJH9uqQflpST9PMhhB+RdFXSnx0e+x91HCp5Scfhkh9+xGs/NPEwOp3OWPV6tzvehduhevfxPHncwQlMNEkxbOI7iiGhXaNS20jgHq2N+YVQcLMPXxb/N61ZiCVirlKaVq/XVavVYviC36IBmQOaIGMWApClwzcsfoLz6TQ4hA6BfE9F9BxfaVQ8TPgCpoNZ0ygvjMPz8Bgn84VPSuuVdLsVzHHi166JpdG2GKdBj8SYSZJ8XtI33+Or777HsYmkv/Io13tchEQnIWBubi7uC4LJ5B3u0HauCT2li4VEUgKfk9HjWUH8DgAIhoPJ0pUhVO+nW2ikc1I9iZ3z4AejnWgy5uEVR5j9HG4G0qwKnzLtu1LFgfnNHHIO/EH3Z/nrvYgQErxPAzMc4/dIGMsL0wGNJEWQCP+XkBdz77WrAE2Aa29U8OcNS5h1LErMMx4yUtWLfDFTWZAsGgcdAEyQ3vye7/EpPVGeCnvvIIckB/kk5pnP56NPSjUEAXm0EQtxc3Mzai/XRF6BASMyRg8rMAYXPGgkwg0wHfnDENoKU9gTALxHkgNIzoDuA6crUkiG8DS9UqkUtTAWjD9fB5FcKyJsECSg7IPBIMZ/T4vOHGPCNLTioDkzSCihhXQs0hOePRMHhgWg4b0vfAdxYFZ2bPbyLK9ugJmohHATWFJkCqo2ZmZmxhjAE7PR2DT/Qtig1bgX97U9aYKxOYOQnA5gREjCmzhTbsVcI1hgfo4nUaDf78cMIk/Dk0Zb0Xv4KpfLRYaE3EfFd+aeXeDgT6O56cqPQCA3+LTozDGmdPxQCTU4/A84AVTurUJgNEmRaQBoHG1Em8IQvqjdVHMNg/kIQxDfg3E7nU5kADbggdl9qwQ62W1tbcVdkT0PGFOdrQ3QoPhzDp5wn+kSLS+IRqjQQ4fyK0IdrkkRdMwD8w7C7Mn2jMWzo7hfaQTeIQhhUkdzMW8d0AEnICsKJvfNmAjBEJs+LTqTjMkD9abNgDBuegLAeMDeQQpCHoQNAHuOjo7GfB/Xio4usuC4DiYzjE4oxLcbgEHn5uZiJQaLDfCK3qq+9RxhAt9sFn8MLSJpTFB5WiI+GIKDseLz+o7ZoMP
ERT3dURqFgjD/uT+2q8ff5XqeYud1oA4ouXBz64Zj0Z4IpvTcIwBp6oXr8TjKviqVis6fP68rV65EjfwgOpOMCbOQ8+mtKAAO0H6e9ub1jyGEse7pnornkj4NYGCuOvSPZoX4HYgoWhITmJgfvqoXdnMdioClEdhFDaaHYzyc4mgz36EN0YDcr6ToswEAgf4Wi8VoNrvpDVNLihpcUkyzww+kLyymJgLREwwkjWlV5s21qecgg9Ry/bQJjzBgzhwFflSqVqv6zu/8Tt26dUtra2taWVlRu93ONq51yuePu6pNT09Hf4hWGpKihoAx/cHTspFiZ5IOiMcRBwVMaLVaYwzG97w8p9PNMbQYflej0YiLC/AGpvTEdUc83d8lU4lYnjMzzMKiJazg3fWciQHFPAHCt5fn3Ji6Hrj3YL6bzq6B3b2AwTiHm/mM3c1iBB/AD0XXbHRLDq7HOv35Eb9OI9yPY83dvn1b/X5fV65c0dWrVx8ILJ05xgwhqF6va2lpKWqe7e1tbW1txYeGNMfMpaMdyKj7nGhAFhALnIXMcd4V3EvL0kCL+6EuFFiMFECzENH+HAfoAiCCuctYfW9PB3RgEDYHQjO7BuL+iZWCbHION5sxQR0BdcQU85txgk7zezeHeW5uBvr/MCKCA9cDYYFG99pWnj2Is1sdzOFrNTsfRJubm/qlX/ql+P61oL1njjEHg0HMlJFGUpMYF34G/o6HQ5DmDs2nEb50phBgEKarI5QwqPt6+Xxec3NzkkZhB8bgCe9ohFwuF1sywgwuGPD/MO085siLRU97R3JGERqumUqlUtyLE7PeKznQ2oVCIe6Uxj16kTWCwBMYuDcPqWBdYD67sOC3nBOtzPHu43tHBQfmCEehid0szsCfJ0hk/QDm0KXA2xoSG5RGaB7ZI5LuCpWk440sPD8fgiAd+HazkDinh0VY6Jib+HGYXV65AjOhIX0MHoZh0aGx0VT4c4uLi/HaMKTHZ8mM8coQR5kRJKS6AbJhgXiyfbpdS7vdjsAc/XpgPIQI84P571ra457MO+1a/JnyF/8ZYcbnnP+06MwxpjS+k7L7dpiLLHpMH2nU08ezfzBd2cXKgR7Xrh6O8V6qmG+MA83pVRL4ZSCcLDbfIcx/y7koGfN9IL3nkGtXmBjtky4K9yA8JV3e4U9SPA/+JnNWLpfjBr/EHX0nb5IjpFFCfb1ej3Pj5XNeyOz5wlzHfU3SC+mm703PHG2WNMb0vPdnfxp0JhkTjUh4hIWBSccDBEDxhG4+Y2GjRVmYnjGDNuN/fFNMJw9VsMA9x9M1M4sNAeJaD8bihXCRNBZLpIof7U2SAhqO+J6b5J7Hi4Zj4x5J0RwHkEpvHktR9d7eXmTMvb097e7ujs0nFkE6zMJ9u5BgzJ5W6L56CCGmMtLShOftApZnnn6PGexI+ZOmM8eYxAhrtdoYIACC6BA8/ogzDudIx7q86oHF5AkKvt+IpDETksC8p/u5tPZSLR+r+30wBtKe5lksTHwyR4IZiyPHDuCgodIpiIzT0/28Jy5jRvO6gPJOA1yPhAfXZL5dvLfDdIFGnisAV7/fj5lECDDfm5P58mfobgr3ctr+pXQGGTNJErXb7bHAPZkfPDw3kdzk9QfL4nbmCyFERkfaO/SPSebaDhMRBkKzcj1ptGGRZ+jAmL5gWaxo93SlhqezcX4PMzjTwmAwNyagm7qe0gfIROH59PT0WJsPr7wBaJNGW917mAMG9nAOAom0voWFBS0vL8d84e3t7bi9AlaBdxl0SwOzn7lxywWr4iTN2Gzj2nsQ5tvW1lZc3GnNASNQGYIklUZgkGfzpE1INx2JV7JIWHzp1Le0aesbDAEA+XeYnYQW0hqAcQNQeRoawgZNBKMSGqJkDebhPglv7O7ujqGl+NIkXJCWR4cGTyDHVUCTed0pc+81roPBYKy3L0kSs7OzMQ2Rxl0IGFBqQh6OnsN8fn7ypSuVylhB+ElResfte9GZY0w
yabzcCT8M3wVz131FwBSH6WEA9yclxZxUjof50ZyeTytpjJE5DwznWg7TDUnvqChjph4RRuAYT29De5CwjykMORO5pvJUOPJhXbu7fwZD5HK5mIfKMV4I4BpX0tgcop2JgcJ4jUZDm5ubsQYWYXtwcKC9vb2YFI9QkEZ9gdDAnsPrAJYXFJwUc3qR+P3ozDGmhzQwmRypxLeURmhdOtDsZUrS6OGmTUU3iSEePCEEtIP7jffKyHEQytFEryfkd8RlPcZK2CZddOwlWmnUmXvxbCWu7SEPz0mlUoP0Qa7BQmS8JAHwTDy2653+vOm2pwu6heCJGY7yej8nnq03WHN3wlFY5uC1JAKcFJ05xmRB8pCpw/PF6pLcTT3PK3UAyIPemJfO0F4GBoOxeDA1vVgXpsW8RKO7+SmNmmQRxkDgODqMVu90OmP3yVhgQsbvXeTSmyylF3cajHIQKV1V4skH6ZBNuja00WjE86JdPVfW47XMebPZjB0LPcXRfVTGSOaTCzusjLSAPC06c4wpjScwe4qWF9d6Ohz/u6/ConHwQBoF1/1hO2GuYnphTk5NTY0lXBOb84JoFqUDGdKo6bKkyEye0YPpK2lsawcn7pl8YPoT4WfCVB5+8AQLTF9vBOaJE2hCwh5pIecmr7sVDmS5r+paDoDKfeF8Ph/Ph8Dy5HuuC5N64gdMXavVHrjHyEnRmWRMGMdNPJewgDsuUXnvXQ5cSzjU76ipNNpBjEUjjcxkR2UxXTkXG9yiKREI7o8hINBEBPdZaPeKu3q3OcbiYBHzQRKBNNLOblq75iashCbyMSHM3OrwODHCjN3P8JvxxZhn14Bpv9TRZYQPfyEvhHZh5uCapIj8IpROg84cY2IqeeIzDOqZOB7ScJ+RhcpD85xLzpUGDZwB0yghi5KyqaOjo7GwipulaX/XGTUdp/TMFtfikmJKnQsezyJybeSVIo6ewhj5fP6eDAT6mU7O93twxoSRncHS4SFPLEj72y5gmTuELfMtjW9h6PFhn1NSAU9zm4Qzx5iVSkVPPfVUDETjd6EB3Ndxf0Ya7aTl2kUaaS6OYRE4aOPxRDdHpVHbEkljFRXSyBf2hAbX7vhB7g957BVyJNi1maPFCA3O4aEVtxoYKxrUNXqaMbnftBnO9Tz53cEumAkLhd5BbuXwbLzaxxmZLCG3bjzvlvn1WCuoPVlGJ0GY9Ds7O/c95kwy5rvf/W71ej3duHEjMmc6C8iBoG63OxazY1G7RKclBeYRC8e1rueyuknpcVAWMddGk7GY8P8ckElraPeJJY0JGha/7wfqQAiCAF8QBBX/kYQAv66nG9Ksyzu5+5xy3+6nQh4LRut5sgAa0dPxEDiSxsxRfsc8kK3EPHIeBCrJJvj6aU36OIkKnYwxjZCe09PTunjxolZWViIQw/ZsbIgDk7AgYQrMMRgGJuQ8XsiL5sDMTcc13aRyxnRmZXG5hoQ5HP1Eo+Nfedc4aeSLeUKFm9gwXbp2EYuBWKknQngmE++5hgstzGLXovl8PiYZuDXh5rX7tN7ZDrQai8etErQnzwpmkxTLyiTdlRzPfSJATyqOube3p729vVc95swxZrfb1a1btzQzMzOGGno4wwEDR3AlxWC91ypynDTSVmgItFDa55NGvV+J57l/hYaSNDY+mMlBIH7LYnfmkzSmHTivm4Me5uB33AfHerBeGvm3Lni4Bua4h4+YIxcgkiKDcW4PVyCIQH05pzf6QrDwP2Y+WonkhHSfYL9HrA86EqbBu9OgM8eYg8EgxrsqlYoWFhYik3p1xdTUVAxYA8PDcE4sCO/KhiT2GJ8DEwAhxB+RnvhUSHoW/L3AKA+VYEJ7Tag3Tea+YTLCD2k02s9PbLDX62l6ejpqKUduXbujIdPuQJIksZsA2tITAdLzyTH+vNC6XvuJEPO5JXQzNTUVi7n5rfucCKVCoaC5ubl4rKQHpso9KTpzjJkkSdxQRpK2tra0v78f80/n5uaUz+d
jw2TIpbikMW0ljeKDXjLklScOyDhYAoNAXueIZuHaZN64SYgfykLD1HUtRpyPGCQVFGg49yPTCecwhoeI0OBoMje707Fg3xQXZnTAxfOEYZo080uK8Uq3DKTxyhg0Hgn2mOSASVgwtVpNS0tLWlxcjP5wv9/X7u6uNjY2xtDo06Izx5ggfPgsLAga/Ho2SnrheZCdBZYGe1wzuXmMySWNUvYAJRiDo5NIeG+b4f4l1/dkA7+eNDKVyTllTxOu52EOaRRbhWBcN0W5nufapjNsqNxhcR8dHUVf0pFqNLdXwuDfcZ8ONDHHnoqHgCJJI5fLjaUAYk0wJ9PT01peXo6VKZyD0rVqtTq2ncNp0ZljzMFgEKvagcZnZmbiwkJjeewQE8pRQ5hUUszrTGtRmDOdEO5mrWedOALrn5GnygJzk9LDLyxGGNazctDS6eR7jnd00rWnm6teSQJgw1jRomhl79zn42WcCARHRtMMyBy5f07CPdfzfWV6vd5Ymxg/F8+aPkSSIgPSMd53/XIBdxp0JhnTtcTu7q52dnbiIkRao83c96DJlIcHfOHye3+orilhdgcXPIPF087wM72CBD/SkyG8GZWn26E9+W0aFYVZWPxoqn6/H/1lGBrhwX1xTWdMzGFJd2l3juOaDiZ58gBz4vFQF2xuwrpgYm5BojGHPR0Q3x2h5An4bK5Lg2wH906CuLe0f+105hhTGqGUrqE8vQxmoqyJnFXMJNA9TzHzDCGY1f0aN2O5DmYi0t2BHUq+0LAstvTGuMRPMdn8gSMkYHDMR9eGnoWENeBaKZ1UgGCRxs13dsdCS3vs181W5tsTARBYaYvCx+6JF/i5kqKGdr/Yw1QIVw+JILBo84mgQ6DByCeVkre0tKSVlRX9/u///n2POZOM6SYK0pqF6jC5xzB99y6vIPGSKxaT+6Msfioh0IqO8Lpfx3eMwRcjPlg6OyedZOBxUM7hmhytB2N53BWNRmpgWnP5+dGWvrUEwiKEEJmCeGTal/UQTBrU4drOrOmQjYeMXEO7psSy8Z5BvV5vbMdtkGMyfhCsJ5X5s7GxoY2NjVc95pGuHEL465L+sqRE0hd1vBntOUmfkLQo6bOSfjBJkk4IoSzpZyW9X9KWpB9IkuTKo1z/YcjNpnRuJsT/LOx+/7iXTDojx7NxPJc1jSqy4FgkaFBnTteCnlnkviYLHqTTF7WkMf8xXS7lGs9NbTdH7wV2uXnvObXpJAXmjd9xj/v7+xEEch/SE+SdETDh3ffk3DArgJGbzJ6t5e4IvYgWFhZihwLipqC4JK2zyxjd2dkF7jR8zYdmzBDCBUn/k6R3JUlyEEL4eUkf1vGu0f8oSZJPhBB+WtKPSPqp4d+dJEneGkL4sKQfl/QDj3wHr5OQuvhn0njRL8icAzC+OKWR35hGQlm0nkfr4Q732zwwz4L1gDdMhfBw7QalTT+EBT4SWjoNXHk2EMdNT0+P+bRoIjfrXePBOG6+I6jS8U0Xhvzes6BcMHIPzIm3r4TBEVw8G79+sVjU9PS0lpaWtLS0pLm5uagl6cwHGOTX9q0y0u7HadCj6uqCpGoIoStpStItSd8l6c8Pv/+4pL+jY8b80PB/SfqkpH8SQgjJExZH5FemzSOYhEWLz+YJ0phApIHhc0IwBUzG4sI/9AR3FpZXYFBg7Gipd23D1PRzsyjRtL6oJY0xPeRVFx6MZzHeLwnB47HOTK750/Pmpj+lbWg6jvGkAs6FtnNTnmM83IT/CFPOzs5qdXVV8/Pz8Z68YEFS1JK4FvSdRVjA+A9q/3GS9NCMmSTJjRDCP5D0iqQDSb+sY9N1N0kS4gbXJV0Y/n9B0rXhb3shhIaOzd3Nhx3Dw1C5XNbc3NzY4mNxs0jQcGhBNI406tfS7/ej2SiNzDF8SLQyzASw4CYqiw6zzq/rIQz37WAy14xee+nkmhbmkkaLH83Y6/Wib4VPnS6jYtwgw37fMAxCA7+N8XMNtFr6Oxc0HvZ
xZJTnQ7tKfHvOXSqVNDc3p8XFRdVqNR0dHcU2lrlcLmYfgRsAGjlT+vy5mXwa9Cim7LyOteBbJO1K+veSvvdRBxRC+Kikjz7qee5HhUJBtVotLjxf5DAiD8iD6W7aoX0cwndt63E7abwQ2c037/fD+X2he7oc5+F4z3tlwbt25zhMaG9gjBDBRD08PIw7gnlWEuP1MXtCQToxgnvEdHTTlt+60EqnA8KwLhRdOzvKSpzU/Uh2YIPxPAkkSY7blrJ3KAKUZ+woNc/rjWrK/nFJLydJsiFJIYRfkPRHJM2FEApDrXlR0o3h8TckXZJ0PYRQkFTXMQg0RkmSPC/p+eE5H7uZW61WNTs7G1FHf4BIa6Q5C3o4rrhQvHEXC8nDABCLzxFV14ZoJEkxtOBa2sMZw/mIY3HzkXPS/cBBIAeauAb3iRnOWKitTDMc33tdKg2bYSiun968xwlB5r69h5EcNXZtj6as1Wpx30828UUQYXp6Hm25XFan04ld2ekgz7jcbPexMsY3aubPK5K+LYQwpWNT9rsl/bakX5P0/TpGZj8i6VPD4z89fP9fh99/5kn7l5I0NTWlxcXFuNkM5iDhjxBCLN1Ck/E5GiVtxjlkn/bxHGCAkby9COfxWKNXSriG5nwwDb93AQAjOQjj/pIzuwsHNET63gCHHB2G+TA13aSGYZ3cV0Ybu3bkWu7DkhJZq9ViOiGaEgbFN/aMn8FgMJZIAONR++kplK6xGQeaGf/1JGh5eVmrq6sns3FtkiQvhBA+Kel3JPUkfU7Hmu7/lvSJEMLfHX72M8Of/Iykfx1CeEnSto4R3CdKIQStra3FXE40Cy09KJgGGEhLb2lcssMgmEqeCuYPPI1Iohl9YXooAoKp76W9PKXPF7XHSNNCBA3FQkVT+GJ0MxpTkXP6dTiGMZAA4O6Ag2tYC5wXk9iBKE8d5DjfCh7zGe23ubkZUWb8yKmpqVhGh+85NTWl7e3tsf1SuAfvOsjz4bevlpnzKLS5uamtrbuMxTF6JFQ2SZK/Lelvpz7+uqRvvcexh5L+zKNc71EJic2DR7I2m827kEs3PaW7t6RL+5rS+I7Sacb2mKMnBbivhj+Y1rzODF5onAzjgb6APPkAxnYEl/H6fTAuT2PzEjL8TY514IpxkA2V1jwc48d65wSY1MNHvjGRz7UzEfdD1wTQWZLnJY1t7U4YrN8fFbNzL2hXkG0SIti273GTC/r70ZnL/MG3cgChUqmo0WhoMBjEh+bgkGfiOGDjGsoXuzMD2hGwwYETX8SgnWkAycMqZLV4Ync62M/ftLaCGWAErpEOzHtc0iltBXgDZ0cvPdTj2VR+7zAlpqgLRcJLkAtHTGvu35mKjYlIW6TSJPqdGjG5l4K5AIYhsSo8FPak6UwxZpIkunLlig4PD1Wr1ZTP58dif/1+P/osaIl0CpxD//fK8HFtyUIgXsjDd6aDWe7FvL4Qie3xmWsGaeTHuaaGYfx/D8vwnYMlMAn3xv17bx2/Z5jOrQXGk9agbqbCgOnsHmmUy+yAFEKD+axWq5qZmYm5wz5fCB/2Wdlv7+uwcxS1IPN7dHSkQb+vfq+v/cODsVrYXm+0E/dpUDitC78WOglUdnjeeyKoPFjXOI4MSrortnivKgTXlunroVkdbLkXCIGp5hrO0V1pvEUJgsJ9ufT9pWO0WA8evvAkfE++IGlAujv2iAb0hH7Xoq7xYE4sBIh7dWSWe+YeGZsnY4Be+7z1ej3dunVLe409HXWOM5b6g0TdfkftZlv9Xl+DZKBBb7h3zKCnTqcrmcDxZ32C9NkkSb75Xl+cKY0JufOfptdrvrAo/P29/kKeHZQOC7Do3Bd0gMQZlBfX8Ot5vNFNa89N9VAP54TSDIbGBjhJ+97psdzr2p5F5H1zETb4+ZzTNTIZO9IoyQLr436xxsEgUWUqrz/6fWtqNgZqbh3p3d9U0Rf/y4G+/sV9ffsHF/XyV/ZVmy7pG75pTtdeOtDn/r/
N46zvCaAzyZiPk9IWh5uI96JHycFMM/m9yEMw6d/xF+Z37cn3DhhJ4yCYnzMdwrlfzM+Z7UnT2tMFrb21KF1N9Nlfu62Vt5zT1p2eSrWcLj03ramFnGq1oF/6tzdVn6s8+IQPSYCOr0foZ4z5BqLX4na8FvPrSeaAnqardNAeaHejp1q9r7XLNXUPEjU2OnrrN83pcL+vlXMVtVoDPfvuWVUqJd15ZV/JCajMxcVFffCDH9QnPvEJ1et1zc/P6/DwUFeuXLnvbzLGzOhNSkHrr/T0+c/sqtvtq7nV1ca14z06b7/c1tU/aKo6FZQo6PwzNX31KzsnJkQArXK5nJrN5lh22X1HfxbBn4zOEgU92HF8Lcc8wgiGQF66J5ReBfw5vfT5jDJ6IvRaGO5k5T9A1euhjDEzymgCKWPMjDKaQMoYM6OMJpAyxswoowmkjDEzymgCKWPMjDKaQMoYM6OMJpAyxswoowmkjDEzymgCKWPMjDKaQMoYM6OMJpAyxswoowmkjDEzymgCKWPMjDKaQMoYM6OMJpAyxswoowmkjDEzymgCKWPMjDKaQMoYM6OMJpAyxswoowmkjDEzymgCKWPMjDKaQMoYM6OMJpAeyJghhH8RQlgPIfyefbYQQviVEMJXh3/nh5+HEMI/DiG8FEL4QgjhffabjwyP/2oI4SMnczsZZfTmoNeiMf+VpO9NffYxSb+aJMnbJP3q8L0k/UlJbxu+Pirpp6RjRtbxztN/SMe7Tf9tmDmjjDK6mx7ImEmS/Lqk7dTHH5L08eH/H5f0ffb5zybH9BuS5kII5yT9t5J+JUmS7SRJdiT9iu5m9owyymhID7up0GqSJLeG/9+WtDr8/4Kka3bc9eFn9/v8LgohfFTH2jajjM4sPfJuX0mSJI9z858kSZ6X9LyUbSqU0dmlh0Vl7wxNVA3/rg8/vyHpkh13cfjZ/T7PKKOM7kEPy5iflgSy+hFJn7LP/+IQnf02SY2hyftLkv5ECGF+CPr8ieFnGWWU0b2Irb7v95L07yTdktTVsW/4I5IWdYzGflXSf5K0MDw2SPqnkr4m6YuSvtnO85ckvTR8/fCDrjv8TZK9steb+PXb91v72ca1GWV0epRtXJtRRm8kyhgzo4wmkDLGzCijCaSMMTPKaAIpY8yMMppAyhgzo4wmkDLGzCijCaSMMTPKaAIpY8yMMppAyhgzo4wmkB657OuEqSXpK6c9iAfQkqTN0x7Eq1A2vkejkxzfU/f7YtIZ8yv3yyWcFAoh/PYkjzEb36PRaY0vM2UzymgCKWPMjDKaQJp0xnz+tAfwGmjSx5iN79HoVMY30fWYGWV0VmnSNWZGGZ1Jyhgzo4wmkCaWMUMI3xtC+Mpwu4WPPfgXJzKGSyGEXwshfCmE8PshhB8dfv66t4g44XHmQwifCyH84vD9W0IILwzH8XMhhNLw8/Lw/UvD759+QuObCyF8MoTw5RDCH4QQ/vAkzWEI4a8Pn+/vhRD+XQihcupz+FqaYj3pl6S8jht6PSOpJOl3Jb3rFMZxTtL7hv/PSHpR0rsk/R+SPjb8/GOSfnz4/wck/T86bkr2bZJeeELj/BuS/q2kXxy+/3lJHx7+/9OS/vvh//+DpJ8e/v9hST/3hMb3cUl/efh/SdLcpMyhjhuPvyypanP3Q6c9h090ob+OyfrDkn7J3v+YpB+bgHF9StL36Dgb6dzws3M6ToSQpH8m6c/Z8fG4ExzTRR13LPwuSb84XNCbkgrpudRxy9A/PPy/MDwunPD46sOFH1KfT8QcarRLwMJwTn5Rx1t6nOocTqop+5q3VHhSNDRZvknSC3r9W0ScJP2EpL8paTB8vyhpN0mS3j3GEMc3/L4xPP4k6S2SNiT9y6G5/c9DCDVNyBwmSXJD0j+Q9IqO27Q2JH1WpzyHk8qYE0UhhGlJ/0HSX0uSZM+/S45F56nEnEIIH5S0niTJZ0/j+q+RCpLeJ+mnkiT5JkltjXaHk3Tqcziv482
w3iLpvKSaJmDDq0llzInZUiGEUNQxU/6bJEl+Yfjx690i4qToj0j60yGEK5I+oWNz9id1vMsaedA+hji+4fd1SVsnOD7pWNtcT5LkheH7T+qYUSdlDv+4pJeTJNlIkqQr6Rd0PK+nOoeTypi/JeltQ2SspGMn+9NPehAhhCDpZyT9QZIk/9C+er1bRJwIJUnyY0mSXEyS5Gkdz9FnkiT5C5J+TdL332d8jPv7h8efqKZKkuS2pGshhHcMP/puSV/ShMyhjk3YbwshTA2fN+M73Tk8Scf/EZ3yD+gYBf2apP/tlMbwHTo2sb4g6fPD1wf0EFtEPIGx/jGNUNlnJP2mjrej+PeSysPPK8P3Lw2/f+YJje29kn57OI//l6T5SZpDSf+7pC9L+j1J/1pS+bTnMEvJyyijCaRJNWUzyuhMU8aYGWU0gZQxZkYZTSBljJlRRhNIGWNmlNEEUsaYGWU0gZQxZkYZTSD9/4a8YURXNTmHAAAAAElFTkSuQmCC\n",
+      "text/plain": [
+       "<Figure size 432x288 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAOYAAAD8CAYAAABjJ9hGAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/MnkTPAAAACXBIWXMAAAsTAAALEwEAmpwYAABe00lEQVR4nO29eZBk+XXX+/1lZuVaVVl79d4jjeQZa7GtxbKxBXLYiGdkBTKEwcKEEcaEIp6BZ+C9APnxIsyLgAgg2ExAyEwgQOYBAgvHkzD2s41lvETgMdbikSxpFmm6p5eqrr2yKmvJysz7/sj8/PKbt7tn6e7qypm6J6KiqjJv3vu7N3/nd875nu85v5AkiTLJJJPRktxJDyCTTDK5XTLFzCSTEZRMMTPJZAQlU8xMMhlByRQzk0xGUDLFzCSTEZSHrpghhO8NITwdQnguhPCRh339TDJ5NUh4mHnMEEJe0jOS3ivpuqT/KelPJ0ny5Yc2iEwyeRXIw7aY75L0XJIkX0+SpCXpE5I+8JDHkEkmIy+Fh3y985Ku2f/XJX2bHxBC+LCkD/f/fcdDGterVgqFgnK5nLrdrpIkUS6X09HRUXxPktrttsbGxtTpdJTL5dRut+P7nU5H+Xxe3W5X3W5XklQsFuM5crlcPH+n01EIIf50Op0TuOPXlKwlSTJ/pzcetmK+pCRJ8oSkJyQphJDxBV9Czpw5o2/5lm/RwcGBJiYmtLu7q69+9ataXl7We9/7XoUQ9Du/8zt6xzveoRCCKpWKfvmXf1nNZlMf/OAH9cwzz+jNb36zrl27pqeeekqVSkWPP/64Njc3tbq6qre85S0qFovqdDr61Kc+pUuXLum7v/u7tba2pr29PR0cHKhUKqnRaCifz2t+fl5f+MIXdP369ZN+NK8GuXq3Nx62K3tD0kX7/0L/tUzuUba2trSysqL9/X0tLy9rZmZGY2NjknqWMkkSTU9PK5/Pa3d3V9evX9fc3JxyuZxu3rypy5cva2lpKVrRVqulUqmkYrGoRqOhzc1N7e3tqdPpaHp6Wu12W1/72tc0Ozur+fl51et1ra+vq1arqVaraWpqSqVS6SQfyWtCHjb4U1AP/Pke9RTyf0r6oSRJfv8ux2cW8yUkhKBCoaAQgnK5nAqFglqtllqtlsrlcnQ5i8WiDg8PJUnnzp3TtWvXlM/nFUKI5+l2uzo6OtLk5KQODg6i61ssFjU2NqZisai1tTXlcjnl8/noArfbbYUQlCSJyuWy9vf3Mzf35clnkyR5553eeKiKKUkhhPdJ+ieS8pL+VZIkf+dFjs0UM5PXsoyOYr4SyRQzk9e43FUxM+bPbZJT9lgyOWkZOVT25CSn4tQbNf34H5dC0NZXP63DzS9Lyox2Jg9fMsXsS6F2UZc/8G/0hrd+g/aPCrr2jd+vFz75QR01nj/poWVyCiXz2foyfu4dKp97sz70h7r64Xe3VVl4k8YvfMdJDyuTUyqZYvalc7ij0GmrdSQlSaKxXEudg62THlYmp1QyV7YvzZu/rY3P/z96Iv99aqug9c//gnav/dZJDyuTUypZusSvl6+pNP0NyoWg/c1nlLR3H+blMzl9ctd0SWYxTZJOUwdrnz/pYWSSSRZjZpLJKEqmmJlkMoKSKWYmmYygZIqZSSYjKJliZpLJCEqmmJlkMoKSKWYmmYygZIqZSSYjKJliZpLJCEqmmJlkMoKSKWYmmYygZIqZSSYjKJliZpLJCEqmmJlkMoKSKWYmmYygZIqZSSYjKJliZpLJCEqmmJlkMoKSKWYmmYygZIqZSSYjKJliZpLJCEqmmJlkMoKSKWYmmYyg3LNihhAuhhB+LYTw5RDC74cQfrz/+kwI4VdCCM/2f0/3Xw8hhH8aQnguhPBUCOHtD+omMsnktSb3YzHbkv73JEneJOnbJf3FEMKbJH1E0q8mSfJGSb/a/1+S/qikN/Z/Pizpo/dx7UwyeU3LPStmkiR
LSZJ8rv/3jqSvSDov6QOSPt4/7OOSvr//9wck/UzSk9+WNBVCOHuv188kk9eyPJAYM4TwiKS3SXpS0mKSJEv9t5YlLfb/Pi/pmn3sev+1TDLJJCX3vXdJCGFc0n+W9FeSJGmEEOJ7SZIkr3RjoBDCh9VzdTPJ5NTKfVnMEMKYekr575Ik+bn+y7dwUfu/V/qv35B00T5+of/akCRJ8kSSJO+82y5ImWRyGuR+UNkg6WOSvpIkyT+ytz4t6UP9vz8k6VP2+p/to7PfLmnbXN5MMsnEJUmSe/qR9G5JiaSnJH2h//M+SbPqobHPSvpvkmb6xwdJ/1zS1yR9UdI7X8Y1kuwn+3kN//zu3eZ+tnFtJpmcnNx149qM+ZNJJiMomWJmkskIykhv9V4oFDQ1NfVQrxlCUC43vF4lSSLSQHdz/UMIQ++l//dz3OmzdzqG/1/q2mlJkiTew8v9THq8r/QYXr/bPd7tfP759H3e6f/09V/OuO90PaTb7arb7b7sMT8oabfb2trauuv7I62YLzX4By3lclmLi4uanZ3V7OysarWa9vf3tb+/ryRJ1O12tbe3p3a7LUlx8t9JmVEoPpfL5VQoFIaO63a7CiEon8+r2+2q0+lEpeLzY2NjyufzcVIxkZjInLPdbg+BB/65Tqejo6MjdToddbtd5fN5FQoFJUkSr1koFOI42u12HBv3xr24EjCOQqEQx48wnnw+r7GxsXjfjInz+rU5Z7vdjmP14ySlwcGhZ8P9tFqt+Hzy+bxKpZIKhYLa7bZyuZwmJiY0Njam3d1dra6u6tq1a1pZWRka/3HLSy0mI62YkqISPCwplUoKIQxNCiZlLpdTsViM4+p0OnGSsepyHBOf11EQJk2hUIgT0ld9/vcJXCgU4t+cB6VIT0xXxvSzY4J3u10dHR3F18fGxlQoFKKS5vN5FYvFeF7G3el0lM/nlc/nlcvl1O121Wq11Gq14nVRQN7vdDpDCxjPFcXJ5XI6OjqKiuwegiu+K38ulxs6J98D/4+NjcX7YGHgdZ7B0dGRkiTRxMSExsfHdevWrYc+115MRl4xH5aEEFQul1Wv13X27FmNj4/HibS/v6+jo6MhJcvn8/E1JjYTBkXieBQwSRK12+04yVGwdrsdr4/lKBaLd7TCHM9n+C0NlCGXyw0tML6IFAq9r5yxoCiMRdKQtcRCc16E+/H7dreQ13zB8UUE61UsFqMi+eKDhXdB2dIWnM/zGXd9eUZIsVgcGgsKO2qSKWZf8vm8JiYmVCqVVC6XNTk5OTSJkiRRq9WKx0oD65rL5TQ2NhaVKe12uZUoFosqlUpRWSRFK8zEHhsbi38z8bEePuE9LnNrJClOUFcWdwtRUlc8Jqlbf44rlUrRVfT/UW7Gx/PBfc7n86rVatFacS8oJhbNFxwWkVKpNPRs3MpyfKvVivfM/RweHkYrzH1zLhaDUqkUx10ul1UsFuP3OwqSKaYJlkaSms2mJGlvb08HBwdDEzKfz2tvb0/NZnPI4iCs4m45sTatVmvof47h8x7jcQwTCUUIIahYLEa3DreTSY1FcHeXcfnkZkFxpS6VSkMxbqvV0uHhYVwkEM7Jb7/39ALCPfK+Wym/d3eTUVoUnGMZM8qJC+5WtFKpDHkk/M7lcqpUKkPK3mq1hnCDUZFMMftSKpU0Pj4+ZDHHxsbia81mMypFCEHVajW6XB5XopDSIF7EdUVJarWaarWajo6OYswpKU6iw8NDtdttFQqF6Hox6bDMTLwkSeKYAThQbCycT0xXSmkAQribilIw8T3W9c+4N8Dn3bI54MSCJikqW9p99ev734yb/3FbeX6ckx8/l5/76OhIuVwuPrdms6krV67o+eef18HBwQOZRw9KMsXsC9aCCYSijY+PR4VotVpxsqE4lUpFkmK8yYQDDeQHtw4go1QqqVKpxNUaS4Pyu5vqkxOlccVydJaYjTGiDD65HflNu7LSwGpzHwBZfh4Uz11sFC4NXrFY8Hm
EOB2L6W4si5SDNiglrqorY5rSxrh4pgcHByoUCqrX65qamlKSJFpfX9fq6urIWUspU8woe3t7WlpaimmSg4ODISUplUpxwne7XdXr9aFJwMQuFotx8nhaoFwuD6GlTHxcZNyrvb29IQvkKGk+n1e73Y7Kz+d90ntKJm1dmdiejkjnDB3E4RkgHksCaHEtv6/Dw0Pl83lVKpX4zA4ODuK4XZHwGBiju6lu+aWeS18sFuMCeHh4GL0LSfHeeQ+FR6krlYq63a6azab29va0u7t72z0+DGFxfjErnSlmX7rdrlZWVvTMM8+o3W5rYWEhxl+eNsFKYjFIFzh8j5XxlAHX8BXeQSFHfh0o4gtsNpvxdYATSVEZ3JJiJff399VoNKIl40dSBFy4JikRSXEBwP31hQFl8vQGi8rBwUF8Hm7FuVfG1Ww21Ww243Eopt97esGQeopZqVSipeUcaSCMzzloRoiB13NwcBCV8qVyig9aJicnNTs7q2efffaux2SKadJqtXTz5k21223t7OyoWq2qUqlocnIyTgippwxjY2M6OjrS9va2dnd345frqRN3FaWeu4vr6m4j4I67y1hXV1b+dnAHt85zmKVSSbVaTd1uV9vb29rf35fUm9ge92HFJEWFYyJjRfL5vMrlcvzhGsSbR0dH2t3djc/F87RYd3fzfXEqlUoxVvdn6AuEg0OMK01YcCucTvOwsPC8d3d34zM7PDzUwcHBQ1fMjY0NbWxsvOgxmWKa4IKurq7q6OhI5XJZlUpF1Wo1Ip+4pcSWu7u72tjY0MHBQVzRsXQ+SVih9/f3h1w5FKBcLg8hop68vxPwIikipj5x0wsEriWT1q+Ztt4ujiIfHByoXq+rVqupWq1GdxDFRuFwb4vFYswDM75ms6mdnZ2hPKM0AItYFHjNx+HndXaSg0gc655LoVDQ5ORkTH2xKIG+p0ODUZJMMU1YVVutljY2NjQ2NhYnOCswYA7vYTFgv1SrVY2Pj6tcLkvqWREmJW6nAySSooVhQo+Pj6vVamlra0tHR0dDY0iDHpLiZCN1wDVQco/VPF2SdgEdbWV8pCcAq0qlkra3t4eUPa3gKJA0yGnyfB2gwTtA0sqRtoT878/PQSrieFz7yclJTU1NxZylg1KtViu6wr5QjopkimlCusPjo8PDQ+3u7t6WpHdaGhYjl8tFYKFSqWh/f1/NZnOINeSxk6/0IJv7+/s6PDyMk8ypb7iunj9FATwXyeRz4IRjGAMucfoYjhsbG1O1WlWhUFC1WlW1Wr3NbSaX66kPSWo0GlpaWrotTrxTPPdiyuCLSKVSUS6X097eXlwscHl5frjH0oBG2Gw2tb+/P5QbDSHE+Pvw8DAiyqMkWaF0X3BRSY+4tdnb24uTIO3yeUK9Wq3GVRzFS+fr3BVLu2AQz6UB8aBQKOjw8DC6wL5AIG6JPG/naLGnKTzFkXZjWXA85cO4cCclaWtrS3t7e/H6afDGn0/aIqfH/WLCM6tUKioUCtrb27uNKwwDC9IC7rvH9/l8PnojSZJoZ2dniFhwQnpw10LpzGJqUF42NzcXY8nx8XHlcjltbm5qZWVlyPKlXT4m2N7e3hBp2uMmaQBmgOru7+/HXGOlUomxay6Xi1C6ExZAPd0dTKOPrgTpyUbqxMfk4+dzTGhcPMaC9URJnAnkrqZf111ml7RSphcavwfAuHQ+V5JqtZoWFhZUKBS0s7Ojzc1NHR4eSlJM1/hCSKVQkiQql8tDBP1RMlKn1mKiIPv7X1aSXD6uy2SiKwrh9Xd8xxcS/82cRPnd23Dl94IBYlXcfdJNtVptaKErl8sxZga53t3d1dbWlprN5kNzafv3mVnMtORyOZ0/f17PPntZ73nPd+mZZ55Ro9FQvV7X+fPnNTc3J0kRSd3d3Y3J/3w+r8PDQ21ubmp/fz9aoFqtpkuXLml6enoo6U+sub+/H62oc1C9HpO8YKFQULlcju4xkxLurqQI9hweHsbcogMZTFw
mo6dfnJhwtzwg73Efzv319IyndNJuc6s1IFrgbnJOt1L89vpNALFqtRoRc0A2z9UyXjwP4mGuh3teLBZVr9c1MTERrf7u7q46nY729vaia37csrCwoDNnzuipp5666zGnVjG73a52dnYkSe9+97v16KOP6stf/rLW1ta0ubmpRqMRJ+zu7q52dnYi0ikpEg0qlYqazaaSJFGtVoswPIrXbre1u7sbARtcJ9IIDtYwGVGSWq2mqampoZQLcR5KBQUPIIa4FDcTxYXcno57pWHgyJUKcZedYzzO82JtFN7PhUuJpSJuTRMqoC2yOE1MTKhcLkdXtt1uD5XDtdvtaBEB34iBC4VCJIBUq1XNzs5qampK9Xo9AmutViuGD1SfYJGPU1ZWVrSysvKix5xaxQSul3pW8fHHH9ejjz6qa9eu6erVq1pbW9PW1la0iJ4jQ7ngX2I99vf39fzzz8eVemJiIp4fBSSfyeTzmM9XdVIupVIpXqvVamlsbEzlcjkCQoyPFIJbSNImoKmIWxJPeaStV7oiRhpUgKQrVjzlUyqVNDk5qYmJCT31lPTOd75TpVJJ9Xo9Wiqv8GBxcZCLeyE2BISjnIsUladSnBzB8y6Xy5qfn9fc3JwqlUokeXS7XU1OTkqSxsfH44K3t7en9fX1E+fPnlrFdCFFcenSJc3Pz6tareq5556L7iBJfojrR0dHcQKlJy1WKJfLaXd3d4j7yeSFnIBLjCWG3M7qPTMzo8nJSe3s7ERlAXGEfCBJ9Xo9KiLXaLfb2t/fH0rrMDbPK2Lp+N8VFKS6Wq0O3Z+7yYwVF7RSqcSuAKVSSU89JV28ePE2i1QoFDQxMRHdcVzUcrkcwZpGoxG9EcaBxUxbbnel0zEqoQEey9TUVKy9RdklRRbXKKROTq1iOtl7c3MzVpIsLCzorW99q8bGxnRwcBBjQCY6CpkmThPjScN5NaxjGmSDbI0ilUql6LpBNJientbc3JwWFxc1PT0dY1pYN9PT01EZKL7m3FRUMOmwIsS3Tu9jvCwyLB7kBJ26hudQKBSiqz0+Pj5Uf+pWmM93Op3ojqLwoNgcSwwuDdBiigpgHpGLhCXlYYEzmljA0hxhFti1tTXt7e1pa2tLW1tbajQayufzqtfr2tnZOfEysFOrmPV6XY8//rh+67cGVLxGo6FKpaKpqSm9+c1vVqfT0ec+9zmtrKzEiYiColTETr5qe8mTNEispwkF3pPGqX/EX9evX9fGxkac+LOzszG+7HQ6EXUcHx+XpCG31ReKdNzH2Ihbd3Z2onUiSZ+uw8S1JhfovFmU+/DwMOYZ/f59knthNkrOgoC1rVQqURGpKEmnc7yWlXCC7/Do6CiOH3edxQXwzBdcXsMNzizmCQsxxvve9z5Jiq4qcdpjjz0mSfq93/s9XblyRUdHRxHcCSEMVVN4ftMrKbBAKCPF104NQ6GdRXR0dKRKpRIr7MfHx1Wr1TQ/P696vR4VGv4qkwxmEJONicYkJrZrNpux7CmEMITc4pqXy2VNTExEywiAAs0Oqh9KQNrBkWZJunDhwlAjLLiq8FglxZwu7+H6QrDY2dmJ98ACgHLyWZ4/yg8Ixz2Sg93e3tba2loE5yAbkKvOFPMEZW9vLzL8C4WCzp49q0qlEicbgmUrFou6evWqdnd345ecjscQj4GwgLiG5XJZtVpNIYQ44ZiQ/DAhK5WK5ubmND8/H2Oi8fFx1ev1aEWIxUIIsf0I7iQTzXm2BwcHcWLv7u5G95GqjWq1GpWIXO/U1FR0WX0B4L6oKsGKUyfpMW06HoWswEJXrVaj9WIxIbYG7PH4D/fUrTPPrFarqdVqRQAvzQIijFhfX4+833a7HcGpZrOptbW1EyUcnFrFZKWUpGeeeUY7Ozs6c+ZMzG3RSkTqrfjEgM8995y2traUJEmcWMRIrtTFYlGTk5Oq1WqxnQjAkq/03t2A2HJmZkZzc3OamZmJYAioa6vV0q1bt4bqCSVF9xPUcmd
nR7u7u5FUf3h4eNskJUfrTB8UlB8s0+7urm7dujWEelYqFY2Pj6vT6USUlFpLb/Fx8+bNuJjxA7PK0yHkErnXJEniNaanp1WtVmMYAYmAdBDX2t3d1fLy8m1WlvujEIHnNjk5qfPnz+sNb3iDzp07p1arpS984Qv6zGc+M4RkP0iZnZ3V3Nycnn766bsec6oV89atW5Kk5eXlWFtJXAMVDXBhbGxMr3/961WpVHTt2jU1Go1Y2ycNN5IiFsPKTE5Oql6vR7eNapPNzU0dHBzEukSs6dzcnGZnZxVC0MrKitbW1qJlw/VFcVAGJpGXYeES7u7uRuXyyhCfrAA6IM4sEiCnFGvjatLwanl5Ocan7mKSs5R6aCdW2/OZnU5HW1tbOjg40Pb2dnweXoTNMyHmJJ504gcorn+OZ4Vi4vrzXY2Pj+vRRx/VN3zDN+gbv/EbVavVtLKyomvXrsVF47hke3s7Nnu7m5xaxXQO6DPPPKOJiQlNTU1penpaMzMzmpmZicgfsVGtVtPk5KTOnDkTlQGU0H+Y5CCV/C314p+5ubnoakrDxb7FYlHlclnb29taXl7WxsaGtre3ozVJ9wdCsbCUxHwoAJ3sibVYSHAJiYU9L0u51NmzZzU/Px/zf41GI46BUKDRaMTFa3d3N8bp5GAlRfd7cXExKli6HSbPjGctDdpRHhwcRF4raDPxMa0xsfqeV5U0tJDg5k5MTMSfyclJdTod3bp1S41GI5IRjlMxvQfS3eTUKqY06OfKCru9va1WqxUtC8AKSXHyaZOTk9rb29POzk6cEM6mgcUCQuoF0lgLJi6dAUgX7O/vR/SQhLszh1Akp+Fh2UnWo0iOWtL/BwtIHMsERomwUjMzM6pWq2o2m1paWtL29nZMI3j6Ic0C4hxuoS5fvqyZmZmY3sGtdkogqHc+n4+FzaSbut1ubJHivFniYXfFiY1ZFCuViqanpzU9PR0LBPhpNpva2toaInasr69rbW3txAGgU62YWA9crWq1GnN8h4eHqlarUTlZXcfHx+MXvL+/H1MNa2trcRVHIZkw7lKhJKCTLA5O0cOas/J3Op24QMCXdVTV41oQSawpaG+73Va1WtX09LQKhUIkVZC7XVhYiGhvo9HQrVu3dPXq1bio7O3tRXc6XaGCi4q7Pj8/r/Pnz2t+fl6//dvSN3/zNyuEEK3sysqKbt26FeN4t96eYnJ3tNvtxu9BGuQ5PQXk/WuxkoBRW1tb2t3djcwkSXGRAWV2NPuk5b4VM4SQl/S7km4kSfL+EMLrJH1CvZ2lPyvph5MkaYUQSpJ+RtI7JK1L+sEkSa7c7/XvVZywDSqH1drZ2YmIIxZuenpaZ8+ejfEm1o7OawAKjUZDKysrWl1djTA+gAixF8gkSC1xHSVg3W43lif56o7CUnnvNZyAR25l3KUD5USh4N1y32tra/F5QFtz0j33gauKQgCKnT17Vq973ev0+te/XrOzs0PWuN1ua3l5Wevr62o2m5Fhg9V3dJvrODHCecG4+jCBJicnh1g9XhdKjE+aBy9BUrwGLjyppEKhcOyu7MuRB2Exf1zSVyRN9v//e5L+cZIknwgh/LSkH5X00f7vzSRJ3hBC+GD/uB98ANe/Z3EwgC+UDmxMGOhzjUZD7XZbS0tLtwE85PqYwExc3FDcUyYgSu+oKi6h83DTcL0za7zSg/gMhQZpxZ3jPCg/bjTbAty8eXOI9UM3OY53N5UJOzExofPnz+vy5ct69NFHdf78+ZjGofRqe3tbknTjxo0h7jFKT4w+Pj4ey6+4b5hXLEgAXYwDr4aOBjwfXNcLFy5EiiWkdQgI29vb8dnncrmIL6DkuMonKfelmCGEC5K+T9LfkfTXQs8MfbekH+of8nFJf0s9xfxA/29J+qSkfxZCCMkJJYucSI1Sepc3FKvZbMa4EnCGSQK6x+uQz6VBnIWy0FDKd8dyAMRznh5HOkPIea0grqRXcEMd7SX+xPpKGuoHBIDkTCE
vuOa65Ehx66enp3Xp0iVdvnxZCwsLmpycjJYfaw7ZQJKWlpZi+obUxvT0dGzYxbGwc0jHSL10hrualHl5jyTQ2mKxqPPnz+vNb36zLl26pFqtpsPDQ62trUVrTajCvU5MTGh2dlYLCwuqVqvR23m1x5j/RNJflzTR/39W0laSJEBO1yWd7/99XtI1SUqSpB1C2O4fP/ChJIUQPizpw/c5rpeUsbExTU5OamVFkSxAYhzLifLQ9nF7ezu6T95bhg55uHQzMzO6cOGCrl69OlQaRlwHuANXNN1qhNgJxSKfh2VON8gCvEJRy+VyTMl4ATE/Tptz1JnSMCcBgLDOzc3p4sWLWlxcjPlZrpUkiRqNxlAucmdnJ1rMlZUVdbvdeJ6JiYn4mbW1tYhOk6t0qwkqi+uNVScXOjk5GXOji4uLunjxoqanp9XtdrW8vByRbeppWRzGxsYiKFQsFrW1taWrV69qfX095rePS9gi48VKv+5ZMUMI75e0kiTJZ0MI33Wv50lLkiRPSHqif41js6a4W1IvVQDDRVKktaEcDi4A2WPVnI0CI+bcuXN6/PHHNTc3p8nJSV29elU3btzQysqKdnd3YymXI6dY03R9ItYLEAlk14utJycno3t6dHQUrSYoqlde4CZ7jEv1CUgqRPqJiQktLi7q3LlzUSE9ntvZ2Ymd/FgIAKa8n86FCxeiS7m9va2VlRVtb2+r0+nEOD1JBk2vuV9iWp6F91WCpQTxH2LI9evXY24TK8wPYE+xWNTCwkIsaqcL/8HBgcbGxjQzM3OsMabXqt5N7sdifqekPxZCeJ+ksnox5k9JmgohFPpW84KkG/3jb0i6KOl6CKEgqa4eCHQiAvCRFidZuzuDmwvSRymYU8McYVxYWNDMzIwee+yxoeLf7e3tCGg0m81IVIAEwCLAGJ0M7t0JsNyTk5PRol+7dk0bGxuReeNtJaVBJ3PG4nxUJv3MzIxmZ2c1MzMTLTQpDsYJjVAaAEXeE4n3arVavO7m5uZQCw/vr4vVJj/JazxTb3NJKkRS7DgASi4pFhxwT3gIxKIzMzM6d+6cHnnkES0sLKjb7UbeLAXcNLA+LuE5vpjcs2ImSfITkn5CkvoW8/9IkuTPhBB+VtIPqIfMfkjSp/of+XT////Rf/8zJxVfhhC0uLioqakpLS0N74kh9YCN6enpaDlRPFbuVqsVy4RogMwq7pzMM2fOaH5+PiKX169fj1YTyzkxMRFzqLBgUHpqDwGbsG7SwOJvbm7GMcEBlQZb9wH+uPsH04j4jPYdoJh8BqXY3NzU1tZW7OruKSHiNVd63GIWqa9//evRdcai4656HOu5Udx1FgxcasbrSs1CyqKFZ8E5aVFZr9d18eJFnTt3TsViMca+sJX4Dj0mPyk5jjzm35D0iRDC35b0eUkf67/+MUn/NoTwnKQNSR88hmu/LAkh6E1vepMWFxf1uc/1uIus+vBgmRSAIsRzzuyRFIkAuJ1MIiwjsP758+fjZ7zCxOsRcduY1Lg8VFTs7OwMuafpJtGUi6EsTG7QysnJSU1PT8ec7PT0tGq1WnQjWRS2trZifArlD8R0f38/gktpiwciLQ26PEiD3aMBuOhuwILIPXp/XuJl+uOAAzAWpwFC4qBOleeYy+ViiFKv12NMmc/ntbm5GRcawCYKwr0w/aTkgShmkiT/XdJ/7//9dUnvusMxB5L+5IO43v1KLpfTW9/6Vn3rt36rPvYx6b3vfa9u3rypq1ev6vr16xFOZzUmnsStZEK6GwmRmwmyvb0dydcLCwuan5/X5cuXValUtLy8rNXV1aE9OlB8Z6lQ8gWaiFVgXM4cIh52ZBV3HEX1zvLkRpnMxIQsFFNTU1HZdnZ2Ynx9cHAQFRsqoaTYfpO4j1jwF39Rete73jW0kABksZAQ60O+wFuAPkd3hmazqY2NjVhy521VAJdYoFhYWQQZM+kvxuFdHzxF43nuk5BTy/zpdrsxBnrPe94TGS8vvPC
CvvrVr+rKlStDFDSPR71AmGQ0FhOFQKmKxWKMYUDjLl26pEKhoJWVlWhl3J0mJcC5vOUIcSHsIeJa3EmPkb0Y+fDwUOvr67FEzNNDjUYjtiGp1+uan5+PqQwUb2JiQjMzM9E9xKsAxYWZ5KVduN3vec974v0Aonm6hsUG5SC+xFvBzV9dXdXS0lJ8niyQ5XJZZ86ciSQCUkAQP2q1WuRAu1fhuWLGsrOzE3OafA8nIadSMZMk0fb2tm7c6OFSS0tLkaD+9re/XW9605t08+ZNLS0taWtrK3bNw30DgPAepbidnm/EegHsHB0dxVYhMzMzsZE0Cigp5kyTJNHW1pY2NjZi20wnYhPbOpVPUnQ9vWE07R/X19djoykHlkIIkU5HBwEsK5Ud3BtjdTfchRhSGhDl19fXo9cBIAPQRbzr7TVbrVbMe5JLhkDfaDQicZ7vAbR5Z2dnyPOAADI3N6eFhYWYpgFN5vsEVW40GvF5AzCdlJxaxbx+/XrsHfv1r39d0qAvjlc4kCObn5+PltN7wgIcYB0AEJiIkBfgp66tranVaqlWq0UEFEre9va2tre3tb6+rv39/aiYTHQAHawAipIkg411Pf8JQYE0jpdFUSM6PT0dFZJyJ+I9V2CeG6kHd5HJAafbrfAMl5eX4/+45MSbnq5x1hOFBDs7OxGMoagay4jbPDU1Fb0RkGpSPt6ekkWL7/ng4CA+X5QUiiWx8oOQfD6v7/iO79CTTz4ZAS+8qrvJqVTMEILW19f1xS9+UZL067/+60N9btx6uZIRDzHJiQ2h8QHKgB6CbtLKEmvUaDQi/xYwgkQ6gASb3jQaDW1ubsbUAAwi31PTyQBp1g5jOXPmjGZmZiJyirAlBMrIvQBqeWsSFiSQTCwiisUzIaaDhLG/v6+NjY3YTAzX3FuAsLCxMKLAtDdxFBkEuV6vx2J1XGi+R7wJyA4AanChnY9bLpejl4OVfqmyrFci3W6vVSY50rm5Oe3t7WWF0mkplUqxkbIkra2tDXUEAFgh1vAerNQiUnvIys/kckCDhlWADKzKWM8QQlzpQQ+LxaJ2dna0tLSkvb29oVpMYioUCTfVQReAlampqVhXClUQahsAB8rnPYewatwjOUcvSYNFg0sL8CP1FAuCA5RHr1Jh0cNyoKRewUOumAWI8XvLE2+4jSKyuBFj8oNXAaIM4uwUQu6brnkPsntBkiT6jd/4De3v7+vatWu6du3aS37mVCrm2NiYHnnkkThxKPcifuKLZEcrCOGs8l4XiaUgBmNSEPeBrpI4h+UiKdYYksIAcKEwGLYMOURXTCY1MSmkgHq9HruN45aiXCgf1p30iDSoTSWmI56jUBvkFusjKbrULE5YX+JvxHO8VHHgis7OzsZ4ECQXV5c41zcEdvQZQjrKDXKMC+9d+MAKQLlBXil8pxvD5uZmzIM+SHmlNL9TqZiSIm9Tkt72trfFyYiVwLqhdO7mekGuu41eheH8VybTzs6ODg8Ph3i4WMN6vR67hUPlw23c2NiInyPvh7XErSM2wt2khQjW0IuZGbePl/cPDg60srISOabe1hKlw5rBPoI3S3qDNpwo5zve8Y5oMcfGxmInAxQa8MY71DFGricpAj6SogfSbrdjO5Z6vT60VUQIIaZY6P/jPXNJNbXbvT6ztEnhmZ2knErFTJJkKG7Dai0uLkaQgkkLyOHlRulJ7iwZrC0WlBjUUyl8lo7pTLL19fXo1pI6KJVKsYkXhG0ss5eTgYYyCdnTExcYpcbVpZMB8SRspLW1Na2urkYLz8Lk7TnIK7qUSiXNzs5qcXFR1Wp1iFp49uzZaPl4Dlx3e3t7aKEhZYS76gQM0id4IFh7ngHla9KgXQvuK8+BZ+2laSxIpVJJMzMzkcN7knIqFdO7ukm9TuydTmdoDwsvC8MqooQOljiSyw/XkAbEdtwr3DNJ0RVlItMwiwni5VjA/IeHhzGmgnu6ubk5RFpn8XDmCznbZrOp1dVVray
s3IYGM3mxjtVqNW6/QG4R74HFY3JyUjMzM1pcXNTi4mKsOEGBJen8+fNx8rN/pVfSONpNjpOxOSk+XSnjfXU3NjbidwQmAHCVJImmpqYiewgX3rsV0Jdoe3tbGxsbrw3mz6tN2u22XnjhhThZX3jhhdgJnY52oH2egvAeNU5HgxaHQuNGUfVA5QWuF2ipEwWIazqdjlZWVrSzs6O5uTlNTU3F1hggilLPcqOUABYOyIBmAqbQlQ7rurGxodXV1ZgjRYmJXbHWWHUsCvQ/QCusXLVaHXL1fa9JxuVtUJyZ5HRErCxWUlJ0nd2KuvI5c8cJD77lXrFY1P7+vq5fvx7bf+7u7kYUGaX1RfMk5VQqZrfbjYWzkoZqByFNU40vaejLx51DmUjCb29vR2QWlxH6F5xOjzuZQMSMTFLewzICpNAkGstGU2JcMUq0nIDAeXBVvT/srVu3IueVfj30/cGKYTknJib0yCOPxPIvOg4Qi7MIoQDEvjy/er0ey61g8gAiYVWx9HQUWFhYiDxeLDb507GxsVigjUvsCwuLS7fbje795uZm7MsEakzM22g0YkzM9/daaC3yqhQv+1pbW4t5QEnxi3O301dyFIlJA0GauMStqTRwvbiG50khvUuDqgo+T9w5OTmphYUFzc7OqlQqaX19XUdHR1FxnflDxQr1iSiBu6sALQ6sYOVYCAqFQky5gBh7G0vQZ6yzK8Uw++gxPf3009HtHB8fjwsECxbUOLrz0RXC2TlYaGLXRqOhra2teC8hBM3MzGhhYSGOc3t7W9euXdONGzdi13VCmHS/JO+uh5t+XDI/P6/FxUV96Utfuusxp1IxcTVRTFZOcoKAFKQOQAOdcueKubm5qZWVldjZAOvnsSmxmTeywi1FQYhdvWCZdAs9XykRI1XARIP+h8UnBUR3OO4Dl5yJ6deh9ysoL3lcJ+7D/vH2HygXMSgLAyklGl0T82GNOScuPQgv1hgvg2eGVwAdkeeARaVfLfnCq1ev6ubNm7FLgvOHy+Wypqam4l4whBRHR72tE45TVldXh5qf3UlOrWKy+Sv/Az5IihQwUhwQv31nZC9X8naHxFkkzZ1jivJgNbE60qDaAQvGJAF59RynpxZw26TBtvSwhmCyEO85GkxRNPuSzM/P6+zZs5qbm4sWUxreI4S/eV7EvDwvlNwJ95L0la98ZShNwwIEOwgKHZ4E3gzPApCJZ9BqtVQsFrW4uKgzZ87EBfTo6EgvvPCCVlZWYj6yXC7Hrn18jw7k0XUe8gWUyAfJ/LmTvBS4dGoVk1SBpKF0CKjd/v5+tCqzs7NDyXHyX84JxdJhcaGHoWCesPYYk9WeRPjR0VGMz1Bc7/0jDRpxYVkPDg4isZsf78g3OzsbAS0WClw2lPLcuXOam5tTt9uNNEDiPvKj8G9ZxMgR4gIDtBCj9hTtQ3rmmWciCQDXEbCNhYwFxLsgYIFLpVJ0XX3xCiHEhYvnymI1Pj4+xLSClsj4eQ6Hh4daXl4eajo2MzOj8+fPRw71ScipVMxCoaDFxcXoSrqLBRoJ+EPcSOoCi0fVBfGQ96Rh1yjSMgBLDuZ4o2c6wPN5CqzJF3p6p9sddFvHshFX4sa5NZ2ZmdHZs2djUTQAh6ToTuZyubivJDGrJ9nhuvpGS1ATYQQlSaKNjQ11OoNtHgB2SL3galYqlViCRaUHCwo81UKht+M0CwplZVIPA1hfX4/uPfdFOsbJ4bis0CI99eIbLBUKhfiMIHycpJxKxaSaf35+Xr/5m9Jb3/rWWKvowIjXCLK6s9KOj49rbm4udlnDrYV2R04Ud5SVmt6m5POwpOmaTBgovokQysjkduK9NFA0rAQxs7NcnOiOZZMU0UiabFE4jdXG+jt3FrfWe776ooEre+bMGc3Ozsa2JSxOsJ6oe3SwDUriuXPnhjrQO5LsyCkK1+l0hpp08cw3NjYiKX5xcTEWins7SzojeCHDScmpVMx8Ph9bTkq9Fv4wRNbW1mK
zKQckmAjEi/wNOkrOE8u6vr4e9x4BCPJyMY8LsYJexIvbyuKA++rbNnilCcAI/WWxWjBciE1pT+I1lSCypF6IUbG6xNTkL5087t0Q2D7dFyhJesMb3jBU33h0dBR7H4HsOkqKC9psNrW+vj6UhqHNiDd6lgbuPYsUcSOeBmMi/GBhcfYUi4ykTDFPQkh2O4n94sWLsVfP6upqTEl4KsDdTl5vt9sRlYU2Rm0isYy7t+yPCarJik0HARSWfGe5XI41k6RNqHghh5ckier1egRzcK0Bt8i5gmKmq0qIzUA2UW7aXMLuoeUJDbGcu9tut2P9KDEfSjIzMxO70uMB0PgZAgRIt/NhUWAQWe4dK8qiwHODdLG2thbdaknx2N3dXa2srETPyJlM7iqDL5yknErFBEllVXz22We1tLQUaVu1Wi3mwTyniSuIQrRarRgHoox8od4DFveVSQdk724gsSQACTEavW/q9bqmpqY0NjZ2Wy0klRuNRiO6qc4CAn30UiZcVm/u5cR0mELQ55ylxNg7nU68Di7q1NTUUK0jzwK3kWZc1FNSwMw+oDwj0FOeOdf2+k2UitwvIYKniPw5sICAmnsFDikUz20el9Dm5Gtf+9pdjzmViskEYnKzirJjMhQxvkQnF3h1BZbOq/JZ9XkNa0DCnAmLshFT4s6CxnplC1YYlg0TDEXmmGazGatccLGZdN5Fz11a31YvXTPKWOGsrq2taXp6OlbB8HwYNx4DC4eDPyg9lD4ssqRo4ek5hLvJsdSPdru9Js7r6+sRWYcyCGgEmR9eMfeF0nF/fAdYSjpCsKks3sZxCM/9xeRUKub4+Li+6Zu+SfV6Xf/lv0jvfve7Y4pkd3c3InvO6/ROcqCEkmIahdUbhaKyxFMEVNnfqdCZ/7GoXI/JRLwFAsnKjtLShJoeqlhS0g0oAm5sLpeL7i8uOnEbMS4LBqgqbit1mdwnpWfFYjGmUACvpN6mQlisZrMZC4XpzUMaZ35+PnbfAxi6efNmLD3j2dBeslgsRhIERc/cXwghEkVAtXkuhBDQMvF6YEgxB45LnHd9NzmVilkqlXTmzBldunRJkvR93/d98QsDwaNoFmWEzcOX1mg04qrqvXgAgZxUnWbO8MOqjzvl6QvfkQveqzTYMtApat7xzbvoARaxKExPT8frYlFIUfDDokH8TYw8MzOjM2fOxBQHyCh0PcquqGl0xPm5556L7imc1omJCeXz+Ribk0PEsnvqhOocYskzZ87o7NmzGhsbG0LTC4WC6vX6UKkXaSieAUAS593a2opdAh0Qy0jsJyC4XJDYm81mrMav1+u6cOGCHnvssVgVQnqkWOxtgrqyshJ3WUY5ve+OW8W0S+yJehSDvJtbKKynpJi+QQmIjRy8cVaON7miznJjYyPeX6VS0erqqm7duhWBDrrkpRt2AVgdHBzo+eefH8pxlkqlmFqhXQefxRWVpLm5ObVarbhISYpkdgAuCr3ZXBZrilsL4gpKjJXP5/O6cOFCdO/h4XqHhPHx8aEFcWtrS1euXIlAFSAWsTHP9iTlVCpmq9XS8vJyjKGWl5djThBiOaAJZG62Ouh0OjFpzz4czlFlxaffD24oaKjXFzp3FWtH7o/tGSgVwwrDVU0XZJOTpJwJEIqJXCqV4sSD7O0F3pKGXF7i1UKhEJXA85qgyl4EjiJPTExofn5es7OzkqSpqalYYoYi4lEUi8XYLRDFJJ4GkYaYLim6t5ubmzEmhRfsYQSxcLVajdRB2qWsr6/HrSXACggDCAsedGuRVyqnUjGlXtzIxi5Xr16N7mQ+n48Tq16vq1AoxHIhlJcvdHFxUdLAvXSl9gLd+fn5uAfI6uqqNjY2ojVzYIY41YEkJhvpDRQOS8nK7m1CsNqekwMIIbc6Pz+vRx99VBcvXow1iuRcPS51ihsUuL29veiCe8qHMUCIgDVFzIk7yn0RsxIi7OzsDOVCy+Wybty4EdtSsq0CoBrVN6urq0PN1Ij
B8QYgJpA3BWwC+GLMEPkrlYpeeOGFhzof03IqFRMFxNXygmevM7x582aME3FTUSBcOUAgFIcJ2e12b9t2AESR5tEkvpnkxDis2N60GUuZrtDnGt7KA1qdv+7Ee1z4druthYUFfdM3fZP29vZ048YN3bhxQxsbG3HiAkCxF+XCwoKKxeJQW01caKyzpLhZrKTYEcDrHPES3CX3/SuTJIk1qMTt5FFBw5MkiVRCUjZOZfRNdAG3eEY8a+/T69srPP/88w9pNt5ZTqViHhwc6Pr163GCUsnOl+SADKswbg4rLOgooAzHeLkXCs3/TBZiSy8ORiFhGUErw22ETgZA5VsEcD5XDrqbAwZJg64A3BNuL1zas2fPxr63m5ubWl5e1s7OTvwclTaShpLzWEZP9nvZF8/TNz+iUobFhR94smx6tLCwoAsXLujy5cuRFsk95PP5uAESCwTuttMT8SqcewstkzHR7JpzP/nkk8c2/4jbX6y87FQqZqfT0a1bt+IkW15ejqRlXB3cQlwvXMB071nylCiydyIABPLJAYgDWRruKdbWz8XEB6hytxUriMtJesLHhAXy6+Omsn1ACEE3b96M7u0jjzwSKy6wLr6fI5ZpYmIidvOD9cMxTkaQFKtOYCbhPaDof+kf/kNN94n+r0TOSnr7K/xM+8IFbX3+80Oejy9+oNwXL16MrvyDFhbhF5NTqZieQkDSWx04P5XqDTqlA/psbW3FyeslYd6jBrcU8IeEPAoPpxSrw3l9N2VHa/mBuYJbOzk5Ga014wD5JOHvfWDZW3Nvb0/j4+MxxeDdCs6ePasQQqwskQb9Z7vdbsz3Tk9P6/z587ftDg2o9Nhjj8Wd0aAG0hDs4OBA09vb+r/+5t+MLjNd7NOUO7wZaVCziWUnPMCtdVKD99z943/iT+jq1auanZ2NY/VmYCxCkDmOQ45149pXuwBqSIqTnFpCbynpHdclDaU5UBwvqsYiwSYB5sdiQWPztIY0KM6WBjtPcR1vVULfIVBaH4OkiNzC3pmeno4sJ9xfPkNSHqtdKpW0vLwci4tRuLGxsbhtoMeqLAoQM3iWUO6Iv4vFovb29nTr1i2trq7GPS59O3jSTgBELEAANXwHuIGutPQHhipJuIEi01aETgown1gYSJ+x6MLRPUm5L8UMIUxJ+peS3iIpkfTnJT0t6T9KekTSFUl/KkmSzdBbfn5K0vsk7Un6c0mSfO5+rn+v0ul0hiYTQAz5TCaeWz1QUFxNAJ50jpLVluJlOudh+VxJyfn5317Fwg8sJFxS4jLiPCcsOIjk1ufg4CBWztypGwFxM+Vda2trarfbOnfunC5duqTp6enYs5Z7BrjyJsqcb21tLd4biK9v8V4oFDQ3Nxct8MLCQnzGcHdhQHkFjrcH4ZkSn3I/xIqASFyPhXhjYyN6ASywjlhLOtEt+KT7t5g/Jen/S5LkB0IIRUlVSf+npF9NkuTvhhA+Iukj6u0y/UclvbH/822SPtr//dAFy0Mec3NzM67OvI/7RDwAY4SSJ+8857xNb3/oq7631GByeamRWwHOKQ1IB7jZjAmAg/8BfbwtJrGf5xiJPbEquPQhhJiywBXGwrLx7uTkpFZXV2PbEp4V7j4AUXprQIgEDkpB4+N5TU5OxkWH2DMNuIEo44qzAZOkuAD698ciAW+Y7/tLX/pSRF9BeOH24omQNz0puWfFDCHUJf0hSX9OkpIkaUlqhRA+IOm7+od9XL2dpv+GpA9I+pmk53P9dghhKoRwNkmSpXse/T0KExahMZLHlxQWS4quFJMYF/Lw8DC6utIgl8ikIHXg1RGO3rKak5Lgmo5scn1vG+LNrNJW23euIr/qBc3SoN8Mn2X8jJP4DPez2WxqZmYmxonlcnlIWXhuIMrsd4J72mw2Y45wYmJCkmJFiVehOEUObyZNRYTPi3cCKOYUxDTjivcY5/PPPx/H6veA5wFr6CTlfizm6yStSvrXIYRvlvRZST8uadGUbVn
SYv/v85J8m6Pr/deGFDOE8GFJH76Pcb2k8GUzKSB1Q7RmIjvf04uPiQs97+Z1jV6QizXDepD8x+qhHCg4FhCygqR4PupCW61WBJMAO5xE4OVSsGgAT3B104AS7h+vwzZCicnrku/l/IQF5HFJ33j1BEo9MzOjdrutW7duxfvlO7hx48bQLmReTgYZBK/Bq3ikwTZ6LFrOhOLeUEBpgBp7iCINthNsNpva3NyMr5+E3I9iFtRDq/9ykiRPhhB+Sj23NUqSJEkI4RXdXZIkT0h6QpJe6WdfrqR70iwsLESldCvpsWe6ZhHl5DN88aRSUEq3Trik3W5X4+PjsdVl/15jjEOOD36p09+wplDfvMkVcZW3C8G19biWOBZgiXHh7qZ7A+E2uzuO+47FPzo60ubmZlRwiOhSr13j9va2bt68ObQgsGmSpGhhaTVCasprSt0jAbBBCXmfFEjaQ3AAj3alION4DDz7fD7/infneiWCEXBvIS33o5jXJV1PkoRM7CfVU8xbuKghhLOSgLduSLpon7/Qf+2hS5IksSZQUgQJYJjgrqIIPEjI5+vr63GC4H6yCkMC8HSINKg5dNL52NhYTF+k3UqviKB8CWtB/SPbIMCQgUiOgmPJAImInxyAwhOgQJzNdHEJife8/QZglC9iaZYTi4TUUzpP81AcTSpHUtwCwjfLxbXkOXqTMRYnZxvxnL3vLd+fNOioAMXS3VkWADyaF1Oa+xU2r/3qV79612PuWTGTJFkOIVwLITyWJMnTkr5H0pf7Px+S9Hf7vz/V/8inJf2lEMIn1AN9tk8ivpQGTavSKyrujVfvc6xXfHj6gnMQc6HQnl5pt9sxJYNrxThyuV47D3rceOUJSo3SEH8yhna7rdXVVS0vLw8xibBK0sBtx1KWSqU4VphG3qaDomPum/QF2wJiHSnJcuTU0xveIYI2LtSW0p7Fu9mBEjN+rL2nS7yAHMVD+VhseGacB3SaHK6keM8sPumUGBjCccna2tqxN3z+y5L+XR+R/bqkH5GUk/SfQgg/KumqpD/VP/YX1EuVPKdeuuRH7vPa9yx8GUwMqtePjoa7cDtU7zGek8fd6uKiSYppE6hy1FA6uELRr/cfwmqzWLAo4PahVCgO9+OWhVwi7iqWq16vq1arxaJwPosF5BnQBBm3EMQznb5h8pOcT9PgmOhzc3ND1EEvOnd3mfO40qGsjvK6+8n34TlOnhcxKa1XsM7UruKOk792SywNLOxJyH0pZpIkX5D0zju89T13ODaR9Bfv53oPSljRiSM2NjbiviC4TN7hDmvnltApXUwkSAm8DqPHWUF8DgAIhUPJ0pUhVO+nW2ikOalOYuc8xMFYJ5qMeXrFEWY/h7uBNKsipkzHrlRx4H7zDDnXneJZfrNAkNYBwGEMDsxIwx3MUSIWDo8bQbQBiYh/JcXnR4kXoBhAE+DaqxX8edUKbh1fHu4ZXzLWgyoNrwJhQjJpHHQAMGH15vO8T0zpRHkq7L2DHCs5yCc5z3w+H2NSqiFIyGONmIhra2vRerkl8goMFJExelqBMfjCg0WCuAAIA38YwVqx2KRd4PR9ShpSYo+B0xUpkCGcplcsFqMVxoPxRcdBJGmg0Cw2LCSg7N1uNy4UJyWnTjFRGt9ygI1zcF2xQo7sOeHZmTgoLAAN//vEdxAHZWXHZi/P8uoGlIlKCHeBJUWloGpjYmJiiAHjxGwsNs2/WGywatwLn+Fazlf1fCUWlu0Bp6enY0rCmzg7swplYmHy4/1+YBA5DU8abEXv6atcLhcVEvEYldiZe/YYkngay81W8CwIcINPSk6dYkq9L5VUgzRMh3MX11uFoGiSotIA0DjaiDVFIXxSu6vmFgZrgUKQ30NxW61WVAA24EHZfasEOtmtr6/HGkiQYyY8yoDyO0PJwRPuM12i5QXRLCr00KH8ilSH5zJZ6HgOeBzE1vV6PdIMGYuzo7zoAE+AhRAldTQX99aVkUUWVhRK7psxkYI
hN31ScioVky8UuL9Wq0UQxl1PABhP2DtIQcqD8wD2HB4eDsU+bhUdXWTCcR1cZhSdVAikBlxIiOlUgWBRAa/orepbz5GP9c1micfSLiVjcFoiMRgLB2Ml5vUds0GHse7pVh1O5uDZLSwsxO3qiXe5nlPsvA7UASVf3Ny7ceK/hy7pZ88CSFMvQo8HUfZVLpd17tw5XblyJVrkl5JTqZgoS6PRULm8rF/4hf960kN6zcrU1FaMR1FqSdGCIxMTEzEOpC8sriYLohMMJA1ZVWlQCO7sK+JIkFppQCRwF57FgK4WjgLfr1QqFf3BP/gHtbS0pDNnzmhhYUHNZjPbuNYln+91VRsfH9fe3p7e9a4fvI0tg4UECHLWC1Q74HfP1wGe4OZ5+VKaBiYN8qdYZY97nECPS+2gEACSE9cd8fR415lKTrZ3VJl8K/dFzSB9WlEIQDEnQJBu4HpeetazvvkhvrBX2eA6O8CWpjeisO7mM3Z3iwkZAH5ou8lGtxTGe64TL0dSzF+nEe4HMeeWl5fV6XR05coVXb169SWBpVOnmCEE1et1zc3NRZ7nxsaG1tfXIyDDak63cTraoWwecwLYMIGY4ExkjvOu4F5algZaPA51a8BkpACaiYj15zhAFwAR3F3G6nt7OqDDIsHmQHQ+cAvE/ZMrBdnkHO4244I6AuqIKe4343QGlJMl+IwjtVwLQRG9VA5gCqX0wm3uCTec50/4wjN8uW7nS8na2pp+6Zd+Kf7/ctDeU6eY3W43MmWkwapJbpE4g3jH0yGs5g7NpxG+NFMIC0kc4wglCuqxXj6f19TUlKRB2oExOOEdi5DL5WJLRpTBFwbiP1w7zznyw6SnvSOuHouGWyZ2FvMu817JgdUuFApxpzTu0YusWQiwTLiru7u7QykV6IfEh75YMB7OiVXmeI/xXTGxznyG5+opHu77pOTUKSasH8AcuhR4W0Nyg9IAzcN9lXRbqiSdb2Ti+fmYFOnEt7uF5Dk9LcJEx90kTsLt8soVlAkL6WPwNAyTDouNpcKFph8sribHkoKAGUPqKI0ys5BAdQMVxgNxsj3PmeZozWYzAnPFYjE22vZFhOeD++9W2vOePHfatfh3ym/AKhYzXuf8JyWnTjGl4Z2UPbbDXWTS475Kg54+zv7BdWUXK2equHX1dIz3UoVcwDiwnF4lQVxG5QeTzXcI889yLkrGfB9I7znk1hUlxvqki8I9CU9Jl3f4kxTPQ3UKz6xUKsUNfsk7kjNMb65DmFCv1+Oz8fI5L2R2vjDX8VgTeiHd9OmeICl6HCieKz3/+3d/EnIqFROLSHoEK4RLR9c6ABQndPMaExsrysR0xoyDPx6b4jp5qoIJ7hxPt8xMNi98dkvGuDgWsMdZOVTxY71J6mPhyO+5S+48XiwcgJCk6I4DSKU3j6WoutFoRMVsNBra2tqKz3NjYkIf/emffijf/Y3+PXrsyoLL/7jBfCcnIadOMckR1mq1GHc48doheG8k7F+SKxO5Lq96QImcoOD7jUgaciFJzDvdz1drL9XysXrch2Kw2tM8i147xGSOBDMWR44dwMFCpSmIjNPpft4TlzFjeX2B8k4DXO/v/9iPxV22vC7UN1ZyYj8IOjxXAK5Op1cbyrk2Nja0trYWaz1jrax9hx6mcC8nHV9Kp1AxkyRRs9kcStzD/AAUcBfJXV4UQRrwNl35QghR0bFmDv3jkrm1w0VEgbCsXE8abFjkDB0U0ycsyoV1T1dqOJ2N83uawZUWBUO5SVm4q+uUPkCmcrkcGyh7mw+vvAFokwZb3XuaAwX2dA4LErS+mZkZzc/PR74wSgglEaKH72nCeHH7eTbuueBVHKcbm21cewfBfVtfX4+TO205UAQqQ1hJpQEY5GyetAvpriMcWiYJky9NfUu7tr7BEACQv4fbSWohbQEYt3N3fYKjHN7+hNQQJWsoD/dJemNra2sILSWWZrt6aHl0aECpUQgWIbrS8yx49l7j2u12h3r
70qh6cnIy0hBp3MUCA0pNysPRc5TPz1+pVOIual4QflyS3nH7TnLqFLNYLMYvFaSTOGyQEA8xgc9kAUxxmB4F8HhSUiQZcDzKj+V0Pq2kIUXmPCicWzlcN1Z6R0UZM/WIKALHOL0N6wFhH1cYcSVyS+VUOPiwbt09PkMhcrlc5KFyjBcCuMWVNPQMsc7kQFG87e1tra2txRpYFtv9/X01Go3YPY9FQRr0BcICez2nA1heUHBcypkuEr+TnDrF9JQGLpMjlcSW0gCtSyeaUTaELzftKrpLjPDFk0LAOnjceCdGjoNQjiZ6PSGfIy/rOVbSNumiYy/RSqPO3AvW0UEST3k4J5VKDSpouAYTkfFCAuA78dyud/rzptt8D74IuAfCYgYwhVvtPFtvsObhhKOwPIOXQwQ4Ljl1ismE5EumDs8nq6/k7uoxKX1ioJAOAAH1cw0vA0PBmDy4ml6si9LiXmLR3f2UBk2ySGOw4Dg6jFVvtVpD98lYUELGD9gDSOO1o+nJnQajHERyi+RxMe6jp2zStaHb29vxvFhX58p6vpZnvrOzEzsWOsXRY1TGCPPJFzu8jPQCeVJy6hRTGqRF0nQzL651Ohx/e6zCpHHwQBok1/3LdsFdxfXCnaxWq0OEa3JzXhDNpHQgQxo0XZYUlckZPc6w8a0dXLhntnCnPxFxJkpFfEh+loUE19cbgTlxwrehYNHheaGgfM7DCgeyPFZ1KwdA5bFwPp+P52PBIrZ01xsldeIHSl2r1V5yj5HjklOpmCiOu3i+wgLu+IrK/97lwK2El0g5aioNdhBj0kgDN9lRWVxXzsUGt1hKFgSPx1ggsEQk973WNJ139W5zjMXBIp4HJAJpYJ3dtXbLTVoJS+RjYjFzr8PzxCxm7H5G3EwsxnN2C5iOSx1dZvHhN+KF0L6YObgmKSK/LEonIadOMXGVnPiMgjoTx1MaHjN6Dk0abHrr50qDBq6AaZSQSUnrxsPDw6G0irul6XjXFTWdp3Rmi1txSZFS5wuPs4jcGqUrRSQNWeJ8Pn9HBfIqFo8B/R5cMVFkV7B0eogx8F344uILLM+OxZbnLQ1vYej5YX+mUAFPcpuEU6eY5XJZly9fjolo4i4sgMc6Hs9Ig5203LpIA8vFMUwCB208n+juqDRoWyJpqKJCGsTCTmhw604c5PGQ514RR4LdmjlazKLBOTy14l4DY8WCukVPKyb3m3bDuZ6T3x3sQpnwUOgb5F4O341X+7giQ2B378Z5tzxfz7WC2nth9YMWXPrNzc27HnMqFfMtb3mL2u22bty4EZUzzQJyIOjo6GgoZ8ek9hWdlhS4R0wct7rOZXWX0vOgTGKujSVjMhH/OSCTttAeE0saWmiY/L4fqAMhLATEgiCoxI8QAvy6TjekWZd3cvdnyn17nIp4Lhir52QBLCK/0/Wt7o7yOZ4DbCWeI+dhQYVsQqyftqQPUqjQyRTThNVzfHxcFy5c0MLCQgRi2J4NChdKwoREKXDHUBiUkPN4IS+WAzc3ndd0l8oV05WVyeUWEuVw9BOLTnzlXeOkQSzmhAp3sVE6X6Q8liRX6kQIZzLxP9fwRQu32K1oPp+PJAP3Jty99pjWO9uBVuPxuFeC9eS7QtkkxbIySbeR47lPFtDjymM2Go1YtH03OXWKeXR0pKWlJU1MTAyhhp7OcMDAEVxJMVnvtYocJw2sFRYCK5SO+aRB71fyeR5fYaEkDY0PZXIQiM8y2V35JA1ZB87r7qCnOfgc98GxnqyXBvGtLzxcA3fc00c8I19AJEUF49yermAhAvXlnBQgoLCUnTmIh1WCnJDuE+z3iPdBR8I0eHcScuoUs9vtxnxXuVzWzMxMVFKvrqhWqzFhDQyPwrkwIbwrGyux5/gcmAAIIf/I6klMxUrPhL8TGOWpElxorwn1psncN0pG+iGNRvv5yQ22222Nj49HK+XIrVt3LGQ6HEiSJHYTwFo6ESD9PDnGvy+srtd+soj5syV1U61
WYzE3n/WYk0WpUChoamoqHisNdoA7aTl1ipkkSdxQRpLW19e1t7cX+adTU1PK5/OxYTLiq7ikIWslDfKDXjLklScOyDhYgoIgXueIZeHaMG/cJSQOZaLh6roVI89HDpIKCiycx5FpwjmK4SkiLDiWzN3udC7YN8VFGR1wcZ4wSpNWfkkxX+megTRcGYPFg2CPSw6YhAdTq9U0Nzen2dnZGA93Oh1tbW1pdXV1CI0+KTl1ignCR8zChKDBr7NR0hPPk+xMsDTY45bJ3WNcLmlA2QOUYAyOTrLCe9sMjy+5vpMN/HrSwFWGc8qeJlzP0xzSILeKoLjuinI959qmGTZU7jC5Dw8PYyzpSDWW2ythiO+4TweaeMZOxWOBgqSRy+WGKIB4EzyT8fFxzc/Px8oUzkHpWqVSGdrO4aTk1Clmt9uNVe1A4xMTE3FiYbE8d4gL5aghSiop8jrTVhTlTBPC3a111okjsP4aPFUmmLuUnn5hMqKwzsrBSqfJ9xzv6KRbT3dXvZIEwIaxYkWxyt65z8fLOFkQHBlNKyDPyONzCPdcz/eVabfbQ21i/Fx81/QhkhQVcHd3N9ZwOok9s5gPUTzmkHqb2WxubsZJyGqNNfPYgyZTnh7wicvn/Ut1S4myO7jgDBannRFnegUJcaSTIbwZldPtsJ58No2KoixMfixVp9OJ8TIKzeLBfXFNV0zcYUm3WXeO45oOJjl5gGfi+VBf2NyF9YWJZwsSjTvsdEBidxYlJ+CzizUNsh3cOw7h3tLxtcupU0xpgFK6hXJ6GcpEWROcVdwk0D2nmDlDCGX1uMbdWK6Dm8jq7sAOJV9YWCZbemNc8qe4bP6Fs0ig4LiPbg2dhYQ34FYpTSpgYZGG3Xd2x8JKe+7X3VaetxMBWLDSHoWP3YkXxLmSooX2uNjTVCyunhJhwaLNJwsdCxqKfFyUvLm5OS0sLOj3f//373rMqVRMd1FYrZmoDpN7DtN37/IKEi+5YjJ5PMrkpxICq+gIr8d1vMcYfDISg6XZOWmSgedBOYdbcqweiuV5Vywa1MC05fLzYy19awkWixBCVArykelY1lMwaVCHa7uyplM2njJyC+2WEs/GG4u12+2hHbdBjmH8sLAeF/NndXVVq6urL3rMfV05hPBXJf0FSYmkL6q3Ge1ZSZ+QNCvps5J+OEmSVgihJOlnJL1D0rqkH0yS5Mr9XP9exN2mNDcT4W8mdqfT6yWTZuQ4G8e5rGlUkQnHJMGCunK6FXRmkceaTHiQTp/Ukobix3S5lFs8d7XdHb0T2OXuvXNq0yQFnhuf4x739vYiCOQxpBPkXRFw4T325NwoK4CRu8zO1vJwhF5EMzMzsUMBeVNQXEjr7DJGd/bx8fGh+3uYcs+KGUI4L+l/k/SmJEn2Qwj/SdIH1ds1+h8nSfKJEMJPS/pRSR/t/95MkuQNIYQPSvp7kn7wvu/gFQqrLvGZNFz0CzLnAIxPTmkQN6aRUCat82g93eFxmyfmmbCe8EapWDzcuiFp14/FghgJK50GrpwNxHHj4+NDMS2WyN16t3gojrvvLFTp/KYvhnzeWVC+MHIPPBNvX4mCs3Dx3fj1x8bGND4+rrm5Oc3NzWlqaipaSTrzAQb5tff29rSxsRG/Vw8/TkLu11YXJFVCCEeSqpKWJH23pB/qv/9xSX9LPcX8QP9vSfqkpH8WQgjJQ16O4Fem3SOUhElLzOYEaVwgaGDEnAhKgZIxuYgPneDOxPIKDAqMHS31rm24mn5uJiWW1ie1pCGlR7zqwpPxTMa7kRA8H+vK5JY//dzc9ae0DUvHMU4q4FxYO3flOcbTTcSPKOXk5KQWFxc1PT0d78kLFiRFK0loQd9ZFgsU/6Xafxyn3LNiJklyI4TwDyS9IGlf0i+r57puJUlC3uC6pPP9v89Lutb/bDuEsK2eu7t2r2O4FymVSpqamhqafExuJgkWDiuIxZEG/Vo6nU50G6WBO0YMiVVGmQAW3EVl0uHW+XU
9heGxHUrmltFrL13c0qJc0mDyYxnb7XaMrYip02VUjBtk2O8bhWHRIG5j/FwDq5Z+zxcaT/s4Msr3Q7tKYnvOXSwWNTU1pdnZWdVqNR0eHmp7eztaYNhH4AaARq6U/vzcTT4JuR9Xdlo9K/g6SVuSflbS997vgEIIH5b04fs9z92kUCioVqvFieeTHEXkC/Jkurt2WB+H8N3aet5OGi5EdvfN+/1wfp/oTpfjPBzvvFcmvFt3jsOF9gbGLCK4qAcHB3FHMGclMV4fsxMK0sQI7hHX0V1bPuuLVpoOiML6oujW2VFW8qQeR05MTAzxYp0EkiS9tqXsHcoCynfsKDXf16vVlf3Dkp5PkmRVkkIIPyfpOyVNhRAKfat5QdKN/vE3JF2UdD2EUJBUVw8EGpIkSZ6Q9ET/nA/cza1UKpqcnIyoo3+BrNas5kzo/rjiRPHGXUwkTwMgTD5HVN0aYpEkxdSCW2lPZ/SfRxyLu4+ck+4HDgI50MQ1uE/ccMZCbWVa4Xjf61Jp2IxCcX1ocB4HIixkHtt7GslRY7f2WMparabJycnYt5YG04zZ87jE061WS41GI+YqHR12t93HyhhfrcyfFyR9ewihqp4r+z2SflfSr0n6AfWQ2Q9J+lT/+E/3//8f/fc/87DjS0mqVquanZ2Nm83gDpL+CCHE0i0sGa9jUdJunEP26RjPAQYUyduLcB7PNXqlhFtozofS8HlfAFAkB2E8XnJl98UBC5G+N8AhR4dRPlxNd6lRWBePlbHGbh25lsewUCJrtVqkE2IpUVBiY2f8dLvdISIBikftp1Mo3WIzDiwz8etxyPz8vBYXF49n49okSZ4MIXxS0ucktSV9Xj1L918lfSKE8Lf7r32s/5GPSfq3IYTnJG2oh+A+VAkh6MyZM5HLiWWhpQcF0wAD6dVbGl7ZURBcJaeC+ReeRiSxjD4xPRWBoNR3sl5O6fNJ7TnS9CKChWKiYil8MrobjavIOf06HMMYIAB4OODgGt4C58UldiDKqYMcx36eniLC+q2trUWUmTiyWq3GMjpiz2q1qo2NjaH9UrgH7zrI98NnX4yZcz+ytram9fXbnMUhuS9UNkmSn5T0k6mXvy7pXXc49kDSn7yf692vsGLzxbOy7uzs3IZcuusp3b4lXTrWlAaIbzpW8mN5z5FWVzhn4aQtiTQoi/J8oE8gJx+g2I7gMl6/D8blNDYvISPe5FgHrhgHbKi05eEYP9Y7J6Cknj7yjYn8WbsScT90TQCdhTwvKZLmJcU0WKczKGbnXrCuINsQInwnsgcpvtDfTU4d84fYygGEcrms7e1tdbvd+KU5OORMHAds3EL5ZHdlwDoCNjhw4pMYtDMNIHlaBVaLE7vTyX5+p60VyoAicI10Yt7zki5pL8AbODt66akeZ1P5vaOUuKK+KJJeQnxxxLXm/l2p9vb2Yu0sljOXM/6wBkrupWC+AKOQeBWeCnvYcqoUM0kSXblyRQcHB6rVasrn80O5v06nE2MWrESaAufQ/50YPm4tmQjkC/nyXelQljspr09Ecnu85pZBGsRxbqlRGP/b0zK852AJSsK9cf/eW8fvGaVzb4HxpC2ou6koYJrdIw24zA5IsWjwPCuViiYmJiJ32J8Xiw/7rOw193TQOoxWkOd7eHiobqejTrujvYP9oVrYdnuwE/dJSDipC78cOQ5Utn/eOyKofLFucRwZlHRbbvFOVQhuLdPXw7I62HInEAJXzS2co7vScIsSFgqP5dL3l87R4j14+sJJ+E6+gDQg3Z57xAI6od+tqFs8lBMPAeFeHZnlnrlHxuZkDNBrf27tdltLS0tqbDd02OoxljrdREedlpo7TXXaHXWTrrrt/t4x3bZarSPJFhz/ro9RPpskyTvv9MapspiIB/9peaXuC5PC/7/Tb8TZQem0AJPOY0EHSFxB+eEafj3PN7pr7dxUT/VwTiStYFhsgJN07J0ey52u7Swi75vLYkOczzndIsPYkQYkC7y
Pu+Uau91E5Wpef+j7z2hnu6ud9UO95W1lffG39vX1L+7pO94/q+ef3lNtvKhvfNuUrj23r8//5lqP9T0CcioV80FK2uNwF/FOcj8czLSS30k8BZP+HL9RfreevO+AkTQMgvk50ymcu+X8XNketpx5pKAzbxiTrib67K8ta+F1Z7V+q61iLaeLbx5XdSanWi3ol/79TdWnyi99wnsUQMdXsuhnivkqkpcTdrwc9+thckBPMlTab3a1tdpWrd7RmUs1He0n2l5t6Q1vm9LBXkcLZ8va3e3q0bdMqlwu6tYLe0qOwWTOzs7q/e9/vz7xiU+oXq9renpaBwcHunLlyl0/kylmJq9RCVp5oa0vfGZLR0cd7awfafVab4/O5eebuvqVHVWqQYmCzr2+pmef3jy2RQTQKpfLaWdnZ4hddtfRn0bwJ5PTJEEvHTi+nGPuYwR9IC/dE0ovAv6cHH0+k0weirwchTve9R+g6pVIppiZZDKCkilmJpmMoGSKmUkmIyiZYmaSyQhKppiZZDKCkilmJpmMoGSKmUkmIyiZYmaSyQhKppiZZDKCkilmJpmMoGSKmUkmIyiZYmaSyQhKppiZZDKCkilmJpmMoGSKmUkmIyiZYmaSyQhKppiZZDKCkilmJpmMoGSKmUkmIyiZYmaSyQhKppiZZDKCkilmJpmMoGSKmUkmIygvqZghhH8VQlgJIXzJXpsJIfxKCOHZ/u/p/ushhPBPQwjPhRCeCiG83T7zof7xz4YQPnQ8t5NJJq8NeTkW899I+t7Uax+R9KtJkrxR0q/2/5ekPyrpjf2fD0v6qNRTZPV2nv429Xab/kmUOZNMMrldXlIxkyT5DUkbqZc/IOnj/b8/Lun77fWfSXry25KmQghnJf0vkn4lSZKNJEk2Jf2Kblf2TDLJpC/3uqnQYpIkS/2/lyUt9v8+L+maHXe9/9rdXr9NQggfVs/aZpLJqZX73u0rSZLkQW7+kyTJE5KekLJNhTI5vXKvqOytvouq/u+V/us3JF204y70X7vb65lkkskd5F4V89OSQFY/JOlT9vqf7aOz3y5pu+/y/pKkPxJCmO6DPn+k/1ommWRyJ2Gr77v9SPoPkpYkHakXG/6opFn10NhnJf03STP9Y4Okfy7pa5K+KOmddp4/L+m5/s+PvNR1+59Jsp/s5zX887t3m/vZxrWZZHJykm1cm0kmrybJFDOTTEZQMsXMJJMRlEwxM8lkBCVTzEwyGUHJFDOTTEZQMsXMJJMRlEwxM8lkBCVTzEwyGUHJFDOTTEZQ7rvs65hlV9LTJz2Il5A5SWsnPYgXkWx89yfHOb7Ld3tj1BXz6btxCUdFQgi/O8pjzMZ3f3JS48tc2UwyGUHJFDOTTEZQRl0xnzjpAbwMGfUxZuO7PzmR8Y10PWYmmZxWGXWLmUkmp1IyxcwkkxGUkVXMEML3hhCe7m+38JGX/sSxjOFiCOHXQghfDiH8fgjhx/uvv+ItIo55nPkQwudDCD/f//91IYQn++P4jyGEYv/1Uv//5/rvP/KQxjcVQvhkCOGrIYSvhBD+wCg9wxDCX+1/v18KIfyHEEL5xJ/hy2mK9bB/JOXVa+j1eklFSb8n6U0nMI6zkt7e/3tC0jOS3iTp70v6SP/1j0j6e/2/3yfpF9VrSvbtkp58SOP8a5L+vaSf7///nyR9sP/3T0v6X/t//5ikn+7//UFJ//Ehje/jkv5C/++ipKlReYbqNR5/XlLFnt2fO+ln+FAn+it4WH9A0i/Z/z8h6SdGYFyfkvRe9dhIZ/uvnVWPCCFJ/0LSn7bj43HHOKYL6nUs/G5JP9+f0GuSCulnqV7L0D/Q/7vQPy4c8/jq/YkfUq+PxDPUYJeAmf4z+Xn1tvQ40Wc4qq7sy95S4WFJ32V5m6Qn9cq3iDhO+SeS/rqkbv//WUlbSZK07zCGOL7++9v9449TXidpVdK/7rvb/zKEUNOIPMMkSW5I+geSXlCvTeu2pM/qhJ/hqCrmSEk
IYVzSf5b0V5Ikafh7SW/pPJGcUwjh/ZJWkiT57Elc/2VKQdLbJX00SZK3SWpqsDucpBN/htPqbYb1OknnJNU0AhtejapijsyWCiGEMfWU8t8lSfJz/Zdf6RYRxyXfKemPhRCuSPqEeu7sT6m3yxo8aB9DHF///bqk9WMcn9SzNteTJHmy//8n1VPUUXmGf1jS80mSrCZJciTp59R7rif6DEdVMf+npDf2kbGiekH2px/2IEIIQdLHJH0lSZJ/ZG+90i0ijkWSJPmJJEkuJEnyiHrP6DNJkvwZSb8m6QfuMj7G/QP944/VUiVJsizpWgjhsf5L3yPpyxqRZ6ieC/vtIYRq//tmfCf7DI8z8L/PoPx96qGgX5P0N09oDO9Wz8V6StIX+j/v0z1sEfEQxvpdGqCyr5f0O+ptR/Gzkkr918v9/5/rv//6hzS2b5H0u/3n+P9Kmh6lZyjp/5b0VUlfkvRvJZVO+hlmlLxMMhlBGVVXNpNMTrVkiplJJiMomWJmkskISqaYmWQygpIpZiaZjKBkiplJJiMomWJmkskIyv8PCGpyBKY+rYcAAAAASUVORK5CYII=\n",
+      "text/plain": [
+       "<Figure size 432x288 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
    "source": [
     "from sparse_coding_torch.pnb.video_loader import classify_nerve_is_right\n",
     "from matplotlib.pyplot import imshow\n",
@@ -241,7 +266,8 @@
     "\n",
     "labels = [name for name in os.listdir(video_path) if os.path.isdir(os.path.join(video_path, name))]\n",
     "\n",
-    "videos = [('Positives', os.path.abspath(os.path.join(video_path, 'Positives', '93', '3. 93 AC_Video 2.mp4')))]\n",
+    "# videos = [('Positives', os.path.abspath(os.path.join(video_path, 'Positives', '93', '3. 93 AC_Video 2.mp4')))]\n",
+    "videos = [('Positive', 'pnb_same_frame_362.mp4')]\n",
     "\n",
     "label, path = videos[0]\n",
     "vc = torchvision.io.read_video(path)[0].permute(3, 0, 1, 2)\n",
@@ -255,7 +281,7 @@
     "orig_width = vc.size(3)\n",
     "bounding_boxes, classes, scores = yolo_model.get_bounding_boxes_v5(frame)\n",
     "\n",
-    "nerve_bb = [bb for bb, class_pred, score in zip(bounding_boxes, classes, scores) if class_pred==1][0]\n",
+    "nerve_bb = [bb for bb, class_pred, score in zip(bounding_boxes, classes, scores) if class_pred==0][0]\n",
     "needle_bb = [bb for bb, class_pred, score in zip(bounding_boxes, classes, scores) if class_pred==2][0]\n",
     "\n",
     "nerve_center_x = round((nerve_bb[2] + nerve_bb[0]) / 2 * orig_width)\n",
@@ -271,14 +297,14 @@
     "ax.imshow(frame, cmap=cm.Greys_r)\n",
     "\n",
     "# Create a Rectangle patch\n",
-    "# nerve_rect = patches.Rectangle((nerve_bb[0] * orig_width, nerve_bb[3] * orig_height), (nerve_bb[2] - nerve_bb[0]) * orig_width, (nerve_bb[3] - nerve_bb[1]) * -orig_height, linewidth=1, edgecolor='r', facecolor='none')\n",
-    "# needle_rect = patches.Rectangle((needle_bb[0] * orig_width, needle_bb[3] * orig_height), (needle_bb[2] - needle_bb[0]) * orig_width, (needle_bb[3] - needle_bb[1]) * -orig_height, linewidth=1, edgecolor='b', facecolor='none')\n",
+    "nerve_rect = patches.Rectangle((nerve_bb[0] * orig_width, nerve_bb[3] * orig_height), (nerve_bb[2] - nerve_bb[0]) * orig_width, (nerve_bb[3] - nerve_bb[1]) * -orig_height, linewidth=1, edgecolor='r', facecolor='none')\n",
+    "needle_rect = patches.Rectangle((needle_bb[0] * orig_width, needle_bb[3] * orig_height), (needle_bb[2] - needle_bb[0]) * orig_width, (needle_bb[3] - needle_bb[1]) * -orig_height, linewidth=1, edgecolor='b', facecolor='none')\n",
     "# print(needle_bb)\n",
     "\n",
     "# # Add the patch to the Axes\n",
-    "# ax.add_patch(nerve_rect)\n",
-    "# ax.add_patch(needle_rect)\n",
-    "plt.scatter([needle_bb[0]*orig_width], [needle_bb[3]*orig_height], color=[\"red\"])\n",
+    "ax.add_patch(nerve_rect)\n",
+    "ax.add_patch(needle_rect)\n",
+    "# plt.scatter([needle_bb[0]*orig_width], [needle_bb[3]*orig_height], color=[\"red\"])\n",
     "plt.show()\n"
    ]
   },
@@ -287,7 +313,41 @@
    "execution_count": null,
    "id": "c18f383d-86a6-42c6-bbf9-3df2b3a86d8a",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "100%|██████████| 1/1 [00:03<00:00,  3.32s/it]"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "112.38454817660158\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "\n"
+     ]
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAD4CAYAAADhNOGaAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/MnkTPAAAACXBIWXMAAAsTAAALEwEAmpwYAAAPbElEQVR4nO3cf6zddX3H8edLqp3IUn5VBEq5ROpcCft5VrZMMyK//2BlilkVY2NYuiWSzBk367oMRFnEqJBFt6QRQ8OI6MiMNc51pY5ojFFuEQdFWSsIFBEKJSysGVB974/zxVzubuk995ze08vn+Uhu7vl+vp97z/ube+nznnPuJVWFJKldrxj3AJKk8TIEktQ4QyBJjTMEktQ4QyBJjVs07gHm4vjjj6+JiYlxjyFJC8r27dufqKql09cXZAgmJiaYnJwc9xiStKAkeXCmdZ8akqTGGQJJapwhkKTGGQJJapwhkKTGGQJJapwhkKTGGQJJapwhkKTGGQJJapwhkKTGGQJJapwhkKTGGQJJapwhkKTGGQJJapwhkKTGGQJJapwhkKTGGQJJapwhkKTGGQJJapwhkKTGGQJJapwhkKTGjSQESS5Mcl+SXUnWz3B+cZIvdOe/k2Ri2vnlSZ5J8oFRzCNJmr2hQ5DkCOAzwEXASuAdSVZO23Y58FRVnQ5cB1w77fyngK8NO4skaXCjeESwCthVVfdX1XPALcDqaXtWA5u627cC5yQJQJJLgAeAHSOYRZI0oFGE4GTg4SnHu7u1GfdU1X7gaeC4JEcBHwQ+fLA7SbIuyWSSyT179oxgbEkSjP/F4quA66rqmYNtrKqNVdWrqt7SpUsP/WSS1IhFI/gcjwCnTDle1q3NtGd3kkXAEuBJ4Czg0iQfB44Gfp7kf6vq0yOYS5I0C6MIwR3AiiSn0f8Hfw3wzml7NgNrgW8DlwJfr6oC3vzChiRXAc8YAUmaX0OHoKr2J7kC2AIcAXyuqnYkuRqYrKrNwA3ATUl2AXvpx0KSdBhI/wfzhaXX69Xk5OS4x5CkBSXJ9qrqTV8f94vFkqQxMwSS1DhDIEmNMwSS1DhDIEmNMwSS1DhDIEmNMwSS1DhDIEmNMwSS1DhDIEmNMwSS1DhDIEmNMwSS1DhDIEmNMwSS1DhDIEmNMwSS1DhDIEmNMwSS1DhDIEmNMwSS1DhDIEmNMwSS1DhDIEmNMwSS1DhDIEmNMwSS1DhDIEmNMwSS1LiRhCDJhUnuS7IryfoZzi9O8oXu/HeSTHTr5yXZnuTu7v1bRjGPJGn2hg5BkiOAzwAXASuBdyRZOW3b5cBTVXU6cB1wbbf+BHBxVZ0JrAVuGnYeSdJgRvGIYBWwq6rur6rngFuA1dP2rAY2dbdvBc5Jkqr6XlX9pFvfAbw6yeIRzCRJmqVRhOBk4OEpx7u7tRn3VNV+4GnguGl73gbcWVXPjmAmSdIsLRr3AABJzqD/dNH5L7FnHbAOYPny5fM0mSS9/I3iEcEjwClTjpd1azPuSbIIWAI82R0vA74EvLuqfnSgO6mqjVXVq6re0qVLRzC2JAlGE4I7gBVJTkvyKmANsHnans30XwwGuBT4elVVkqOBrwLrq+pbI5hFkjSgoUPQPed/BbAF+AHwxarakeTqJH/YbbsBOC7JLuD9wAu/YnoFcDrwt0nu6t5eO+xMkqTZS1WNe4aB9Xq9mpycHPcYkrSgJNleVb3p6/5lsSQ1zhBIUuMMgSQ1zhBIUuMMgSQ1zhBIUuMMgSQ1zhBIUuMMgSQ1zhBIUuMMgSQ1zhBIUuMMgSQ1zhBIUuMMgSQ1zhBIUuMMgSQ1zhBIUuMMgSQ1zhBIUuMMgSQ1zhBIUuMMgSQ1zhBIUuMMgSQ1zhBIUuMMgSQ1zhBIUuMMgSQ1zhBIUuMMgSQ1biQhSHJhkvuS7Eqyfobzi5N8oTv/nSQTU859qFu/L8kFo5hHmm83330zE9dP8IoPv4KJ6ye4+e6bxz2
SNGuLhv0ESY4APgOcB+wG7kiyuarunbLtcuCpqjo9yRrgWuCPk6wE1gBnACcBtyV5Q1X9bNi5pPly8903s+4r69j3/D4AHnz6QdZ9ZR0Al5152ThHk2ZlFI8IVgG7qur+qnoOuAVYPW3PamBTd/tW4Jwk6dZvqapnq+oBYFf3+aQFY8O2Db+IwAv2Pb+PDds2jGkiaTCjCMHJwMNTjnd3azPuqar9wNPAcbP8WACSrEsymWRyz549IxhbGo2Hnn5ooHXpcLNgXiyuqo1V1auq3tKlS8c9jvQLy5csH2hdOtyMIgSPAKdMOV7Wrc24J8kiYAnw5Cw/VjqsXXPONRz5yiNftHbkK4/kmnOuGdNE0mBGEYI7gBVJTkvyKvov/m6etmczsLa7fSnw9aqqbn1N91tFpwErgO+OYCZp3lx25mVsvHgjpy45lRBOXXIqGy/e6AvFWjCG/q2hqtqf5ApgC3AE8Lmq2pHkamCyqjYDNwA3JdkF7KUfC7p9XwTuBfYD7/U3hrQQXXbmZf7DrwUr/R/MF5Zer1eTk5PjHkOSFpQk26uqN319wbxYLEk6NAyBJDXOEEhS4wyBJDXOEEhS4wyBJDXOEEhS4wyBJDXOEEhS4wyBJDXOEEhS4wyBJDXOEEhS4wyBJDXOEEhS4wyBJDXOEEhS4wyBJDXOEEhS4wyBJDXOEEhS4wyBJDXOEEhS4wyBJDXOEEhS4wyBJDXOEEhS4wyBJDXOEEhS4wyBJDVuqBAkOTbJ1iQ7u/fHHGDf2m7PziRru7Ujk3w1yQ+T7EjysWFmkSTNzbCPCNYD26pqBbCtO36RJMcCVwJnAauAK6cE4xNV9UbgN4HfT3LRkPNIkgY0bAhWA5u625uAS2bYcwGwtar2VtVTwFbgwqraV1X/AVBVzwF3AsuGnEeSNKBhQ3BCVT3a3f4pcMIMe04GHp5yvLtb+4UkRwMX039UIUmaR4sOtiHJbcDrZji1YepBVVWSGnSAJIuAzwN/X1X3v8S+dcA6gOXLlw96N5KkAzhoCKrq3AOdS/JYkhOr6tEkJwKPz7DtEeDsKcfLgNunHG8EdlbV9QeZY2O3l16vN3BwJEkzG/apoc3A2u72WuDLM+zZApyf5JjuReLzuzWSfBRYArxvyDkkSXM0bAg+BpyXZCdwbndMkl6SzwJU1V7gI8Ad3dvVVbU3yTL6Ty+tBO5McleSPxlyHknSgFK18J5l6fV6NTk5Oe4xJGlBSbK9qnrT1/3LYklqnCGQpMYZAklqnCGQpMYZAklqnCGQpMYZAklqnCGQpMYZAklqnCGQpMYZAklqnCGQpMYZAklqnCGQpMYZAklqnCGQpMYZAklqnCGQpMYZAklqnCGQpMYZAklqnCGQpMYZAklqnCGQpMYZAklqnCGQpMYZAklqnCGQpMYZAklqnCGQpMYZAklq3FAhSHJskq1JdnbvjznAvrXdnp1J1s5wfnOSe4aZRZI0N8M+IlgPbKuqFcC27vhFkhwLXAmcBawCrpwajCRvBZ4Zcg5J0hwNG4LVwKbu9ibgkhn2XABsraq9VfUUsBW4ECDJUcD7gY8OOYckaY6GDcEJVfVod/unwAkz7DkZeHjK8e5uDeAjwCeBfQe7oyTrkkwmmdyzZ88QI0uSplp0sA1JbgNeN8OpDVMPqqqS1GzvOMlvAK+vqr9IMnGw/VW1EdgI0Ov1Zn0/kqSXdtAQVNW5BzqX5LEkJ1bVo0lOBB6fYdsjwNlTjpcBtwO/B/SS/Lib47VJbq+qs5EkzZthnxraDLzwW0BrgS/PsGcLcH6SY7oXic8HtlTVP1bVSVU1AbwJ+C8jIEnzb9gQfAw4L8lO4NzumCS9JJ8FqKq99F8LuKN7u7pbkyQdBlK18J5u7/V6NTk5Oe4xJGlBSbK9qnrT1/3LYklqnCGQpMYZAklqnCGQpMYZAklqnCGQpMYZAklqnCGQpMYZAklqnCGQpMYZAklqnCGQpMYZAklqnCGQpMYZAklqnCG
QpMYZAklqnCGQpMYZAklqnCGQpMYZAklqnCGQpMYZAklqnCGQpMalqsY9w8CS7AEeHPccQzgeeGLcQxwiL9dr87oWFq9rZqdW1dLpiwsyBAtdksmq6o17jkPh5XptXtfC4nUNxqeGJKlxhkCSGmcIxmPjuAc4hF6u1+Z1LSxe1wB8jUCSGucjAklqnCGQpMYZgkMgyeeSPJ7knilrxybZmmRn9/6Ybn1Jkq8k+X6SHUneM77JX9oBruvt3dw/T9Kbtv9DSXYluS/JBfM/8ewMcl1JzkuyPcnd3fu3jGfqgxv069WdX57kmSQfmN9pBzOH78VfS/Lt7vzdSX5p/qc+uAG/F1+ZZFN3PT9I8qG53q8hODRuBC6ctrYe2FZVK4Bt3THAe4F7q+rXgbOBTyZ51TzNOagb+f/XdQ/wVuAbUxeTrATWAGd0H/MPSY6Yhxnn4kZmeV30/5jn4qo6E1gL3HTIp5u7G5n9db3gU8DXDuFMo3Ijs/9eXAT8E/BnVXUG/f/Onj/0I87Jjcz+a/Z2YHH3vfjbwJ8mmZjLnS6aywfppVXVN2b4gqym/w0IsAm4HfggUMAvJwlwFLAX2D8vgw5opuuqqh8A9Md/kdXALVX1LPBAkl3AKuDb8zDqQAa5rqr63pTDHcCrkyzurvOwMuDXiySXAA8A/zMP4w1lwGs7H/jPqvp+t+/J+ZhxLga8rgJe04Xu1cBzwH/P5X59RDB/TqiqR7vbPwVO6G5/GvhV4CfA3cCfV9XPxzDfqJ0MPDzleHe39nLyNuDOwzECg0pyFP0fTD487lkOgTcAlWRLkjuT/NW4BxqRW+lH+1HgIeATVbV3Lp/IRwRjUFWV5IXf270AuAt4C/B6YGuSb1bVnMqu+ZHkDOBa+j9tvhxcBVxXVc/M9GhhgVsEvAn4HWAfsC3J9qraNt6xhrYK+BlwEnAM8M0kt1XV/YN+Ih8RzJ/HkpwI0L1/vFt/D/Av1beL/kPzN45pxlF6BDhlyvGybm3BS7IM+BLw7qr60bjnGZGzgI8n+THwPuCvk1wx1olGZzfwjap6oqr2Af8K/NaYZxqFdwL/VlXPV9XjwLeAOf1/iAzB/NlM/8VFuvdf7m4/BJwDkOQE4FeAgYt+GNoMrEmyOMlpwArgu2OeaWhJjga+Cqyvqm+NeZyRqao3V9VEVU0A1wN/V1WfHu9UI7MFODPJkd3z6X8A3DvmmUbhIfrPJJDkNcDvAj+c02eqKt9G/AZ8nv7zds/T/2nkcuA4+r8ttBO4DTi223sS8O/0Xx+4B3jXuOcf8Lr+qLv9LPAYsGXK/g3Aj4D7gIvGPf8orgv4G/rPy9415e21476GUXy9pnzcVcAHxj3/iL8X30X/xf17gI+Pe/4RfS8eBfxzd133An851/v1fzEhSY3zqSFJapwhkKTGGQJJapwhkKTGGQJJapwhkKTGGQJJatz/ASqiqtNgTwbUAAAAAElFTkSuQmCC\n",
+      "text/plain": [
+       "<Figure size 432x288 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
    "source": [
     "from sparse_coding_torch.pnb.video_loader import classify_nerve_is_right\n",
     "from matplotlib.pyplot import imshow\n",
@@ -356,6 +416,151 @@
     "plt.savefig('nerve_plot.png')"
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": 43,
+   "id": "f45ae150-db84-48f3-8ed0-218b8898d703",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "pnb_same_frame_124.mp4\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "  0%|          | 0/1 [00:00<?, ?it/s]"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "10\n",
+      "0.7170215845108032\n",
+      "0.5800381302833557\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "100%|██████████| 1/1 [00:01<00:00,  1.47s/it]"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "0.14535117212543647\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "\n"
+     ]
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAD4CAYAAADhNOGaAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/MnkTPAAAACXBIWXMAAAsTAAALEwEAmpwYAAAQg0lEQVR4nO3cfYxldX3H8fcHVqiUyOOKyLI7KDTNGoy2t1jb2lB5EExwiZIUJe02xWzV8kdrTIohEeUhBaOVGmybjZCuhgqINa7xgeIq0VRFZikCq8KuK8gC6gqUhG4qrn77xz2Ll3Fmd2fvnb1z+3u/kps553d+c+5n78yZzz3nzGyqCklSuw4YdwBJ0nhZBJLUOItAkhpnEUhS4ywCSWrcknEH2BdHH310TU1NjTuGJE2UjRs3/rSqls4cn8gimJqaYnp6etwxJGmiJHlotnEvDUlS4ywCSWqcRSBJjbMIJKlxFoEkNc4ikKTGWQSS1DiLQJIaZxFIUuMsAklqnEUgSY2zCCSpcRaBJDXOIpCkxlkEktQ4i0CSGmcRSFLjLAJJapxFIEmNswgkqXEWgSQ1ziKQpMZZBJLUOItAkhpnEUhS40ZSBEnOSnJ/ki1JLp5l+8FJbuq235Fkasb25UmeTvKuUeSRJO29oYsgyYHAR4CzgZXAm5OsnDHtQuDJqjoR+BBw9Yzt/wB8YdgskqT5G8UZwSnAlqraWlXPADcCq2bMWQWs65ZvAU5LEoAk5wI/ADaNIIskaZ5GUQTHAQ8PrG/rxmadU1U7gaeAo5IcCvwd8L49PUmSNUmmk0xv3759BLElSTD+m8XvBT5UVU/vaWJVra2qXlX1li5duvDJJKkRS0awj0eA4wfWl3Vjs83ZlmQJcBjwOPAq4Lwk7wcOB36Z5H+r6toR5JIk7YVRFMGdwElJTqD/A/984C0z5qwHVgPfAM4DvlxVBbxm14Qk7wWetgQkaf8augiqameSi4BbgQOB66tqU5LLgOmqWg9cB3w8yRbgCfplIUlaBNJ/Yz5Zer1eTU9PjzuGJE2UJBurqjdzfNw3iyVJY2YRSFLjLAJJapxFIEmNswgkqXEWgSQ1ziKQpMZZBJLUOItAkhpnEUhS4ywCSWqcRSBJjbMIJKlxFoEkNc4ikKTGWQSS1DiLQJIaZxFIUuMsAklqnEUgSY2zCCSpcRaBJDXOIpCkxlkEktQ4i0CSGmcRSFLjLAJJapxFIEmNswgkqXEWgSQ1biRFkOSsJPcn2ZLk4lm2H5zkpm77HUmmuvEzkmxMcm/38bWjyCNJ2ntDF0GSA4GPAGcDK4E3J1k5Y9qFwJNVdSLwIeDqbvynwDlVdTKwGvj4sHkkSfMzijOCU4AtVbW1qp4BbgRWzZizCljXLd8CnJYkVfVfVfVoN74JeH6Sg0eQSZK0l0ZRBMcBDw+sb+vGZp1TVTuBp4CjZsx5E3BXVf1sBJkkSXtpybgDACR5Gf3LRWfuZs4aYA3A8uXL91MySfr/bxRnBI8Axw+sL+vGZp2TZAlwGPB4t74M+DTw51X1/bmepKrWVlWvqnpLly4dQWxJEoymCO4ETkpyQpKDgPOB9TPmrKd/MxjgPODLVVVJDgc+B1xcVf85giySpHkaugi6a/4XAbcC3wVurqpNSS5L8oZu2nXAUUm2AO8Edv2K6UXAicB7ktzdPV44bCZJ0t5LVY07w7z1er2anp4edwxJmihJNlZVb+a4f1ksSY2zCCSpcRaBJDXOIpCkxlkEktQ4i0CSGmcRSFLjLAJJapxFIEmNswgkqXEWgSQ1ziKQpMZZBJLUOItAkhpnEUhS4ywCSWqcRSBJjbMIJKlxFoEkNc4ikKTGWQSS1DiLQJIaZxFIUuMsAklqnEUgSY2zCCSpcRaBJDXOIpCkxlkEktQ4i0CSGmcRSFLjRlIESc5Kcn+SLUkunmX7wUlu6rbfkWRqYNu7u/H7k7xuFHmk/e2Ge29g6popDnj
fAUxdM8UN994w7kjSXlsy7A6SHAh8BDgD2AbcmWR9VX1nYNqFwJNVdWKS84GrgT9NshI4H3gZ8GLgS0l+q6p+MWwuaX+54d4bWPPZNez4+Q4AHnrqIdZ8dg0AF5x8wTijSXtlFGcEpwBbqmprVT0D3AismjFnFbCuW74FOC1JuvEbq+pnVfUDYEu3P2liXLLhkmdLYJcdP9/BJRsuGVMiaX5GUQTHAQ8PrG/rxmadU1U7gaeAo/bycwFIsibJdJLp7du3jyC2NBo/fOqH8xqXFpuJuVlcVWurqldVvaVLl447jvSs5Yctn9e4tNiMoggeAY4fWF/Wjc06J8kS4DDg8b38XGlRu/K0KznkeYc8Z+yQ5x3CladdOaZE0vyMogjuBE5KckKSg+jf/F0/Y856YHW3fB7w5aqqbvz87reKTgBOAr41gkzSfnPByRew9py1rDhsBSGsOGwFa89Z641iTYyhf2uoqnYmuQi4FTgQuL6qNiW5DJiuqvXAdcDHk2wBnqBfFnTzbga+A+wE/trfGNIkuuDkC/zBr4mV/hvzydLr9Wp6enrcMSRpoiTZWFW9meMTc7NYkrQwLAJJapxFIEmNswgkqXEWgSQ1ziKQpMZZBJLUOItAkhpnEUhS4ywCSWqcRSBJjbMIJKlxFoEkNc4ikKTGWQSS1DiLQJIaZxFIUuMsAklqnEUgSY2zCCSpcRaBJDXOIpCkxlkEktQ4i0CSGmcRSFLjLAJJapxFIEmNswgkqXEWgSQ1ziKQpMYNVQRJjkxyW5LN3ccj5pi3upuzOcnqbuyQJJ9L8r0km5JcNUwWSdK+GfaM4GJgQ1WdBGzo1p8jyZHApcCrgFOASwcK4wNV9dvAK4E/THL2kHkkSfM0bBGsAtZ1y+uAc2eZ8zrgtqp6oqqeBG4DzqqqHVX1FYCqega4C1g2ZB5J0jwNWwTHVNVj3fKPgGNmmXMc8PDA+rZu7FlJDgfOoX9WIUnaj5bsaUKSLwEvmmXTJYMrVVVJar4BkiwBPgF8uKq27mbeGmANwPLly+f7NJKkOeyxCKrq9Lm2JflxkmOr6rEkxwI/mWXaI8CpA+vLgNsH1tcCm6vqmj3kWNvNpdfrzbtwJEmzG/bS0Hpgdbe8GvjMLHNuBc5MckR3k/jMbowkVwCHAX8zZA5J0j4atgiuAs5Ishk4vVsnSS/JRwGq6gngcuDO7nFZVT2RZBn9y0srgbuS3J3krUPmkSTNU6om7ypLr9er6enpcceQpImSZGNV9WaO+5fFktQ4i0CSGmcRSFLjLAJJapxFIEmNswgkqXEWgSQ1ziKQpMZZBJLUOItAkhpnEUhS4ywCSWqcRSBJjbMIJKlxFoEkNc4ikKTGWQSS1DiLQJIaZxFIUuMsAklqnEUgSY2zCCSpcRaBJDXOIpCkxlkEktQ4i0CSGmcRSFLjLAJJapxFIEmNswgkqXEWgSQ1bqgiSHJkktuSbO4+HjHHvNXdnM1JVs+yfX2S+4bJIknaN8OeEVwMbKiqk4AN3fpzJDkSuBR4FXAKcOlgYSR5I/D0kDkkSfto2CJYBazrltcB584y53XAbVX1RFU9CdwGnAWQ5FDgncAVQ+aQJO2jYYvgmKp6rFv+EXDMLHOOAx4eWN/WjQFcDnwQ2LGnJ0qyJsl0kunt27cPEVmSNGjJniYk+RLwolk2XTK4UlWVpPb2iZO8AnhpVf1tkqk9za+qtcBagF6vt9fPI0navT0WQVWdPte2JD9OcmxVPZbkWOAns0x7BDh1YH0ZcDvwaqCX5MEuxwuT3F5VpyJJ2m+GvTS0Htj1W0Crgc/MMudW4MwkR3Q3ic8Ebq2qf66qF1fVFPBHwAOWgCTtf8MWwVXAGUk2A6d36yTpJfkoQFU9Qf9ewJ3d47JuTJK0CKRq8i6393q9mp6eHncMSZooSTZWVW/muH9ZLEmNswgkqXEWgSQ1ziKQpMZZBJLUOItAkhpnEUhS4ywCSWqcRSBJjbMIJKlxFoEkNc4
ikKTGWQSS1DiLQJIaZxFIUuMsAklqnEUgSY2zCCSpcRaBJDXOIpCkxlkEktQ4i0CSGmcRSFLjLAJJalyqatwZ5i3JduCheX7a0cBPFyDOqJlztMw5WpOQcxIywnhyrqiqpTMHJ7II9kWS6arqjTvHnphztMw5WpOQcxIywuLK6aUhSWqcRSBJjWupCNaOO8BeMudomXO0JiHnJGSERZSzmXsEkqTZtXRGIEmahUUgSY2b2CJIclaS+5NsSXLxLNv/OMldSXYmOW9gfEU3fneSTUneNrDtzUnuTXJPki8mOXocGQe2vyDJtiTXDoz9bpdxS5IPJ8kwGRciZ5JDknwuyfe61/iqYTMuRM4Z29YnuW+x5kxyUJK1SR7oXtc3LdKcIz2Ghs2Z5BfdsX53kvUD4yckuaPb501JDlqkOW/o9nlfkuuTPG/YnLOqqol7AAcC3wdeAhwEfBtYOWPOFPBy4GPAeQPjBwEHd8uHAg8CLwaWAD8Bju62vR947zgyDmz/R+DfgGsHxr4F/D4Q4AvA2eN6LefKCRwC/MnA6/21xZhzYPyN3fh94/ze3MPX/X3AFd3yAbu+TxdTzlEfQ6PICTw9x35vBs7vlv8FePsizfl6+sd6gE8Mm3Oux6SeEZwCbKmqrVX1DHAjsGpwQlU9WFX3AL+cMf5MVf2sWz2YX50V7Xqxf7N7l/0C4NFxZIT+O3/gGOA/BsaOBV5QVd+s/nfJx4Bzh8i4IDmrakdVfaVbfga4C1i22HJ244cC7wSuGDLfguYE/hL4++7zf1lVw/5F6kLkHPUxNHTO2XTZXgvc0g2tY8zH0Vyq6vPVof8mcNjjaFaTWgTHAQ8PrG/rxvZKkuOT3NPt4+qqerSqfg68HbiX/jfvSuC6cWRMcgDwQeBds+xz277sczcWIufgnMOBc4AN+x4RWLicl3fbdgyZb5eR5+xeQ4DLu0sLn0xyzGLLuQDH0FA5O7+RZDrJN5Oc240dBfx3Ve3cx33ur5zP6i4J/RnwxaFSzmFSi2AoVfVwVb0cOBFYneSY7oV+O/BK+peK7gHePaaI7wA+X1Xb9jhzvHabM8kS+qezH66qrfs12XPNmjPJK4CXVtWnx5Lq1831ei6h/07w61X1O8A3gA/s73AD5no9F9MxtMuK6v83Dm8Brkny0jHnmcuecv4T8NWq+tpCPPmShdjpfvAIcPzA+rJubF6q6tHuBuFr6P4Tu6r6PkCSm4Ffu+GznzK+GnhNknfQv49xUJKn6V+THTw13Kd/90LnrKpdr9taYHNVXTNkxgXJSf9r3kvyIP1j4YVJbq+qUxdZznfTP2P5927eJ4ELh8i4UDk/BSM9hobNSVU90n3cmuR2+iX1KeDwJEu6s4JxH0dz5dz1Ol4KLAX+asiMuw0wcQ/6B+1W4AR+dWPmZXPM/Veee7N4GfD8bvkI4AHgZPrvYB4DlnbbLgc+OI6MM7b9Bbu/Wfz6cb2We8h5Bf0D7oBxf813l3NgfIrR3CxeqNfzRuC1A9s+udhyjvoYGjZnd3zv+sWQo4HNdDdw6Zfp4M3idyzSnG8Fvk73M2uhHgu244V+0L+b/gD91rykG7sMeEO3/Hv0r9P9D/A4sKkbP4P+Keu3u49rBvb5NuC73fhngaPGkXHGPmb+QOgB93X7vJbur8MXU076ZVvda3l393jrYss5Y3yKERTBAn7dVwBf7b43NwDLF2nOkR5Dw+QE/oD+/Ypvdx8vHNjnS+i/qdpCvxQOXqQ5d3b723UcvWcU36MzH/4XE5LUuCZvFkuSfsUikKTGWQSS1DiLQJIaZxFIUuMsAklqnEUgSY37P8rGO7C7IiJhAAAAAElFTkSuQmCC\n",
+      "text/plain": [
+       "<Figure size 432x288 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "from sparse_coding_torch.pnb.video_loader import classify_nerve_is_right\n",
+    "from matplotlib.pyplot import imshow\n",
+    "from matplotlib import pyplot as plt\n",
+    "from matplotlib import cm\n",
+    "import matplotlib.patches as patches\n",
+    "import math\n",
+    "from tqdm import tqdm\n",
+    "import glob\n",
+    "from os.path import join, abspath\n",
+    "import torch\n",
+    "import random\n",
+    "\n",
+    "labels = [name for name in os.listdir(video_path) if os.path.isdir(os.path.join(video_path, name))]\n",
+    "\n",
+    "# videos = []\n",
+    "# for label in labels:\n",
+    "#     videos.extend([(label, abspath(join(video_path, label, f)), f) for f in glob.glob(join(video_path, label, '67', '*.mp4'))])\n",
+    "\n",
+    "videos = [('Positives', 'pnb_same_frame_124.mp4', 'pnb_same_frame_124.mp4')]\n",
+    "\n",
+    "all_distances = []\n",
+    "all_colors = []\n",
+    "for label, path, vid_f in videos:\n",
+    "    print(vid_f)\n",
+    "    vc = torchvision.io.read_video(path)[0].permute(3, 0, 1, 2)\n",
+    "    is_right = classify_nerve_is_right(yolo_model, vc)\n",
+    "    \n",
+    "    orig_height = vc.size(2)\n",
+    "    orig_width = vc.size(3)\n",
+    "    \n",
+    "    nerve_bb = []\n",
+    "    needle_bb = []\n",
+    "    \n",
+    "    for i in tqdm(random.sample(range(0, vc.size(1)), 1)):\n",
+    "        frame = vc[:, i, :, :].swapaxes(0, 2).swapaxes(0, 1).numpy()\n",
+    "\n",
+    "        bounding_boxes, classes, scores = yolo_model.get_bounding_boxes_v5(frame)\n",
+    "\n",
+    "        nerve_bb = [bb for bb, class_pred, score in zip(bounding_boxes, classes, scores) if class_pred==0]\n",
+    "        needle_bb = [bb for bb, class_pred, score in zip(bounding_boxes, classes, scores) if class_pred==2]\n",
+    "        \n",
+    "        if len(nerve_bb) > 0 and len(needle_bb) > 0:\n",
+    "            nerve_bb = nerve_bb[0]\n",
+    "            needle_bb = needle_bb[0]\n",
+    "        else:\n",
+    "            continue\n",
+    "\n",
+    "#     if len(nerve_bb) == 0 or len(needle_bb) == 0:\n",
+    "#         continue\n",
+    "\n",
+    "        nerve_x = (nerve_bb[2] + nerve_bb[0]) / 2\n",
+    "        nerve_y = (nerve_bb[3] + nerve_bb[1]) / 2\n",
+    "\n",
+    "        needle_x = needle_bb[2]\n",
+    "        needle_y = needle_bb[3]\n",
+    "\n",
+    "        if not is_right:\n",
+    "            needle_x = needle_bb[0]\n",
+    "            \n",
+    "        print(i)\n",
+    "        print(nerve_x)\n",
+    "        print(nerve_y)\n",
+    "        \n",
+    "        torchvision.io.write_video('pnb_same_frame_{}.mp4'.format(i), np.stack([frame] * 60, axis=0), fps=20)\n",
+    "        distance = math.sqrt((nerve_x - needle_x)**2 + (nerve_y - needle_y)**2)\n",
+    "        print(distance)\n",
+    "#         if i > 5:\n",
+    "#             raise Exception\n",
+    "\n",
+    "        all_distances.append(distance)\n",
+    "        if label == 'Positives':\n",
+    "            all_colors.append('green')\n",
+    "        elif label == 'Negatives':\n",
+    "            all_colors.append('red')\n",
+    "        else:\n",
+    "            raise Exception('Bad Label')\n",
+    "\n",
+    "plt.scatter(all_distances, [0]*len(all_distances), color=all_colors)\n",
+    "plt.savefig('nerve_plot.png')"
+   ]
+  },
   {
    "cell_type": "code",
    "execution_count": null,
@@ -964,11 +1169,21 @@
  ],
  "metadata": {
   "kernelspec": {
-   "display_name": "",
-   "name": ""
+   "display_name": "Python (pocus_project)",
+   "language": "python",
+   "name": "darryl_pocus"
   },
   "language_info": {
-   "name": ""
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.9.7"
   }
  },
  "nbformat": 4,
diff --git a/sparse_coding_torch/onsd/classifier_model.py b/sparse_coding_torch/onsd/classifier_model.py
index f97cad6..ac9aa14 100644
--- a/sparse_coding_torch/onsd/classifier_model.py
+++ b/sparse_coding_torch/onsd/classifier_model.py
@@ -7,15 +7,22 @@ import torchvision as tv
 import torch
 import torch.nn as nn
 from sparse_coding_torch.utils import VideoGrayScaler, MinMaxScaler
+from sparse_coding_torch.sparse_model import SparseCode
     
 class ONSDClassifier(keras.layers.Layer):
     def __init__(self, sparse_checkpoint):
         super(ONSDClassifier, self).__init__()
         
-        self.sparse_filters = tf.squeeze(keras.models.load_model(sparse_checkpoint).weights[0], axis=0)
+#         self.sparse_filters = tf.squeeze(keras.models.load_model(sparse_checkpoint).weights[0], axis=0)
 
-        self.conv_1 = keras.layers.Conv2D(48, kernel_size=8, strides=2, activation='relu', padding='valid')
-        self.conv_2 = keras.layers.Conv2D(64, kernel_size=4, strides=2, activation='relu', padding='valid')
+#         self.conv_1 = keras.layers.Conv2D(32, kernel_size=(8, 8), strides=2, activation='relu', padding='valid')
+#         self.conv_2 = keras.layers.Conv2D(32, kernel_size=(8, 8), strides=2, activation='relu', padding='valid')
+#         self.conv_3 = keras.layers.Conv2D(32, kernel_size=(8, 8), strides=2, activation='relu', padding='valid')
+#         self.conv_4 = keras.layers.Conv2D(32, kernel_size=(8, 8), strides=2, activation='relu', padding='valid')
+#         self.conv_5 = keras.layers.Conv2D(32, kernel_size=(4, 4), strides=1, activation='relu', padding='valid')
+#         self.conv_6 = keras.layers.Conv2D(32, kernel_size=(8, 8), strides=2, activation='relu', padding='valid')
+        self.conv_1 = keras.layers.Conv1D(10, kernel_size=3, strides=1, activation='relu', padding='valid')
+        self.conv_2 = keras.layers.Conv1D(10, kernel_size=3, strides=1, activation='relu', padding='valid')
 
         self.flatten = keras.layers.Flatten()
 
@@ -23,17 +30,21 @@ class ONSDClassifier(keras.layers.Layer):
 
 #         self.ff_1 = keras.layers.Dense(1000, activation='relu', use_bias=True)
 #         self.ff_2 = keras.layers.Dense(500, activation='relu', use_bias=True)
-#         self.ff_2 = keras.layers.Dense(20, activation='relu', use_bias=True)
+#         self.ff_2 = keras.layers.Dense(100, activation='relu', use_bias=True)
         self.ff_3 = keras.layers.Dense(20, activation='relu', use_bias=True)
         self.ff_final_1 = keras.layers.Dense(1)
-        self.ff_final_2 = keras.layers.Dense(1)
+#         self.ff_final_2 = keras.layers.Dense(1)
 
 #     @tf.function
     def call(self, activations):
-        x = tf.nn.conv2d(activations, self.sparse_filters, strides=4, padding='VALID')
-        x = tf.nn.relu(x)
-        x = self.conv_1(x)
+#         x = tf.nn.conv2d(activations, self.sparse_filters, strides=(1, 4), padding='VALID')
+#         x = tf.nn.relu(x)
+        x = self.conv_1(activations)
         x = self.conv_2(x)
+#         x = self.conv_3(x)
+#         x = self.conv_4(x)
+#         x = self.conv_5(x)
+#         x = self.conv_6(x)
         x = self.flatten(x)
 #         x = self.ff_1(x)
 #         x = self.dropout(x)
@@ -42,43 +53,100 @@ class ONSDClassifier(keras.layers.Layer):
         x = self.ff_3(x)
 #         x = self.dropout(x)
         class_pred = self.ff_final_1(x)
-        width_pred = tf.math.tanh(self.ff_final_2(x))
+#         width_pred = tf.math.tanh(self.ff_final_2(x))
 
-        return class_pred, width_pred
+        return class_pred
     
 class ONSDSharpness(keras.Model):
     def __init__(self):
         super().__init__()
-        self.encoder = tf.keras.applications.DenseNet121(include_top=False)
+#         self.encoder = tf.keras.applications.DenseNet121(include_top=False)
+#         self.encoder.trainable = True
+        self.conv_1 = keras.layers.Conv2D(32, kernel_size=4, strides=2, activation='relu', padding='valid')
+        self.conv_2 = keras.layers.Conv2D(32, kernel_size=4, strides=2, activation='relu', padding='valid')
+        self.conv_3 = keras.layers.Conv2D(32, kernel_size=4, strides=2, activation='relu', padding='valid')
+        self.conv_4 = keras.layers.Conv2D(32, kernel_size=4, strides=2, activation='relu', padding='valid')
+        self.conv_5 = keras.layers.Conv2D(32, kernel_size=4, strides=2, activation='relu', padding='valid')
+        self.conv_6 = keras.layers.Conv2D(32, kernel_size=2, strides=1, activation='relu', padding='valid')
         
         self.flatten = keras.layers.Flatten()
         
-        self.ff_1 = keras.layers.Dense(100, activation='relu', use_bias=True)
-        self.ff_2 = keras.layers.Dense(1, activation='sigmoid')
+        self.ff_1 = keras.layers.Dense(1000, activation='relu', use_bias=True)
+        self.ff_2 = keras.layers.Dense(100, activation='relu', use_bias=True)
+        self.ff_3 = keras.layers.Dense(1)
         
     @tf.function
     def call(self, images):
-        x = self.encoder(images)
+#         x = self.encoder(images)
+        x = self.conv_1(images)
+        x = self.conv_2(x)
+        x = self.conv_3(x)
+        x = self.conv_4(x)
+        x = self.conv_5(x)
+        x = self.conv_6(x)
         
         x = self.flatten(x)
         
         x = self.ff_1(x)
         x = self.ff_2(x)
+        x = self.ff_3(x)
 
         return x
 
     
+# class MobileModelONSD(keras.Model):
+#     def __init__(self, classifier_model):
+#         super().__init__()
+#         self.classifier = classifier_model
+
+#     @tf.function
+#     def call(self, images):
+# #         images = tf.squeeze(tf.image.rgb_to_grayscale(images), axis=-1)
+#         images = tf.transpose(images, perm=[0, 2, 3, 1])
+#         images = images / 255
+
+#         pred = tf.math.sigmoid(self.classifier(images))
+
+#         return pred
+
 class MobileModelONSD(keras.Model):
-    def __init__(self, classifier_model):
+    def __init__(self, sparse_weights, classifier_model, batch_size, image_height, image_width, clip_depth, out_channels, kernel_size, kernel_depth, stride, lam, activation_lr, max_activation_iter, run_2d):
         super().__init__()
+        if run_2d:
+            inputs = keras.Input(shape=(image_height, image_width, clip_depth))
+        else:
+            inputs = keras.Input(shape=(1, image_height, image_width, clip_depth))
+        
+        if run_2d:
+            filter_inputs = keras.Input(shape=(kernel_size, kernel_size, 1, out_channels), dtype='float32')
+        else:
+            filter_inputs = keras.Input(shape=(1, kernel_size, kernel_size, 1, out_channels), dtype='float32')
+        
+        output = SparseCode(batch_size=batch_size, image_height=image_height, image_width=image_width, clip_depth=clip_depth, in_channels=1, out_channels=out_channels, kernel_size=kernel_size, kernel_depth=kernel_depth, stride=stride, lam=lam, activation_lr=activation_lr, max_activation_iter=max_activation_iter, run_2d=run_2d)(inputs, filter_inputs)
+
+        self.sparse_model = keras.Model(inputs=(inputs, filter_inputs), outputs=output)
         self.classifier = classifier_model
 
+        self.out_channels = out_channels
+        self.stride = stride
+        self.lam = lam
+        self.activation_lr = activation_lr
+        self.max_activation_iter = max_activation_iter
+        self.batch_size = batch_size
+        self.run_2d = run_2d
+        
+        self.sparse_weights = sparse_weights
+
     @tf.function
     def call(self, images):
 #         images = tf.squeeze(tf.image.rgb_to_grayscale(images), axis=-1)
-        images = tf.transpose(images, perm=[0, 2, 3, 1])
+#         images = tf.transpose(images, perm=[0, 2, 3, 1])
         images = images / 255
 
-        pred = tf.math.sigmoid(self.classifier(images))
+        activations = tf.stop_gradient(self.sparse_model([images, tf.stop_gradient(self.sparse_weights)]))
+
+        pred = tf.math.sigmoid(self.classifier(tf.expand_dims(activations, axis=1)))
+#         pred = tf.math.sigmoid(self.classifier(activations))
+#         pred = tf.math.reduce_sum(activations)
 
-        return pred
+        return pred
\ No newline at end of file
diff --git a/sparse_coding_torch/onsd/generate_images_to_label.py b/sparse_coding_torch/onsd/generate_images_to_label.py
new file mode 100644
index 0000000..f62d496
--- /dev/null
+++ b/sparse_coding_torch/onsd/generate_images_to_label.py
@@ -0,0 +1,97 @@
+from os import listdir
+from os.path import isfile
+from os.path import join
+from os.path import isdir
+from os.path import abspath
+from os.path import exists
+import csv
+import glob
+import os
+from tqdm import tqdm
+import torchvision as tv
+import cv2
+import random
+
+video_path = "/shared_data/bamc_onsd_data/revised_onsd_data"
+
+labels = [name for name in listdir(video_path) if isdir(join(video_path, name))]
+        
+count = 0
+
+valid_frames = {}
+invalid_frames = {}
+with open('sparse_coding_torch/onsd/good_frames_onsd.csv', 'r') as valid_in:
+    reader = csv.DictReader(valid_in)
+    for row in reader:
+        vid = row['video'].strip()
+        good_frames = row['good_frames'].strip()
+        bad_frames = row['bad_frames'].strip()
+        if good_frames:
+            for subrange in good_frames.split(';'):
+                splitrange = subrange.split('-')
+                valid_frames[vid] = (int(splitrange[0]), int(splitrange[1]))
+        if bad_frames:
+            for subrange in bad_frames.split(';'):
+                splitrange = subrange.split('-')
+                invalid_frames[vid] = (int(splitrange[0]), int(splitrange[1]))
+
+videos = []
+for label in labels:
+    videos.extend([(label, abspath(join(video_path, label, f)), f) for f in glob.glob(join(video_path, label, '*', '*.mp4'))])
+
+if not os.path.exists('sparse_coding_torch/onsd/individual_frames'):
+    os.makedirs('sparse_coding_torch/onsd/individual_frames')
+    
+files_to_write = []
+
+vid_idx = 0
+for txt_label, path, f_name in tqdm(videos):
+    vc = tv.io.read_video(path)[0].permute(3, 0, 1, 2)
+    
+    label = videos[vid_idx][0]
+    f_name = f_name.split('/')[-1]
+    
+#     print(f_name)
+    write_path = os.path.join('sparse_coding_torch/onsd/individual_frames', label, f_name[:f_name.rfind('.')])
+    if not os.path.exists(write_path):
+        os.makedirs(write_path)
+
+    frame_key = path.split('/')[-2]
+    if frame_key in valid_frames:
+        start_range, end_range = valid_frames[frame_key]
+
+        for j in range(start_range, end_range, 1):
+            if j == vc.size(1):
+                break
+            frame = vc[:, j, :, :]
+            
+            files_to_write.append((os.path.join(write_path, str(j) + '.png'), frame.numpy().swapaxes(0,1).swapaxes(1,2), label))
+
+#             cv2.imwrite(os.path.join(write_path, str(j) + '.png'), frame.numpy().swapaxes(0,1).swapaxes(1,2))
+
+    vid_idx += 1
+    
+num_positive = 50
+num_negative = 50
+
+curr_positive = 0
+curr_negative = 0
+
+random.shuffle(files_to_write)
+
+with open('sparse_coding_torch/onsd/individual_frames/onsd_labeled_widths.csv', 'w+') as csv_out:
+    out_write = csv.writer(csv_out)
+    
+    out_write.writerow(['Video', 'Distance'])
+    
+    for path, frame, label in files_to_write:
+        if label == 'Positives' and curr_positive < num_positive:
+            cv2.imwrite(path, frame)
+            out_write.writerow([path])
+            curr_positive += 1
+        elif label == 'Negatives' and curr_negative < num_negative:
+            cv2.imwrite(path, frame)
+            out_write.writerow([path])
+            curr_negative += 1
+            
+        
\ No newline at end of file
diff --git a/sparse_coding_torch/onsd/generate_tflite.py b/sparse_coding_torch/onsd/generate_tflite.py
index 8090340..5131b5a 100644
--- a/sparse_coding_torch/onsd/generate_tflite.py
+++ b/sparse_coding_torch/onsd/generate_tflite.py
@@ -12,11 +12,19 @@ import argparse
 
 if __name__ == "__main__":
     parser = argparse.ArgumentParser()
-    parser.add_argument('--checkpoint', default='sparse_coding_torch/classifier_outputs/32_filters_no_aug_3/best_classifier.pt/', type=str)
+    parser.add_argument('--checkpoint', default='sparse_coding_torch/classifier_outputs/onsd_all_train_2/best_classifier_0.pt/', type=str)
     parser.add_argument('--batch_size', default=1, type=int)
     parser.add_argument('--image_height', type=int, default=200)
     parser.add_argument('--image_width', type=int, default=200)
     parser.add_argument('--clip_depth', type=int, default=1)
+    parser.add_argument('--kernel_size', default=15, type=int)
+    parser.add_argument('--kernel_depth', default=1, type=int)
+    parser.add_argument('--num_kernels', default=32, type=int)
+    parser.add_argument('--stride', default=2, type=int)
+    parser.add_argument('--max_activation_iter', default=200, type=int)
+    parser.add_argument('--activation_lr', default=1e-2, type=float)
+    parser.add_argument('--lam', default=0.05, type=float)
+    parser.add_argument('--sparse_checkpoint', default='sparse_coding_torch/output/onsd_frame_level_32/best_sparse.pt/', type=str)
     
     args = parser.parse_args()
     #print(args.accumulate(args.integers))
@@ -25,18 +33,20 @@ if __name__ == "__main__":
     image_height = args.image_height
     image_width = args.image_width
     clip_depth = args.clip_depth
+    
+    recon_model = keras.models.load_model(args.sparse_checkpoint)
         
     classifier_model = keras.models.load_model(args.checkpoint)
 
-    inputs = keras.Input(shape=(clip_depth, image_height, image_width))
+    inputs = keras.Input(shape=(image_height, image_width, 1))
 
-    outputs = MobileModelONSD(classifier_model=classifier_model)(inputs)
+    outputs = MobileModelONSD(sparse_weights=recon_model.weights[0], classifier_model=classifier_model, batch_size=batch_size, image_height=image_height, image_width=image_width, clip_depth=clip_depth, out_channels=args.num_kernels, kernel_size=args.kernel_size, kernel_depth=args.kernel_depth, stride=args.stride, lam=args.lam, activation_lr=args.activation_lr, max_activation_iter=args.max_activation_iter, run_2d=True)(inputs)
 
     model = keras.Model(inputs=inputs, outputs=outputs)
 
     input_name = model.input_names[0]
     index = model.input_names.index(input_name)
-    model.inputs[index].set_shape([batch_size, clip_depth, image_height, image_width])
+    model.inputs[index].set_shape([batch_size, image_height, image_width, 1])
 
     converter = tf.lite.TFLiteConverter.from_keras_model(model)
     converter.optimizations = [tf.lite.Optimize.DEFAULT]
diff --git a/sparse_coding_torch/onsd/generate_tflite_valid.py b/sparse_coding_torch/onsd/generate_tflite_valid.py
new file mode 100644
index 0000000..7c363ce
--- /dev/null
+++ b/sparse_coding_torch/onsd/generate_tflite_valid.py
@@ -0,0 +1,43 @@
+from tensorflow import keras
+import numpy as np
+import torch
+import tensorflow as tf
+import cv2
+import torchvision as tv
+import torch
+import torch.nn as nn
+from sparse_coding_torch.utils import VideoGrayScaler, MinMaxScaler
+from sparse_coding_torch.onsd.classifier_model import MobileModelONSD
+import argparse
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--checkpoint', default='sparse_coding_torch/onsd/valid_frame_model_2/best_classifier.pt/', type=str)
+    parser.add_argument('--batch_size', default=1, type=int)
+    parser.add_argument('--image_height', type=int, default=512)
+    parser.add_argument('--image_width', type=int, default=512)
+    
+    args = parser.parse_args()
+    #print(args.accumulate(args.integers))
+    batch_size = args.batch_size
+
+    image_height = args.image_height
+    image_width = args.image_width
+        
+    classifier_model = keras.models.load_model(args.checkpoint)
+
+    input_name = classifier_model.input_names[0]
+    index = classifier_model.input_names.index(input_name)
+    classifier_model.inputs[index].set_shape([batch_size, image_height, image_width, 3])
+
+    converter = tf.lite.TFLiteConverter.from_keras_model(classifier_model)
+    converter.optimizations = [tf.lite.Optimize.DEFAULT]
+    converter.target_spec.supported_types = [tf.float16]
+    converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS]
+
+    tflite_model = converter.convert()
+
+    print('Converted')
+
+    with open("./sparse_coding_torch/mobile_output/onsd_valid.tflite", "wb") as f:
+        f.write(tflite_model)
diff --git a/sparse_coding_torch/onsd/load_data.py b/sparse_coding_torch/onsd/load_data.py
index c4d5dc7..137f148 100644
--- a/sparse_coding_torch/onsd/load_data.py
+++ b/sparse_coding_torch/onsd/load_data.py
@@ -3,17 +3,17 @@ import torchvision
 import torch
 from sklearn.model_selection import train_test_split
 from sparse_coding_torch.utils import MinMaxScaler
-from sparse_coding_torch.onsd.video_loader import get_participants, ONSDLoader
+from sparse_coding_torch.onsd.video_loader import get_participants, ONSDGoodFramesLoader, FrameLoader
 from sparse_coding_torch.utils import VideoGrayScaler
 from typing import Sequence, Iterator
 import csv
 from sklearn.model_selection import train_test_split, GroupShuffleSplit, LeaveOneGroupOut, LeaveOneOut, StratifiedGroupKFold, StratifiedKFold, KFold, ShuffleSplit
     
-def load_onsd_videos(batch_size, input_size, yolo_model=None, mode=None, n_splits=None):   
+def load_onsd_videos(batch_size, input_size, crop_size, yolo_model=None, mode=None, n_splits=None):   
     video_path = "/shared_data/bamc_onsd_data/revised_onsd_data"
     
     transforms = torchvision.transforms.Compose(
-    [#torchvision.transforms.Grayscale(1),
+    [torchvision.transforms.Grayscale(1),
      MinMaxScaler(0, 255),
      torchvision.transforms.Resize(input_size[:2])
     ])
@@ -23,7 +23,7 @@ def load_onsd_videos(batch_size, input_size, yolo_model=None, mode=None, n_split
 #      torchvision.transforms.RandomAdjustSharpness(0.05)
      
 #     ])
-    dataset = ONSDLoader(video_path, input_size[1], input_size[0], transform=transforms, yolo_model=yolo_model)
+    dataset = ONSDGoodFramesLoader(video_path, crop_size[1], crop_size[0], transform=transforms, yolo_model=yolo_model)
     
     targets = dataset.get_labels()
     
@@ -50,4 +50,30 @@ def load_onsd_videos(batch_size, input_size, yolo_model=None, mode=None, n_split
 
         groups = get_participants(dataset.get_filenames())
         
+        return gss.split(np.arange(len(targets)), targets, groups), dataset
+    
+def load_onsd_frames(batch_size, input_size, mode=None, yolo_model=None):   
+    video_path = "/shared_data/bamc_onsd_data/revised_onsd_data"
+    
+    transforms = torchvision.transforms.Compose(
+    [
+     MinMaxScaler(0, 255),
+     torchvision.transforms.Resize(input_size[:2])
+    ])
+
+    dataset = FrameLoader(video_path, input_size[1], input_size[0], transform=transforms, yolo_model=yolo_model)
+    
+    targets = dataset.get_labels()
+    
+    if mode == 'all_train':
+        train_idx = np.arange(len(targets))
+        test_idx = None
+        
+        return [(train_idx, test_idx)], dataset
+    else:
+#         gss = ShuffleSplit(n_splits=n_splits, test_size=0.2)
+        gss = GroupShuffleSplit(n_splits=1, test_size=0.2)
+
+        groups = get_participants(dataset.get_filenames())
+        
         return gss.split(np.arange(len(targets)), targets, groups), dataset
\ No newline at end of file
diff --git a/sparse_coding_torch/onsd/run_tflite.py b/sparse_coding_torch/onsd/run_tflite.py
new file mode 100644
index 0000000..7909b2f
--- /dev/null
+++ b/sparse_coding_torch/onsd/run_tflite.py
@@ -0,0 +1,92 @@
+import torch
+import os
+import time
+import numpy as np
+import torchvision
+import csv
+from datetime import datetime
+from yolov4.get_bounding_boxes import YoloModel
+from sparse_coding_torch.onsd.video_loader import get_yolo_region_onsd
+from sparse_coding_torch.utils import VideoGrayScaler, MinMaxScaler
+import argparse
+import tensorflow as tf
+import scipy.stats
+import cv2
+import glob
+import torchvision as tv
+from tqdm import tqdm
+from sklearn.metrics import f1_score, accuracy_score
+
+if __name__ == "__main__":
+
+    parser = argparse.ArgumentParser(description='Python program for processing ONSD data')
+    parser.add_argument('--classifier', type=str, default='sparse_coding_torch/mobile_output/onsd.tflite')
+    parser.add_argument('--input_dir', default='sparse_coding_torch/onsd/onsd_good_for_eval', type=str)
+    parser.add_argument('--image_width', default=200, type=int)
+    parser.add_argument('--image_height', default=200, type=int)
+    parser.add_argument('--run_2d', default=True, type=lambda s: str(s).lower() in ('true', '1'))
+    args = parser.parse_args()
+
+    interpreter = tf.lite.Interpreter(args.classifier)
+    interpreter.allocate_tensors()
+
+    input_details = interpreter.get_input_details()
+    output_details = interpreter.get_output_details()
+
+    yolo_model = YoloModel('onsd')
+
+    transform = torchvision.transforms.Compose(
+    [torchvision.transforms.Grayscale(1),
+     MinMaxScaler(0, 255),
+     torchvision.transforms.Resize((args.image_height, args.image_width))
+    ])
+    
+    all_gt = []
+    all_preds = []
+
+    for label in ['Positives', 'Negatives']:
+        for f in tqdm(os.listdir(os.path.join(args.input_dir, label))):
+            if not f.endswith('.png'):
+                continue
+
+            frame = torch.tensor(cv2.imread(os.path.join(args.input_dir, label, f))).swapaxes(2, 1).swapaxes(1, 0)
+
+            frame = get_yolo_region_onsd(yolo_model, frame, args.image_width, args.image_height)
+
+            frame = frame[0]  # NOTE(review): raises IndexError when get_yolo_region_onsd finds no region -- guard for an empty list as train_classifier.py does
+
+            if args.run_2d:
+                frame = transform(frame).to(torch.float32).squeeze().unsqueeze(0).unsqueeze(3).numpy()
+            else:
+                frame = transform(frame).to(torch.float32).squeeze().unsqueeze(0).unsqueeze(0).unsqueeze(4).numpy()
+            
+#             cv2.imwrite('testing_tflite_onsd.png', frame[0])
+#             print(frame.shape)
+
+            interpreter.set_tensor(input_details[0]['index'], frame)
+
+            interpreter.invoke()
+
+            output_array = np.array(interpreter.get_tensor(output_details[0]['index']))
+
+            pred = output_array[0][0]
+
+            final_pred = float(tf.math.round(pred))
+            
+            all_preds.append(final_pred)
+
+            if label == 'Positives':
+                all_gt.append(1.0)
+            elif label == 'Negatives':
+                all_gt.append(0.0)
+            
+    overall_pred = np.array(all_preds)
+    overall_true = np.array(all_gt)
+
+    overall_true = np.array(overall_true)
+    overall_pred = np.array(overall_pred)
+            
+    final_f1 = f1_score(overall_true, overall_pred, average='macro')
+    final_acc = accuracy_score(overall_true, overall_pred)
+    
+    print("Final accuracy={:.2f}, f1={:.2f}".format(final_acc, final_f1))
\ No newline at end of file
diff --git a/sparse_coding_torch/onsd/train_classifier.py b/sparse_coding_torch/onsd/train_classifier.py
index 9991675..54cc517 100644
--- a/sparse_coding_torch/onsd/train_classifier.py
+++ b/sparse_coding_torch/onsd/train_classifier.py
@@ -25,6 +25,8 @@ import glob
 import cv2
 import copy
 
+tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)
+
 # configproto = tf.compat.v1.ConfigProto()
 # configproto.gpu_options.polling_inactive_delay_msecs = 5000
 # configproto.gpu_options.allow_growth = True
@@ -33,7 +35,79 @@ import copy
 # tf.compat.v1.keras.backend.set_session(sess)
 # tf.debugging.set_log_device_placement(True)
 
-def calculate_onsd_scores(input_videos, labels, yolo_model, classifier_model, transform, crop_width, crop_height):
+def split_difficult_vids(vid_list, num_splits):
+    output_array = [[] for _ in range(num_splits)]
+    for i, v in enumerate(vid_list):
+        output_array[(i + 1) % num_splits].append(v)
+        
+    return output_array
+
+def calculate_onsd_scores_measured(input_videos, yolo_model, classifier_model, sparse_model, recon_model, transform, crop_width, crop_height):
+    frame_path = 'sparse_coding_torch/onsd/onsd_good_for_eval'
+    
+    all_preds = []
+    all_gt = []
+    fp = []
+    fn = []
+
+    for vid_f in tqdm(input_videos):
+        split_path = vid_f.split('/')
+        frame_path = '/'.join(split_path[:-1])
+        label = split_path[-3]
+        f = [png_file for png_file in os.listdir(frame_path) if png_file.endswith('.png')][0]
+#     for f in tqdm(os.listdir(os.path.join(frame_path, label))):
+#         if not f.endswith('.png'):
+#             continue
+#         print(split_path)
+#         print(frame_path)
+#         print(label)
+#         print(f)
+#         raise Exception
+
+        frame = torch.tensor(cv2.imread(os.path.join(frame_path, f))).swapaxes(2, 1).swapaxes(1, 0)
+    
+#         print(frame.size())
+
+        frame = get_yolo_region_onsd(yolo_model, frame, crop_width, crop_height, False)
+        
+#         print(frame)
+
+        frame = frame[0]
+        
+#         print(frame)
+
+        frame = transform(frame).to(torch.float32).unsqueeze(3).unsqueeze(1).numpy()
+
+        activations = tf.stop_gradient(sparse_model([frame, tf.stop_gradient(tf.expand_dims(recon_model.trainable_weights[0], axis=0))]))
+
+#             print(tf.math.reduce_sum(activations))
+
+        pred = classifier_model(activations)
+
+        final_pred = float(tf.math.round(tf.math.sigmoid(pred)))
+
+        all_preds.append(final_pred)
+
+        if label == 'Positives':
+            all_gt.append(1.0)
+            if final_pred == 0.0:
+                fn.append(f)
+        elif label == 'Negatives':
+            all_gt.append(0.0)
+            if final_pred == 1.0:
+                fp.append(f)
+            
+    return np.array(all_preds), np.array(all_gt), fn, fp
+
+def calculate_onsd_scores(input_videos, labels, yolo_model, classifier_model, sparse_model, recon_model, transform, crop_width, crop_height):
+    good_frame_model = keras.models.load_model('sparse_coding_torch/onsd/valid_frame_model_2/best_classifier.pt/')
+    
+    resize = torchvision.transforms.Compose(
+    [
+     MinMaxScaler(0, 255),
+     torchvision.transforms.Resize((512, 512))
+    ])
+    
     all_predictions = []
     
     numerical_labels = []
@@ -49,28 +123,41 @@ def calculate_onsd_scores(input_videos, labels, yolo_model, classifier_model, tr
     for v_idx, f in tqdm(enumerate(input_videos)):
         vc = torchvision.io.read_video(f)[0].permute(3, 0, 1, 2)
         
-        all_preds = []
-        for j in range(0, vc.size(1), 20):
+        best_frame = None
+        best_conf = 0
+    
+        for i in range(0, vc.size(1)):
+            frame = vc[:, i, :, :]
+
+            frame = resize(frame).swapaxes(0, 2).swapaxes(0, 1).numpy()
+
+            prepro_frame = np.expand_dims(frame, axis=0)
+
+#             prepro_frame = tf.keras.applications.densenet.preprocess_input(frame)
 
-            vc_sub = vc[:, j, :, :]
+            pred = good_frame_model(prepro_frame)
+
+            pred = tf.math.sigmoid(pred)
             
-            frame = get_yolo_region_onsd(yolo_model, vc_sub, crop_width, crop_height)
+            if pred > best_conf:
+                best_conf = pred
+                best_frame = vc[:, i, :, :]
+                
+        frame = get_yolo_region_onsd(yolo_model, best_frame, crop_width, crop_height, False)
             
-            if frame is None:
-                continue
+        if frame is None or len(frame) == 0:
+            final_pred = 1.0
+        else:
+            frame = frame[0]
 
-            frame = transform(frame).to(torch.float32).unsqueeze(3)
+            frame = transform(frame).to(torch.float32).unsqueeze(3).unsqueeze(1).numpy()
             
-            pred, _ = classifier_model(frame)
-            
-            pred = tf.math.round(tf.math.sigmoid(pred))
+            activations = tf.stop_gradient(sparse_model([frame, tf.stop_gradient(tf.expand_dims(recon_model.trainable_weights[0], axis=0))]))
 
-            all_preds.append(pred)
-                
-        if all_preds:
-            final_pred = np.round(np.mean(np.array(all_preds)))
-        else:
-            final_pred = 1.0
+            pred = classifier_model(activations)
+
+            final_pred = float(tf.math.round(tf.math.sigmoid(pred)))
+#             final_pred = 1.0
             
         if final_pred != numerical_labels[v_idx]:
             if final_pred == 0:
@@ -84,14 +171,14 @@ def calculate_onsd_scores(input_videos, labels, yolo_model, classifier_model, tr
 
 if __name__ == "__main__":
     parser = argparse.ArgumentParser()
-    parser.add_argument('--batch_size', default=12, type=int)
+    parser.add_argument('--batch_size', default=32, type=int)
     parser.add_argument('--kernel_size', default=15, type=int)
-    parser.add_argument('--kernel_depth', default=5, type=int)
-    parser.add_argument('--num_kernels', default=64, type=int)
+    parser.add_argument('--kernel_depth', default=1, type=int)
+    parser.add_argument('--num_kernels', default=32, type=int)
     parser.add_argument('--stride', default=1, type=int)
     parser.add_argument('--max_activation_iter', default=150, type=int)
     parser.add_argument('--activation_lr', default=1e-2, type=float)
-    parser.add_argument('--lr', default=5e-4, type=float)
+    parser.add_argument('--lr', default=5e-5, type=float)
     parser.add_argument('--epochs', default=40, type=int)
     parser.add_argument('--lam', default=0.05, type=float)
     parser.add_argument('--output_dir', default='./output', type=str)
@@ -129,6 +216,7 @@ if __name__ == "__main__":
     random.seed(args.seed)
     np.random.seed(args.seed)
     torch.manual_seed(args.seed)
+    tf.random.set_seed(args.seed)
     
     output_dir = args.output_dir
     if not os.path.exists(output_dir):
@@ -138,6 +226,7 @@ if __name__ == "__main__":
         out_f.write(str(args))
     
     yolo_model = YoloModel(args.dataset)
+#     yolo_model = None
 
     all_errors = []
     
@@ -145,19 +234,29 @@ if __name__ == "__main__":
         inputs = keras.Input(shape=(image_height, image_width, clip_depth))
     else:
         inputs = keras.Input(shape=(clip_depth, image_height, image_width, 1))
+        
+    filter_inputs = keras.Input(shape=(clip_depth, args.kernel_size, args.kernel_size, 1, args.num_kernels), dtype='float32')
 
-    sparse_model = None
-    recon_model = None
+    output = SparseCode(batch_size=args.batch_size, image_height=image_height, image_width=image_width, clip_depth=clip_depth, in_channels=1, out_channels=args.num_kernels, kernel_size=args.kernel_size, kernel_depth=args.kernel_depth, stride=args.stride, lam=args.lam, activation_lr=args.activation_lr, max_activation_iter=args.max_activation_iter, run_2d=args.run_2d)(inputs, filter_inputs)
+
+    sparse_model = keras.Model(inputs=(inputs, filter_inputs), outputs=output)
+    recon_model = keras.models.load_model(args.sparse_checkpoint)
     
-    data_augmentation = keras.Sequential([
-        keras.layers.RandomFlip('horizontal'),
-        keras.layers.RandomRotation(45),
-#         keras.layers.RandomBrightness(0.1)
-    ])
+#     data_augmentation = keras.Sequential([
+# #         keras.layers.RandomFlip('horizontal'),
+# # #         keras.layers.RandomFlip('vertical'),
+# #         keras.layers.RandomRotation(5),
+# #         keras.layers.RandomBrightness(0.1)
+#     ])
+#     transforms = torchvision.transforms.Compose(
+#     [torchvision.transforms.RandomAffine(scale=)
+#     ])
         
     
-    splits, dataset = load_onsd_videos(args.batch_size, input_size=(image_height, image_width), yolo_model=yolo_model, mode=args.splits, n_splits=args.n_splits)
+    splits, dataset = load_onsd_videos(args.batch_size, input_size=(image_height, image_width), crop_size=(crop_height, crop_width), yolo_model=yolo_model, mode=args.splits, n_splits=args.n_splits)
     positive_class = 'Positives'
+    
+    difficult_vids = split_difficult_vids(dataset.get_difficult_vids(), args.n_splits)
 
     overall_true = []
     overall_pred = []
@@ -174,11 +273,27 @@ if __name__ == "__main__":
         train_loader = copy.deepcopy(dataset)
         train_loader.set_indicies(train_idx)
         test_loader = copy.deepcopy(dataset)
-        test_loader.set_indicies(test_idx)
+        if args.splits == 'all_train':
+            test_loader.set_indicies(train_idx)
+        else:
+            test_loader.set_indicies(test_idx)
 
         train_tf = tf.data.Dataset.from_tensor_slices((train_loader.get_frames(), train_loader.get_labels(), train_loader.get_widths()))
         test_tf = tf.data.Dataset.from_tensor_slices((test_loader.get_frames(), test_loader.get_labels(), test_loader.get_widths()))
         
+
+        negative_ds = (
+          train_tf
+            .filter(lambda features, label, width: label==0)
+            .repeat())
+        positive_ds = (
+          train_tf
+            .filter(lambda features, label, width: label==1)
+            .repeat())
+        
+        balanced_ds = tf.data.Dataset.sample_from_datasets(
+            [negative_ds, positive_ds], [0.5, 0.5])
+        
 #         if test_idx is not None:
 #             test_sampler = torch.utils.data.SubsetRandomSampler(test_idx)
 #             test_loader = torch.utils.data.DataLoader(dataset, batch_size=batch_size,
@@ -193,7 +308,7 @@ if __name__ == "__main__":
         if args.checkpoint:
             classifier_model = keras.models.load_model(args.checkpoint)
         else:
-            classifier_inputs = keras.Input(shape=(image_height, image_width, 1))
+            classifier_inputs = keras.Input(shape=((clip_depth - args.kernel_depth) // 1 + 1, (image_height - args.kernel_size) // args.stride + 1, (image_width - args.kernel_size) // args.stride + 1, args.num_kernels))
             classifier_outputs = ONSDClassifier(args.sparse_checkpoint)(classifier_inputs)
 
             classifier_model = keras.Model(inputs=classifier_inputs, outputs=classifier_outputs)
@@ -201,7 +316,7 @@ if __name__ == "__main__":
         prediction_optimizer = keras.optimizers.Adam(learning_rate=args.lr)
         filter_optimizer = tf.keras.optimizers.SGD(learning_rate=args.sparse_lr)
 
-        best_so_far = float('-inf')
+        best_so_far = float('inf')
 
         class_criterion = keras.losses.BinaryCrossentropy(from_logits=True, reduction=keras.losses.Reduction.SUM)
         width_criterion = keras.losses.MeanSquaredError(reduction=keras.losses.Reduction.SUM)
@@ -215,8 +330,9 @@ if __name__ == "__main__":
                 y_true_train = None
                 y_pred_train = None
 
-                for images, labels, width in tqdm(train_tf.shuffle(len(train_tf)).batch(args.batch_size)):
-                    images = tf.transpose(images, [0, 2, 3, 1])
+#                 for images, labels, width in tqdm(balanced_ds.shuffle(len(train_tf)).batch(args.batch_size)):
+                for images, labels, width in tqdm(balanced_ds.take(len(train_tf)).shuffle(len(train_tf)).batch(args.batch_size)):
+                    images = tf.expand_dims(tf.transpose(images, [0, 2, 3, 1]), axis=1)
                     width -= 0.5
 
 #                     torch_labels = np.zeros(len(labels))
@@ -231,11 +347,12 @@ if __name__ == "__main__":
 
                             print(loss)
                     else:
+                        activations = tf.stop_gradient(sparse_model([images, tf.stop_gradient(tf.expand_dims(recon_model.trainable_weights[0], axis=0))]))
                         with tf.GradientTape() as tape:
-                            class_pred, width_pred = classifier_model(data_augmentation(images))
+                            class_pred = classifier_model(activations)
                             class_loss = class_criterion(labels, class_pred)
-                            width_loss = width_criterion(width, width_pred)
-                            loss = width_loss
+#                             width_loss = width_criterion(width, width_pred)
+                            loss = class_loss
 
                     epoch_loss += loss * images.shape[0]
 
@@ -271,19 +388,22 @@ if __name__ == "__main__":
                 test_loss = 0.0
                 test_width_loss = 0.0
                 
-#                 eval_loader = test_loader
+#                 eval_loader = test_tf
 #                 if args.splits == 'all_train':
-#                     eval_loader = train_loader
+#                     eval_loader = train_tf
                 for images, labels, width in tqdm(test_tf.batch(args.batch_size)):
-                    images = tf.transpose(images, [0, 2, 3, 1])
+                    images = tf.expand_dims(tf.transpose(images, [0, 2, 3, 1]), axis=1)
                     width -= 0.5
+                
+                    activations = tf.stop_gradient(sparse_model([images, tf.stop_gradient(tf.expand_dims(recon_model.trainable_weights[0], axis=0))]))
 
-                    pred, width_pred = classifier_model(images)
+                    pred = classifier_model(activations)
                     class_loss = class_criterion(labels, pred)
-                    width_loss = width_criterion(width, width_pred)
+#                     width_loss = width_criterion(width, width_pred)
+                    test_loss += class_loss * images.shape[0]
 
-                    test_loss += (class_loss + width_loss) * images.shape[0]
-                    test_width_loss += width_loss * images.shape[0]
+#                     test_loss += (class_loss + width_loss) * images.shape[0]
+#                     test_width_loss += width_loss * images.shape[0]
 
                     if y_true is None:
                         y_true = labels
@@ -305,15 +425,16 @@ if __name__ == "__main__":
 
                 train_accuracy = accuracy_score(y_true_train, y_pred_train)
 
-                print('epoch={}, i_fold={}, time={:.2f}, train_loss={:.2f}, test_loss={:.2f}, test_width_loss={:.2f}, train_acc={:.2f}, test_f1={:.2f}, test_acc={:.2f}'.format(epoch, i_fold, t2-t1, epoch_loss, test_loss, test_width_loss, train_accuracy, f1, accuracy))
+#                 print('epoch={}, i_fold={}, time={:.2f}, train_loss={:.2f}, test_loss={:.2f}, test_width_loss={:.2f}, train_acc={:.2f}, test_f1={:.2f}, test_acc={:.2f}'.format(epoch, i_fold, t2-t1, epoch_loss, test_loss, test_width_loss, train_accuracy, f1, accuracy))
+                print('epoch={}, i_fold={}, time={:.2f}, train_loss={:.2f}, test_loss={:.2f}, train_acc={:.2f}, test_f1={:.2f}, test_acc={:.2f}'.format(epoch, i_fold, t2-t1, epoch_loss, test_loss, train_accuracy, f1, accuracy))
     #             print(epoch_loss)
-                if f1 >= best_so_far:
+                if epoch_loss < best_so_far:
                     print("found better model")
                     # Save model parameters
                     classifier_model.save(os.path.join(output_dir, "best_classifier_{}.pt".format(i_fold)))
 #                     recon_model.save(os.path.join(output_dir, "best_sparse_model_{}.pt".format(i_fold)))
                     pickle.dump(prediction_optimizer.get_weights(), open(os.path.join(output_dir, 'optimizer_{}.pt'.format(i_fold)), 'wb+'))
-                    best_so_far = f1
+                    best_so_far = epoch_loss
 
             classifier_model = keras.models.load_model(os.path.join(output_dir, "best_classifier_{}.pt".format(i_fold)))
 #             recon_model = keras.models.load_model(os.path.join(output_dir, 'best_sparse_model_{}.pt'.format(i_fold)))
@@ -333,11 +454,12 @@ if __name__ == "__main__":
          torchvision.transforms.Resize((image_height, image_width))
         ])
 
-        test_videos = test_loader.get_all_videos()
+        test_videos = list(test_loader.get_all_videos()) + [v[1] for v in difficult_vids[i_fold]]
 
         test_labels = [vid_f.split('/')[-3] for vid_f in test_videos]
 
-        y_pred, y_true, fn, fp = calculate_onsd_scores(test_videos, test_labels, yolo_model, classifier_model, transform, image_width, image_height)
+#         y_pred, y_true, fn, fp = calculate_onsd_scores(test_videos, test_labels, yolo_model, classifier_model, sparse_model, recon_model, transform, image_width, image_height)
+        y_pred, y_true, fn, fp = calculate_onsd_scores_measured(test_videos, yolo_model, classifier_model, sparse_model, recon_model, transform, crop_width, crop_height)
             
         t2 = time.perf_counter()
 
@@ -361,6 +483,15 @@ if __name__ == "__main__":
             
         i_fold += 1
 
+    if args.splits == 'all_train':
+        transform = torchvision.transforms.Compose(
+        [torchvision.transforms.Grayscale(1),
+         MinMaxScaler(0, 255),
+         torchvision.transforms.Resize((image_height, image_width))
+        ])
+
+        overall_pred, overall_true, fn_ids, fp_ids = calculate_onsd_scores_measured(yolo_model, classifier_model, sparse_model, recon_model, transform, image_width, image_height)  # BUG(review): calculate_onsd_scores_measured expects input_videos as its first argument -- this call raises TypeError; confirm which video list to evaluate here
+        
     fp_fn_file = os.path.join(args.output_dir, 'fp_fn.txt')
     with open(fp_fn_file, 'w+') as in_f:
         in_f.write('FP:\n')
diff --git a/sparse_coding_torch/onsd/train_sparse_model.py b/sparse_coding_torch/onsd/train_sparse_model.py
index c439de7..d09e22e 100644
--- a/sparse_coding_torch/onsd/train_sparse_model.py
+++ b/sparse_coding_torch/onsd/train_sparse_model.py
@@ -13,6 +13,8 @@ import tensorflow as tf
 from sparse_coding_torch.sparse_model import normalize_weights_3d, normalize_weights, SparseCode, load_pytorch_weights, ReconSparse
 import random
 from sparse_coding_torch.utils import plot_filters
+from yolov4.get_bounding_boxes import YoloModel
+import copy
 
 def sparse_loss(images, recon, activations, batch_size, lam, stride):
     loss = 0.5 * (1/batch_size) * tf.math.reduce_sum(tf.math.pow(images - recon, 2))
@@ -56,6 +58,8 @@ if __name__ == "__main__":
     image_height = int(crop_height / args.scale_factor)
     image_width = int(crop_width / args.scale_factor)
     clip_depth = args.clip_depth
+    
+    yolo_model = YoloModel(args.dataset)
 
     output_dir = args.output_dir
     if not os.path.exists(output_dir):
@@ -66,16 +70,14 @@ if __name__ == "__main__":
     with open(os.path.join(output_dir, 'arguments.txt'), 'w+') as out_f:
         out_f.write(str(args))
 
-    splits, dataset = load_onsd_videos(args.batch_size, input_size=(image_height, image_width, clip_depth), mode='all_train')
-    train_idx, test_idx = splits[0]
-    
-    train_sampler = torch.utils.data.SubsetRandomSampler(train_idx)
-    train_loader = torch.utils.data.DataLoader(dataset, batch_size=args.batch_size,
-                                           sampler=train_sampler)
+#     splits, dataset = load_onsd_videos(args.batch_size, input_size=(image_height, image_width, clip_depth), mode='all_train')
+    splits, dataset = load_onsd_videos(args.batch_size, input_size=(image_height, image_width), yolo_model=yolo_model, mode='all_train', n_splits=1)
+    train_idx, test_idx = list(splits)[0]
     
-    print('Loaded', len(train_loader), 'train examples')
+    train_loader = copy.deepcopy(dataset)
+    train_loader.set_indicies(train_idx)
 
-    example_data = next(iter(train_loader))
+    train_tf = tf.data.Dataset.from_tensor_slices((train_loader.get_frames(), train_loader.get_labels(), train_loader.get_widths()))
 
     if args.run_2d:
         inputs = keras.Input(shape=(image_height, image_width, clip_depth))
@@ -117,12 +119,8 @@ if __name__ == "__main__":
         
         num_iters = 0
 
-        for labels, local_batch, vid_f in tqdm(train_loader):
-            local_batch = local_batch.unsqueeze(1)
-            if args.run_2d:
-                images = local_batch.squeeze(1).permute(0, 2, 3, 1).numpy()
-            else:
-                images = local_batch.permute(0, 2, 3, 4, 1).numpy()
+        for images, labels, width in tqdm(train_tf.shuffle(len(train_tf)).batch(args.batch_size)):
+            images = tf.expand_dims(tf.transpose(images, [0, 2, 3, 1]), axis=1)
                 
             activations = tf.stop_gradient(sparse_model([images, tf.stop_gradient(tf.expand_dims(recon_model.trainable_weights[0], axis=0))]))
             
@@ -130,8 +128,8 @@ if __name__ == "__main__":
                 recon = recon_model(activations)
                 loss = sparse_loss(images, recon, activations, args.batch_size, args.lam, args.stride)
 
-            epoch_loss += loss * local_batch.size(0)
-            running_loss += loss * local_batch.size(0)
+            epoch_loss += loss * images.shape[0]
+            running_loss += loss * images.shape[0]
 
             gradients = tape.gradient(loss, recon_model.trainable_weights)
 
@@ -146,7 +144,6 @@ if __name__ == "__main__":
             num_iters += 1
 
         epoch_end = time.perf_counter()
-        epoch_loss /= len(train_loader.sampler)
         
         if args.save_filters and epoch % 2 == 0:
             if args.run_2d:
diff --git a/sparse_coding_torch/onsd/train_valid_classifier.py b/sparse_coding_torch/onsd/train_valid_classifier.py
new file mode 100644
index 0000000..2625f44
--- /dev/null
+++ b/sparse_coding_torch/onsd/train_valid_classifier.py
@@ -0,0 +1,144 @@
+import tensorflow.keras as keras
+import tensorflow as tf
+# tf.debugging.set_log_device_placement(True)
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from tqdm import tqdm
+import argparse
+import os
+from sparse_coding_torch.onsd.load_data import load_onsd_frames
+from sparse_coding_torch.utils import SubsetWeightedRandomSampler, get_sample_weights
+from sparse_coding_torch.sparse_model import SparseCode, ReconSparse, normalize_weights, normalize_weights_3d
+from sparse_coding_torch.onsd.classifier_model import ONSDSharpness
+from sparse_coding_torch.onsd.video_loader import get_yolo_region_onsd
+import time
+import numpy as np
+from sklearn.metrics import f1_score, accuracy_score, confusion_matrix
+import random
+import pickle
+# from sparse_coding_torch.onsd.train_sparse_model import sparse_loss
+from yolov4.get_bounding_boxes import YoloModel
+import torchvision
+from sparse_coding_torch.utils import VideoGrayScaler, MinMaxScaler
+import glob
+import cv2
+import copy
+
+tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)
+
if __name__ == "__main__":
    # Train a frame-quality ("sharpness") classifier on ONSD frames:
    # good frames are positives, bad frames negatives (see FrameLoader).
    parser = argparse.ArgumentParser()
    parser.add_argument('--batch_size', default=24, type=int)
    parser.add_argument('--lr', default=5e-4, type=float)
    parser.add_argument('--epochs', default=20, type=int)
    parser.add_argument('--output_dir', default='./output', type=str)
    parser.add_argument('--seed', default=26, type=int)
    parser.add_argument('--dataset', default='onsd', type=str)

    args = parser.parse_args()

    # Input geometry fed to the classifier.
    crop_height = 512
    crop_width = 512

    image_height = 512
    image_width = 512

    batch_size = args.batch_size

    # Seed every RNG in play so splits and shuffles are reproducible.
    random.seed(args.seed)
    np.random.seed(args.seed)
    torch.manual_seed(args.seed)

    output_dir = args.output_dir
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # Record the exact run configuration next to the checkpoints.
    with open(os.path.join(output_dir, 'arguments.txt'), 'w+') as out_f:
        out_f.write(str(args))

    all_errors = []

    yolo_model = YoloModel(args.dataset)

    # NOTE(review): yolo_model is constructed but load_onsd_frames is called
    # with yolo_model=None, i.e. frames are used un-cropped here — confirm
    # this is intentional.
    splits, dataset = load_onsd_frames(args.batch_size, input_size=(image_height, image_width), mode='balanced', yolo_model=None)

    # Single train/test split; deep-copy the dataset so each side can
    # independently drop the other side's indices.
    train_idx, test_idx = list(splits)[0]

    train_loader = copy.deepcopy(dataset)
    train_loader.set_indicies(train_idx)
    test_loader = copy.deepcopy(dataset)
    test_loader.set_indicies(test_idx)

    train_tf = tf.data.Dataset.from_tensor_slices((train_loader.get_frames(), train_loader.get_labels()))
    test_tf = tf.data.Dataset.from_tensor_slices((test_loader.get_frames(), test_loader.get_labels()))

    classifier_inputs = keras.Input(shape=(image_height, image_width, 3))
    classifier_outputs = ONSDSharpness()(classifier_inputs)

    classifier_model = keras.Model(inputs=classifier_inputs, outputs=classifier_outputs)

    prediction_optimizer = keras.optimizers.Adam(learning_rate=args.lr)

    # The model emits logits; sigmoid is applied only at evaluation time.
    criterion = keras.losses.BinaryCrossentropy(from_logits=True, reduction=keras.losses.Reduction.SUM)

    best_so_far = float('inf')

    for epoch in range(args.epochs):
        epoch_loss = 0
        t1 = time.perf_counter()

        for images, labels in tqdm(train_tf.shuffle(len(train_tf)).batch(args.batch_size)):
            # CHW -> HWC, float32. No further preprocessing is applied.
            images = tf.cast(tf.transpose(images, [0, 2, 3, 1]), tf.float32)

            with tf.GradientTape() as tape:
                pred = classifier_model(images)
                loss = criterion(labels, pred)

            # Loss is reduction=SUM, so weight by batch size for the total.
            epoch_loss += loss * images.shape[0]

            gradients = tape.gradient(loss, classifier_model.trainable_weights)

            prediction_optimizer.apply_gradients(zip(gradients, classifier_model.trainable_weights))

        t2 = time.perf_counter()

        test_count = 0
        test_correct = 0

        for images, labels in tqdm(test_tf.batch(args.batch_size)):
            # BUGFIX: use the exact same transform as training. Previously
            # evaluation additionally ran
            # tf.keras.applications.densenet.preprocess_input, so the model
            # was scored on a different input distribution than the one it
            # was trained on.
            images = tf.cast(tf.transpose(images, [0, 2, 3, 1]), tf.float32)

            pred = tf.math.sigmoid(classifier_model(images))

            # Threshold sigmoid output at 0.5 via round().
            for p, l in zip(pred, labels):
                if round(float(p)) == float(l):
                    test_correct += 1
                test_count += 1

        t2 = time.perf_counter()

        # Guard against an empty test split (avoids ZeroDivisionError).
        test_acc = test_correct / test_count if test_count else 0.0
        print('epoch={}, time={:.2f}, train_loss={:.4f}, test_acc={:.2f}'.format(epoch, t2-t1, epoch_loss, test_acc))

        # Checkpoint on best (summed) training loss, not test accuracy.
        if epoch_loss < best_so_far:
            print("found better model")
            # Save model parameters and optimizer state.
            classifier_model.save(os.path.join(output_dir, "best_classifier.pt"))
            pickle.dump(prediction_optimizer.get_weights(), open(os.path.join(output_dir, 'optimizer.pt'), 'wb+'))
            best_so_far = epoch_loss
diff --git a/sparse_coding_torch/onsd/video_loader.py b/sparse_coding_torch/onsd/video_loader.py
index d6b6b3c..0716ece 100644
--- a/sparse_coding_torch/onsd/video_loader.py
+++ b/sparse_coding_torch/onsd/video_loader.py
@@ -24,51 +24,122 @@ import random
 import cv2
 from yolov4.get_bounding_boxes import YoloModel
 import tensorflow as tf
+import torchvision
+
+from matplotlib import pyplot as plt
+from matplotlib import cm
 
 def get_participants(filenames):
     return [f.split('/')[-2] for f in filenames]
     
def get_yolo_region_onsd(yolo_model, frame, crop_width, crop_height, do_augmentation, label='', num_augmented_samples=10):
    """Crop the optic-nerve region of a single ONSD frame using YOLO detections.

    Keeps the highest-scoring nerve (class 0) and eye (class 1) boxes, then
    anchors the crop horizontally on the nerve center and vertically 65 px
    below the bottom edge of the eye box. With ``do_augmentation`` the frame
    is recentered on that anchor and several randomly rotated/scaled crops
    are produced (the scale range depends on ``label``); otherwise a single
    crop is returned.

    Args:
        yolo_model: detector exposing ``get_bounding_boxes_v5`` on an HWC image.
        frame: CHW image tensor.
        crop_width: width of the extracted region in pixels.
        crop_height: height of the extracted region in pixels.
        do_augmentation: when True, return ``num_augmented_samples`` augmented crops.
        label: 'Positives' or 'Negatives'; selects the augmentation scale range.
        num_augmented_samples: number of crops generated per frame when augmenting
            (previously a hard-coded constant).

    Returns:
        A list of cropped CHW tensors, or ``None`` when either the eye or the
        nerve was not detected.
    """
    orig_height = frame.size(1)
    orig_width = frame.size(2)

    # The detector expects an HWC numpy image.
    bounding_boxes, classes, scores = yolo_model.get_bounding_boxes_v5(frame.swapaxes(0, 2).swapaxes(0, 1).numpy())

    # Keep only the best-scoring detection of each class.
    eye_bounding_box = (None, 0.0)
    nerve_bounding_box = (None, 0.0)

    for bb, class_pred, score in zip(bounding_boxes, classes, scores):
        if class_pred == 0 and score > nerve_bounding_box[1]:
            nerve_bounding_box = (bb, score)
        elif class_pred == 1 and score > eye_bounding_box[1]:
            eye_bounding_box = (bb, score)

    eye_bounding_box = eye_bounding_box[0]
    nerve_bounding_box = nerve_bounding_box[0]

    # Both structures are required to anchor the crop.
    if eye_bounding_box is None or nerve_bounding_box is None:
        return None

    # Box coordinates are normalized; convert to pixel coordinates.
    nerve_center_x = round((nerve_bounding_box[2] + nerve_bounding_box[0]) / 2 * orig_width)

    # Bottom edge of the eye box; the nerve measurement region sits a fixed
    # 65 px below it (empirical offset).
    eye_bottom_y = round(eye_bounding_box[3] * orig_height)

    crop_center_x = nerve_center_x
    crop_center_y = eye_bottom_y + 65

    all_frames = []
    if do_augmentation:
        frame_center_y = int(orig_height / 2)
        frame_center_x = int(orig_width / 2)

        # Translate so the crop anchor coincides with the frame center;
        # RandomAffine then rotates/scales about that point.
        shift_x = (frame_center_x - crop_center_x)
        shift_y = (frame_center_y - crop_center_y)
        frame = torchvision.transforms.functional.affine(frame, angle=0, translate=(shift_x, shift_y), scale=1.0, shear=0.0)

        transform_list = []
        # Positives are zoomed in (>= 1.0), negatives zoomed out (<= 1.0);
        # any other label gets no random transform.
        if label == 'Positives':
            transform_list.append(torchvision.transforms.RandomAffine(degrees=5, scale=(1.0, 1.7)))
        elif label == 'Negatives':
            transform_list.append(torchvision.transforms.RandomAffine(degrees=5, scale=(0.5, 1.0)))
        transform = torchvision.transforms.Compose(transform_list)

        for _ in range(num_augmented_samples):
            aug_frame = transform(frame)
            aug_frame = aug_frame[:, frame_center_y:frame_center_y + crop_height, frame_center_x - int(crop_width / 2):frame_center_x + int(crop_width / 2)]
            all_frames.append(aug_frame)
    else:
        # NOTE(review): the x lower bound is clamped to 0 here but not in the
        # augmentation branch — confirm frames are wide enough there.
        trimmed_frame = frame[:, crop_center_y:crop_center_y + crop_height, max(crop_center_x - int(crop_width / 2), 0):crop_center_x + int(crop_width / 2)]
        all_frames.append(trimmed_frame)

    return all_frames
 
-class ONSDLoader:
+class ONSDGoodFramesLoader:
     def __init__(self, video_path, clip_width, clip_height, transform=None, yolo_model=None):
         self.transform = transform
         self.labels = [name for name in listdir(video_path) if isdir(join(video_path, name))]
         
         self.count = 0
         
+        valid_frames = {}
+        invalid_frames = {}
+        with open('sparse_coding_torch/onsd/good_frames_onsd.csv', 'r') as valid_in:
+            reader = csv.DictReader(valid_in)
+            for row in reader:
+                vid = row['video'].strip()
+                good_frames = row['good_frames'].strip()
+                bad_frames = row['bad_frames'].strip()
+                if good_frames:
+                    for subrange in good_frames.split(';'):
+                        splitrange = subrange.split('-')
+                        valid_frames[vid] = (int(splitrange[0]), int(splitrange[1]))
+                if bad_frames:
+                    for subrange in bad_frames.split(';'):
+                        splitrange = subrange.split('-')
+                        invalid_frames[vid] = (int(splitrange[0]), int(splitrange[1]))
+        
         onsd_widths = {}
         with open(join(video_path, 'onsd_widths.csv'), 'r') as width_in:
             reader = csv.reader(width_in)
@@ -77,48 +148,69 @@ class ONSDLoader:
                 onsd_widths[row[2]] = round(sum(width_vals) / len(width_vals), 2)
         
         clip_cache_file = 'clip_cache_onsd_{}_{}.pt'.format(clip_width, clip_height)
+        difficult_cache_file = 'difficult_vid_cache_onsd_{}_{}.pt'.format(clip_width, clip_height)
         
         self.videos = []
         for label in self.labels:
             self.videos.extend([(label, abspath(join(video_path, label, f)), f) for f in glob.glob(join(video_path, label, '*', '*.mp4'))])
             
+        self.difficult_vids = []
+            
         self.clips = []
         
         if exists(clip_cache_file):
             self.clips = torch.load(open(clip_cache_file, 'rb'))
+            self.difficult_vids = torch.load(open(difficult_cache_file, 'rb'))
         else:
             vid_idx = 0
-            for label, path, _ in tqdm(self.videos):
+            for txt_label, path, _ in tqdm(self.videos):
                 vc = tv.io.read_video(path)[0].permute(3, 0, 1, 2)
                 
-                width_key = path.split('/')[-1]
-                if width_key not in onsd_widths:
-                    continue
-                width = onsd_widths[width_key]
+#                 width_key = path.split('/')[-1]
+#                 if width_key not in onsd_widths:
+#                     continue
+                width = 0.0
                 
-                for j in range(vc.size(1)):
-                    frame = vc[:, j, :, :]
-                    
-                    if yolo_model is not None:
-                        frame = get_yolo_region_onsd(yolo_model, frame, clip_width, clip_height)
-                        
-                    if frame is None:
-                        continue
+                frame_key = path.split('/')[-2]
+                if frame_key in valid_frames:
+                    start_range, end_range = valid_frames[frame_key]
+                
+                    for j in range(start_range, end_range, 1):
+                        if j == vc.size(1):
+                            break
+                        frame = vc[:, j, :, :]
 
-                    if self.transform:
-                        frame = self.transform(frame)
-                        
+                        if yolo_model is not None:
+                            all_frames = get_yolo_region_onsd(yolo_model, frame, clip_width, clip_height, True, txt_label)
+                        else:
+                            all_frames = [frame]
+
+                        if all_frames is None or len(all_frames) == 0:
+                            continue
+
+                        if self.transform:
+                            all_frames = [self.transform(frm) for frm in all_frames]
+
+                        label = self.videos[vid_idx][0]
+                        if label == 'Positives':
+                            label = np.array(1.0)
+                        elif label == 'Negatives':
+                            label = np.array(0.0)
+
+                        for frm in all_frames:
+                            self.clips.append((label, frm.numpy(), self.videos[vid_idx][2], width))
+                else:
                     label = self.videos[vid_idx][0]
                     if label == 'Positives':
                         label = np.array(1.0)
                     elif label == 'Negatives':
                         label = np.array(0.0)
-
-                    self.clips.append((label, frame.numpy(), self.videos[vid_idx][2], width))
+                    self.difficult_vids.append((label, self.videos[vid_idx][2]))
 
                 vid_idx += 1
                 
             torch.save(self.clips, open(clip_cache_file, 'wb+'))
+            torch.save(self.difficult_vids, open(difficult_cache_file, 'wb+'))
             
         num_positive = len([clip[0] for clip in self.clips if clip[0] == 1.0])
         num_negative = len([clip[0] for clip in self.clips if clip[0] == 0.0])
@@ -128,6 +220,9 @@ class ONSDLoader:
         print('Loaded', num_positive, 'positive examples.')
         print('Loaded', num_negative, 'negative examples.')
         
+    def get_difficult_vids(self):
+        return self.difficult_vids
+        
     def get_filenames(self):
         return [self.clips[i][2] for i in range(len(self.clips))]
     
@@ -161,14 +256,21 @@ class ONSDLoader:
             
     def __iter__(self):
         return self
-    
-# class ONSDLoader(Dataset):
-    
-#     def __init__(self, video_path, clip_width, clip_height, transform=None, augmentation=None, yolo_model=None):
+
+# class ONSDLoader:
+#     def __init__(self, video_path, clip_width, clip_height, transform=None, yolo_model=None):
 #         self.transform = transform
-#         self.augmentation = augmentation
 #         self.labels = [name for name in listdir(video_path) if isdir(join(video_path, name))]
         
+#         self.count = 0
+        
+#         onsd_widths = {}
+#         with open(join(video_path, 'onsd_widths.csv'), 'r') as width_in:
+#             reader = csv.reader(width_in)
+#             for row in reader:
+#                 width_vals = [float(val) for val in row[3:] if val != '']
+#                 onsd_widths[row[2]] = round(sum(width_vals) / len(width_vals), 2)
+        
 #         clip_cache_file = 'clip_cache_onsd_{}_{}.pt'.format(clip_width, clip_height)
         
 #         self.videos = []
@@ -184,26 +286,40 @@ class ONSDLoader:
 #             for label, path, _ in tqdm(self.videos):
 #                 vc = tv.io.read_video(path)[0].permute(3, 0, 1, 2)
                 
+#                 width_key = path.split('/')[-1]
+#                 if width_key not in onsd_widths:
+#                     continue
+#                 width = onsd_widths[width_key]
+                
 #                 for j in range(vc.size(1)):
 #                     frame = vc[:, j, :, :]
                     
 #                     if yolo_model is not None:
-#                         frame = get_yolo_region_onsd(yolo_model, frame, clip_width, clip_height)
+#                         all_frames = get_yolo_region_onsd(yolo_model, frame, clip_width, clip_height)
+#                     else:
+#                         all_frames = [frame]
                         
-#                     if frame is None:
+#                     if all_frames is None or len(all_frames) == 0:
 #                         continue
 
 #                     if self.transform:
-#                         frame = self.transform(frame)
-
-#                     self.clips.append((self.videos[vid_idx][0], frame, self.videos[vid_idx][2]))
+#                         all_frames = [self.transform(frm) for frm in all_frames]
+                        
+#                     label = self.videos[vid_idx][0]
+#                     if label == 'Positives':
+#                         label = np.array(1.0)
+#                     elif label == 'Negatives':
+#                         label = np.array(0.0)
+                        
+#                     for frm in all_frames:
+#                         self.clips.append((label, frm.numpy(), self.videos[vid_idx][2], width))
 
 #                 vid_idx += 1
                 
 #             torch.save(self.clips, open(clip_cache_file, 'wb+'))
             
-#         num_positive = len([clip[0] for clip in self.clips if clip[0] == 'Positives'])
-#         num_negative = len([clip[0] for clip in self.clips if clip[0] == 'Negatives'])
+#         num_positive = len([clip[0] for clip in self.clips if clip[0] == 1.0])
+#         num_negative = len([clip[0] for clip in self.clips if clip[0] == 0.0])
         
 #         random.shuffle(self.clips)
         
@@ -212,20 +328,197 @@ class ONSDLoader:
         
 #     def get_filenames(self):
 #         return [self.clips[i][2] for i in range(len(self.clips))]
-        
-#     def get_video_labels(self):
-#         return [self.videos[i][0] for i in range(len(self.videos))]
+    
+#     def get_all_videos(self):
+#         return set([self.clips[i][2] for i in range(len(self.clips))])
         
 #     def get_labels(self):
 #         return [self.clips[i][0] for i in range(len(self.clips))]
     
-#     def __getitem__(self, index):
-#         label, frame, vid_f = self.clips[index]
-#         if self.augmentation:
-#             frame = self.augmentation(frame)
+#     def set_indicies(self, iter_idx):
+#         new_clips = []
+#         for i, clip in enumerate(self.clips):
+#             if i in iter_idx:
+#                 new_clips.append(clip)
+                
+#         self.clips = new_clips
+        
+#     def get_frames(self):
+#         return [frame for _, frame, _, _ in self.clips]
+    
+#     def get_widths(self):
+#         return [width for _, _, _, width in self.clips]
+    
+#     def __next__(self):
+#         if self.count < len(self.clips):
+#             label, frame, vid_f, widths = self.clips[self.count]
+#             self.count += 1
+#             return label, frame, widths
+#         else:
+#             raise StopIteration
+            
+#     def __iter__(self):
+#         return self
+
class FrameLoader:
    """Frame-level dataset for ONSD frame-quality classification.

    Frames inside a video's labeled 'good' range(s) become positives (1.0),
    frames inside its 'bad' range(s) become negatives (0.0). Extracted clips
    are cached on disk so later runs skip video decoding.

    NOTE: the cache file name does not encode the label CSV contents; delete
    'clip_cache_onsd_frames_*.pt' after editing good_frames_onsd.csv.
    """

    def __init__(self, video_path, clip_width, clip_height, transform=None, yolo_model=None):
        self.transform = transform
        self.labels = [name for name in listdir(video_path) if isdir(join(video_path, name))]

        # Iterator cursor for __next__.
        self.count = 0

        # video id -> list of (start, end) frame ranges. The CSV may list
        # several ';'-separated subranges per video; BUGFIX: accumulate all
        # of them instead of overwriting so only the last subrange survived.
        valid_frames = {}
        invalid_frames = {}
        with open('sparse_coding_torch/onsd/good_frames_onsd.csv', 'r') as valid_in:
            reader = csv.DictReader(valid_in)
            for row in reader:
                vid = row['video'].strip()
                good_frames = row['good_frames'].strip()
                bad_frames = row['bad_frames'].strip()
                if good_frames:
                    for subrange in good_frames.split(';'):
                        splitrange = subrange.split('-')
                        valid_frames.setdefault(vid, []).append((int(splitrange[0]), int(splitrange[1])))
                if bad_frames:
                    for subrange in bad_frames.split(';'):
                        splitrange = subrange.split('-')
                        invalid_frames.setdefault(vid, []).append((int(splitrange[0]), int(splitrange[1])))

        clip_cache_file = 'clip_cache_onsd_frames_{}_{}.pt'.format(clip_width, clip_height)

        self.videos = []
        for label in self.labels:
            self.videos.extend([(label, abspath(join(video_path, label, f)), f) for f in glob.glob(join(video_path, label, '*', '*.mp4'))])

        self.clips = []

        if exists(clip_cache_file):
            self.clips = torch.load(open(clip_cache_file, 'rb'))
        else:
            for txt_label, path, vid_f in tqdm(self.videos):
                # (T, H, W, C) -> (C, T, H, W)
                vc = tv.io.read_video(path)[0].permute(3, 0, 1, 2)

                # Videos are keyed by their parent directory name.
                frame_key = path.split('/')[-2]

                for start_range, end_range in valid_frames.get(frame_key, []):
                    self._extract_range(vc, start_range, end_range, np.array(1.0), txt_label, vid_f, clip_width, clip_height, yolo_model)
                for start_range, end_range in invalid_frames.get(frame_key, []):
                    self._extract_range(vc, start_range, end_range, np.array(0.0), txt_label, vid_f, clip_width, clip_height, yolo_model)

            torch.save(self.clips, open(clip_cache_file, 'wb+'))

        num_positive = len([clip[0] for clip in self.clips if clip[0] == 1.0])
        num_negative = len([clip[0] for clip in self.clips if clip[0] == 0.0])

        random.shuffle(self.clips)

        print('Loaded', num_positive, 'positive examples.')
        print('Loaded', num_negative, 'negative examples.')

    def _extract_range(self, vc, start_range, end_range, label, txt_label, vid_f, clip_width, clip_height, yolo_model):
        """Append (label, frame, video) clips for one labeled frame range.

        Shared by the good- and bad-range paths, which previously duplicated
        this loop. Frames are YOLO-cropped (with augmentation) when a model
        is supplied, then trimmed vertically before the optional transform.
        """
        for j in range(start_range, end_range):
            # BUGFIX: clamp with >= (ranges may start past the video end).
            if j >= vc.size(1):
                break

            frame = vc[:, j, :, :]

            if yolo_model is not None:
                all_frames = get_yolo_region_onsd(yolo_model, frame, clip_width, clip_height, True, txt_label)
            else:
                all_frames = [frame]

            if all_frames is None or len(all_frames) == 0:
                continue

            # Trim a fixed band (70 px top, 200 px bottom) off each crop.
            all_frames = [frm[:, 70:frm.size(1) - 200, :] for frm in all_frames]

            if self.transform:
                # Skip crops emptied by the trim above.
                all_frames = [self.transform(frm) for frm in all_frames if frm.size(1) > 0 and frm.size(2) > 0]

            for frm in all_frames:
                self.clips.append((label, frm.numpy(), vid_f))

    def get_filenames(self):
        """Per-clip video identifiers (one entry per clip, with repeats)."""
        return [self.clips[i][2] for i in range(len(self.clips))]

    def get_all_videos(self):
        """Set of distinct video identifiers present in the dataset."""
        return set([self.clips[i][2] for i in range(len(self.clips))])

    def get_labels(self):
        """Per-clip labels, aligned with get_frames()/get_filenames()."""
        return [self.clips[i][0] for i in range(len(self.clips))]

    def set_indicies(self, iter_idx):
        """Keep only the clips whose positions are listed in iter_idx."""
        # Membership test against a set: O(n) instead of O(n^2).
        keep = set(iter_idx)
        self.clips = [clip for i, clip in enumerate(self.clips) if i in keep]

    def get_frames(self):
        """Per-clip frame arrays, aligned with get_labels()."""
        return [frame for _, frame, _ in self.clips]

    def __next__(self):
        if self.count < len(self.clips):
            label, frame, vid_f = self.clips[self.count]
            self.count += 1
            return label, frame
        else:
            raise StopIteration

    def __iter__(self):
        return self
\ No newline at end of file
diff --git a/sparse_coding_torch/pnb/pnb_regression.py b/sparse_coding_torch/pnb/pnb_regression.py
index 1fc624a..d352064 100644
--- a/sparse_coding_torch/pnb/pnb_regression.py
+++ b/sparse_coding_torch/pnb/pnb_regression.py
@@ -1,4 +1,4 @@
-from sparse_coding_torch.pnb.video_loader import classify_nerve_is_right
+from sparse_coding_torch.pnb.video_loader import classify_nerve_is_right, load_pnb_region_labels
 import math
 from tqdm import tqdm
 import glob
@@ -12,6 +12,173 @@ import tensorflow as tf
 from yolov4.get_bounding_boxes import YoloModel
 import torchvision
 from sklearn.metrics import f1_score, accuracy_score, confusion_matrix
+import pickle as pkl
+
def _needle_to_obj_distance(yolo_model, frame, yolo_class, is_right, orig_width, orig_height):
    """Run YOLO on one frame and return the pixel distance from the needle
    tip to the centre of the first detected ``yolo_class`` box.

    Returns None when either the target object or the needle (class 2) is
    not detected, so the caller can skip the frame.
    """
    bounding_boxes, classes, scores = yolo_model.get_bounding_boxes_v5(frame)

    obj_bb = [bb for bb, class_pred in zip(bounding_boxes, classes) if class_pred == yolo_class]
    needle_bb = [bb for bb, class_pred in zip(bounding_boxes, classes) if class_pred == 2]

    if not obj_bb or not needle_bb:
        return None

    obj_bb = obj_bb[0]
    needle_bb = needle_bb[0]

    # Centre of the object's (normalized) bounding box, in pixels.
    obj_x = round((obj_bb[2] + obj_bb[0]) / 2 * orig_width)
    obj_y = round((obj_bb[3] + obj_bb[1]) / 2 * orig_height)

    # Needle tip: bottom-right corner of its box, or bottom-left when the
    # nerve is on the left side of the image.
    needle_x = needle_bb[2] * orig_width
    needle_y = needle_bb[3] * orig_height
    if not is_right:
        needle_x = needle_bb[0] * orig_width

    return math.hypot(obj_x - needle_x, obj_y - needle_y)


def get_distance_data_sme_labels(yolo_model, input_videos, yolo_class):
    """Build (distance, label, path) samples using SME-annotated frame regions.

    Frame selection per video:
      * positive video with SME annotations: frames from the labelled
        negative regions (label 0.0) and positive regions (label 1.0);
      * positive video without annotations: the last 39 frames, label 1.0;
      * negative video: every frame, label 0.0.
    Frames where YOLO fails to detect both the target object and the needle
    are skipped.

    Args:
        yolo_model: model exposing ``get_bounding_boxes_v5(frame)``.
        input_videos: iterable of (label_str, path, vid_f) triples, where
            label_str is 'Positives' or 'Negatives'.
        yolo_class: integer class id of the target object (needle is class 2).

    Returns:
        List of (distance_in_pixels, frame_label, video_path) tuples.
    """
    region_labels = load_pnb_region_labels('sme_region_labels.csv')

    all_data = []
    for label_str, path, vid_f in tqdm(input_videos):
        # (T, H, W, C) -> (C, T, H, W)
        vc = torchvision.io.read_video(path)[0].permute(3, 0, 1, 2)
        is_right = classify_nerve_is_right(yolo_model, vc)

        orig_height = vc.size(2)
        orig_width = vc.size(3)

        if label_str == 'Positives':
            label = 1.0
        elif label_str == 'Negatives':
            label = 0.0
        else:
            # Unknown folder name: the original code fell through with
            # `label` unbound (NameError); skip the video explicitly.
            continue

        def frame_at(j):
            # (C, H, W) -> (H, W, C) numpy image for the YOLO model.
            return vc[:, j, :, :].swapaxes(0, 2).swapaxes(0, 1).numpy()

        def add_sample(j, frame_label):
            # Append one (distance, label, path) sample, or nothing when
            # detection fails on this frame.
            dist = _needle_to_obj_distance(
                yolo_model, frame_at(j), yolo_class, is_right, orig_width, orig_height)
            if dist is not None:
                all_data.append((dist, frame_label, path))

        # Person id, e.g. ".../<something> 123 ..." -> "123"; used to look
        # up the SME region annotations.
        person_idx = path.split('/')[-1].split(' ')[1]

        if label == 1.0 and person_idx in region_labels:
            negative_regions, positive_regions = region_labels[person_idx]

            # Guard against an empty region string (the positive branch
            # already had this guard; negatives crashed on int('')).
            if negative_regions:
                for sub_region in negative_regions.split(','):
                    bounds = sub_region.split('-')
                    # Ranges are inclusive on both ends.
                    for j in range(int(bounds[0]), int(bounds[1]) + 1):
                        add_sample(j, 0.0)

            if positive_regions:
                for sub_region in positive_regions.split(','):
                    bounds = sub_region.split('-')
                    start_loc = int(bounds[0])
                    if vc.size(1) <= start_loc:
                        continue  # region starts past the end of the video
                    if len(bounds) == 1:
                        # A single frame index.
                        add_sample(start_loc, 1.0)
                    else:
                        # 'end' means "to the last frame"; otherwise the
                        # range is half-open, as in the original.
                        if bounds[1].strip().lower() == 'end':
                            end_loc = vc.size(1)
                        else:
                            end_loc = int(bounds[1])
                        for j in range(start_loc, end_loc):
                            add_sample(j, 1.0)

        elif label == 1.0:
            # No SME regions: sample the final 39 frames, where the needle
            # is presumably in place — TODO confirm with the annotators.
            for k in range(vc.size(1) - 1, vc.size(1) - 40, -1):
                add_sample(k, 1.0)

        else:
            # Negative video: every frame is a negative example.
            for j in range(vc.size(1)):
                add_sample(j, 0.0)

    return all_data
+
     
 def get_distance_data(yolo_model, input_videos, yolo_class):
     all_data = []
@@ -100,17 +267,25 @@ for train_idx, test_idx in splits:
 
     print('Processing data...')
     train_videos = [ex for i, ex in enumerate(videos) if i in train_idx]
-    if test_idx:
+    if len(test_idx) > 0:
         test_videos = [ex for i, ex in enumerate(videos) if i in test_idx]
         assert not set(train_videos).intersection(set(test_videos))
     else:
         test_videos = train_videos
     
 #     nerve_train_data = get_distance_data(yolo_model, train_videos, 1)
-    vessel_train_data = get_distance_data(yolo_model, train_videos, 0)
+    if not os.path.exists('sparse_coding_torch/pnb/regression_train.pkl'):
+        vessel_train_data = get_distance_data_sme_labels(yolo_model, train_videos, 0)
+        pkl.dump(vessel_train_data, open('sparse_coding_torch/pnb/regression_train.pkl', 'wb+'))
+    else:
+        vessel_train_data = pkl.load(open('sparse_coding_torch/pnb/regression_train.pkl', 'rb'))
     
 #     nerve_test_data = get_distance_data(yolo_model, test_videos, 1)
-    vessel_test_data = get_distance_data(yolo_model, test_videos, 0)
+    if not os.path.exists('sparse_coding_torch/pnb/regression_test.pkl'):
+        vessel_test_data = get_distance_data_sme_labels(yolo_model, test_videos, 0)
+        pkl.dump(vessel_test_data, open('sparse_coding_torch/pnb/regression_test.pkl', 'wb+'))
+    else:
+        vessel_test_data = pkl.load(open('sparse_coding_torch/pnb/regression_test.pkl', 'rb'))
 
 #     train_nerve_X = np.array([nerve_train_data[i][0] for i in range(len(nerve_train_data))]).reshape(-1, 1)
 #     test_nerve_X = np.array([nerve_test_data[i][0] for i in range(len(nerve_test_data))]).reshape(-1, 1)
@@ -133,14 +308,19 @@ for train_idx, test_idx in splits:
     
     vessel_clf = LogisticRegression().fit(train_vessel_X, train_vessel_Y)
     vessel_score = vessel_clf.score(test_vessel_X, test_vessel_Y)
+    
+#     print(vessel_clf.get_params(deep=True))
 
     print(vessel_clf.intercept_, vessel_clf.coef_)
-#     for j in range(len(train_vessel_X)):
-#         print(vessel_clf.predict(train_vessel_X[j].reshape(-1, 1)))
-#         print(tf.math.sigmoid(vessel_clf.intercept_ + vessel_clf.coef_[0][0] * train_vessel_X[j]))
-#         print(train_vessel_X[j])
-#         print('---------------------------------------')
-#     raise Exception
+#     random.shuffle(train_vessel_X)
+    for j in range(len(train_vessel_X)):
+        if train_vessel_Y[j][0] == 1:
+            print(vessel_clf.predict_proba(train_vessel_X[j].reshape(-1, 1)))
+            print(tf.math.sigmoid(vessel_clf.intercept_ + vessel_clf.coef_[0][0] * train_vessel_X[j]))
+            print(train_vessel_X[j])
+            print(train_vessel_Y[j])
+            print('---------------------------------------')
+            raise Exception
     
     print('Vessel accuracy: {:.2f}'.format(vessel_score))
     
diff --git a/sparse_coding_torch/sparse_model.py b/sparse_coding_torch/sparse_model.py
index 12ef1be..934293d 100644
--- a/sparse_coding_torch/sparse_model.py
+++ b/sparse_coding_torch/sparse_model.py
@@ -15,15 +15,16 @@ def load_pytorch_weights(file_path):
     return weight_tensor
 
 # @tf.function
-def do_recon(filters_1, filters_2, filters_3, filters_4, filters_5, activations, image_height, image_width, stride, padding='VALID'):
+# def do_recon(filters_1, filters_2, filters_3, filters_4, filters_5, activations, image_height, image_width, stride, padding='VALID'):
def do_recon(filters, activations, image_height, image_width, stride, padding='VALID'):
    """Reconstruct a batch of single-channel images from sparse activations.

    Applies one transposed 2-D convolution with the dictionary ``filters``
    to map activation maps back to image space.  (This replaces the old
    five-filter variant, whose dead commented-out code is removed here.)

    Args:
        filters: dictionary tensor in ``tf.nn.conv2d_transpose`` filter
            layout (kh, kw, output_channels=1, input_channels).
        activations: sparse codes, shape (batch, ah, aw, input_channels).
        image_height: output image height, in pixels.
        image_width: output image width, in pixels.
        stride: stride used by the matching encoding convolution.
        padding: 'VALID' or 'SAME'; must match the encoder.

    Returns:
        Tensor of shape (batch, image_height, image_width, 1).
    """
    batch_size = tf.shape(activations)[0]
    recon = tf.nn.conv2d_transpose(
        activations, filters,
        output_shape=(batch_size, image_height, image_width, 1),
        strides=stride, padding=padding)

    return recon
 
@@ -107,7 +108,8 @@ class SparseCode(keras.layers.Layer):
         activations = tf.nn.relu(u - self.lam)
 
         if self.run_2d:
-            recon = do_recon(filters[0], filters[1], filters[2], filters[3], filters[4], activations, self.image_height, self.image_width, self.stride, self.padding)
+            recon = do_recon(filters, activations, self.image_height, self.image_width, self.stride, self.padding)
+#             recon = do_recon(filters[0], filters[1], filters[2], filters[3], filters[4], activations, self.image_height, self.image_width, self.stride, self.padding)
         else:
             recon = do_recon_3d(filters, activations, self.image_height, self.image_width, self.clip_depth, self.stride, self.padding)
 
@@ -115,12 +117,13 @@ class SparseCode(keras.layers.Layer):
         g = -1 * u
 
         if self.run_2d:
-            e1, e2, e3, e4, e5 = tf.split(e, 5, axis=3)
-            g += conv_error(filters[0], e1, self.stride, self.padding)
-            g += conv_error(filters[1], e2, self.stride, self.padding)
-            g += conv_error(filters[2], e3, self.stride, self.padding)
-            g += conv_error(filters[3], e4, self.stride, self.padding)
-            g += conv_error(filters[4], e5, self.stride, self.padding)
+            g += conv_error(filters, e, self.stride, self.padding)
+#             e1, e2, e3, e4, e5 = tf.split(e, 5, axis=3)
+#             g += conv_error(filters[0], e1, self.stride, self.padding)
+#             g += conv_error(filters[1], e2, self.stride, self.padding)
+#             g += conv_error(filters[2], e3, self.stride, self.padding)
+#             g += conv_error(filters[3], e4, self.stride, self.padding)
+#             g += conv_error(filters[4], e5, self.stride, self.padding)
         else:
             convd_error = conv_error_3d(filters, e, self.stride, self.padding)
 
@@ -143,8 +146,7 @@ class SparseCode(keras.layers.Layer):
 
 #     @tf.function
     def call(self, images, filters):
-        if not self.run_2d:
-            filters = tf.squeeze(filters, axis=0)
+        filters = tf.squeeze(filters, axis=0)
         if self.padding == 'SAME':
             if self.run_2d:
                 output_shape = (len(images), self.image_height // self.stride, self.image_width // self.stride, self.out_channels)
@@ -216,6 +218,7 @@ class ReconSparse(keras.Model):
 #     @tf.function
     def call(self, activations):
         if self.run_2d:
+#             recon = do_recon(self.filters_1, self.filters_2, self.filters_3, self.filters_4, self.filters_5, activations, self.image_height, self.image_width, self.stride, self.padding)
             recon = do_recon(self.filters_1, self.filters_2, self.filters_3, self.filters_4, self.filters_5, activations, self.image_height, self.image_width, self.stride, self.padding)
         else:
             recon = do_recon_3d(self.filters, activations, self.image_height, self.image_width, self.clip_depth, self.stride, self.padding)
-- 
GitLab