From 0cea7fe693ea6435e6419df82d55df09c57982e7 Mon Sep 17 00:00:00 2001
From: Filip Lux <xlux@fi.muni.cz>
Date: Mon, 27 Nov 2023 16:33:53 +0100
Subject: [PATCH] clean repository

---
 DATA/challenge                                |    1 -
 DATA/train                                    |    1 -
 global_tracking.ipynb                         |  336 ++--
 tracking/blender/blender_tools.py             | 1420 -------------
 tracking/blender/buckets_with_graphics_pb2.py |  947 ---------
 .../blender/buckets_with_graphics_pb2_grpc.py |  538 -----
 tracking/display_graph.ipynb                  | 1773 -----------------
 tracking/divergence_tools.py                  |  467 ++++-
 tracking/embedtrack.py                        |    4 +-
 tracking/global_tracker.py                    |  437 ++--
 tracking/sys_tools.py                         |    5 +-
 11 files changed, 842 insertions(+), 5087 deletions(-)
 delete mode 120000 DATA/challenge
 delete mode 120000 DATA/train
 delete mode 100644 tracking/blender/blender_tools.py
 delete mode 100644 tracking/blender/buckets_with_graphics_pb2.py
 delete mode 100644 tracking/blender/buckets_with_graphics_pb2_grpc.py
 delete mode 100644 tracking/display_graph.ipynb

diff --git a/DATA/challenge b/DATA/challenge
deleted file mode 120000
index e3a5110..0000000
--- a/DATA/challenge
+++ /dev/null
@@ -1 +0,0 @@
-../EmbedTrack/ctc_raw_data/challenge
\ No newline at end of file
diff --git a/DATA/train b/DATA/train
deleted file mode 120000
index dba3cbe..0000000
--- a/DATA/train
+++ /dev/null
@@ -1 +0,0 @@
-../EmbedTrack/ctc_raw_data/train
\ No newline at end of file
diff --git a/global_tracking.ipynb b/global_tracking.ipynb
index 8e1b82c..cfa6b27 100644
--- a/global_tracking.ipynb
+++ b/global_tracking.ipynb
@@ -5,113 +5,96 @@
    "id": "8666f97c-92c8-42ea-b025-2030dd3f3a9b",
    "metadata": {},
    "source": [
-    "# Run EmbedTrack with a Global Linking\n",
+    "# Cell Tracking by Global optimization\n",
     "\n",
-    "TODO:\n",
-    "- Evaluate by EmbedTrack - generate files XX_DATA\n",
-    "- track data using ctlib\n",
-    "    - generate tracking.txt\n",
-    "    - solve it using libct - generate sol_tracking.txt\n",
-    "- evaluate results (OPTIONAL)\n",
-    "- vizualize results\n",
-    "    - blabla\n",
-    "                        -\n"
+    "Autor: Filip Lux\n"
    ]
   },
   {
-   "cell_type": "markdown",
-   "id": "cfb1c034-3f74-493e-9a1d-47f8e9fbd79c",
-   "metadata": {},
-   "source": [
-    "### 1. Run EmbedTrack\n",
-    "\n",
-    "TODO\n",
-    "- [x] synchronize changes to use the original repository\n",
-    "- [x] add embedtrack to the subfolder, use by a external script\n",
-    "- [ ] add script to run EmbedTrack from global_tracking"
-   ]
+   "cell_type": "code",
+   "execution_count": 6,
+   "id": "159f69e5-6a90-40da-b905-dfd27ac5e694",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "The autoreload extension is already loaded. To reload it, use:\n",
+      "  %reload_ext autoreload\n"
+     ]
+    }
+   ],
+   "source": []
   },
   {
    "cell_type": "markdown",
-   "id": "179564f4-549a-45af-b2bb-9e7cf61e4dd5",
+   "id": "3a8304a7-71fe-47ec-b8cf-99162d0ae1f8",
    "metadata": {},
    "source": [
-    "### 2. Cell Tracking\n",
-    "\n",
-    "- [x] create FlowGraph (tracking/display_graph.ipynb)\n",
-    "- [x] convert to tracking.txt\n",
-    "- [x] display tracking.txt in Blender\n",
-    "    - [x] debug z position in a blender\n",
-    "    - [ ] tune parameters to show candidate graph better\n",
-    "        - [x] display split event in send_gt\n",
-    "    - [x] limit the candidate graph of the current version of tracking procedure\n",
-    "- [x] compute solution\n",
-    "- [x] display solution\n",
-    "- [x] generate XX_RES folder\n",
-    "- [x] compute TRAMeasure\n",
-    "- [ ] TrackingVisualizer\n",
-    "    - [x] display gt\n",
-    "    - [x] display ctc tracking result\n",
-    "    - [x] display candidate graph\n",
-    "    - [x] display solution\n",
-    "- [ ] revize, add to gitlab"
+    "### 1. Import packages"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 1,
-   "id": "159f69e5-6a90-40da-b905-dfd27ac5e694",
+   "execution_count": 7,
+   "id": "c9c3ebe6-7e58-45b4-b847-20a5fffcd463",
    "metadata": {
     "tags": []
    },
    "outputs": [],
    "source": [
-    "%load_ext autoreload\n",
-    "%autoreload 2"
+    "from pathlib import Path\n",
+    "import os\n",
+    "\n",
+    "from tracking.global_tracker import GlobalTracker\n",
+    "from tracking.embedtrack import run_embedtrack"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "21576aad-539a-4bfe-b41a-d9045d767208",
+   "metadata": {},
+   "source": [
+    "### 2. Set configuration"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 4,
-   "id": "12d5f97e-9a78-4d3c-9827-596cbced9413",
+   "execution_count": 8,
+   "id": "f2abf94a-3b99-4a06-9109-90a60ed29e22",
    "metadata": {
     "tags": []
    },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "/tmp/tmp.fK0WEipiOf/root/lib/python3/dist-packages\n"
-     ]
-    }
-   ],
+   "outputs": [],
    "source": [
-    "!echo $PYTHONPATH\n"
+    "# according the data you have in DATA folder, set the hyperparameters\n",
+    "dataset_name = 'BF-C2DL-HSC'\n",
+    "subsets = ['train']\n",
+    "seqs = ['01', '02']\n",
+    "max_dist = 42\n",
+    "vertex_thr = 0.95\n",
+    "lm_dist = 80\n",
+    "\n",
+    "experiment = f'MU-CZ_CTC23'"
    ]
   },
   {
-   "cell_type": "code",
-   "execution_count": 5,
-   "id": "c9c3ebe6-7e58-45b4-b847-20a5fffcd463",
+   "cell_type": "markdown",
+   "id": "1cb3c9eb-213e-419e-a0f5-b8771d2d692b",
    "metadata": {
     "tags": []
    },
-   "outputs": [],
    "source": [
-    "from pathlib import Path\n",
-    "import os\n",
-    "\n",
-    "from tracking.global_tracker import GlobalTracker\n",
-    "from tracking.blender.blender_tools import TrackingVisualizer   \n",
-    "from tracking.embedtrack import run_embedtrack\n",
-    "        "
+    "### 3. Run detection and tracking"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 37,
-   "id": "15de7091-eae1-4a2a-9824-057549b7f9db",
+   "execution_count": 9,
+   "id": "90e2394e-5a77-4a97-9df1-c2049403cb5d",
    "metadata": {
     "tags": []
    },
@@ -120,11 +103,9 @@
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "DATA/challenge/BF-C2DL-HSC/09_DATA\n",
       "creating graph\n",
       "GET GRAPH: adding vertices\n",
       "GET GRAPH: adding edges\n",
-      "GET GRAPH: entangling to 2 neighbours\n",
       "creating tracking file\n"
      ]
     },
@@ -132,175 +113,166 @@
      "name": "stderr",
      "output_type": "stream",
      "text": [
-      "100%|████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 62/62 [00:00<00:00, 104520.44it/s]\n",
-      "divisions: 100%|█████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 60/60 [00:00<00:00, 384211.05it/s]"
+      "100%|███████████████████████████████████| 201/201 [00:00<00:00, 77358.70it/s]\n",
+      "divisions: 100%|███████████████████████| 179/179 [00:00<00:00, 567483.31it/s]\n"
      ]
     },
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "Added 21 division candidates.\n",
-      "computiong solution\n",
-      "running procedure: $ct tracking.txt\n"
+      "Added 99 division candidates.\n",
+      "<itertools.chain object at 0x7f55c7edd040>\n",
+      "CORRECT, 278 99\n",
+      "computing solution\n",
+      "[mem] ctor: size=49392123904B (46GiB) -> memory_=0x7f42e7f72010\n",
+      "Set parameter Username\n",
+      "Academic license - for non-commercial use only - expires 2024-06-21\n",
+      "[mem] finalize: size=69104 (0.0659027 MiB)\n",
+      "it=100 lb=-72.241677663677137 ub=-72.241677663676271 gap=1.1999473680315842e-12% t=0.0055045570000000002\n",
+      "it=200 lb=-72.241677663677109 ub=-72.241677663676271 gap=1.1606048153783854e-12% t=0.010968277\n",
+      "final solution: -72.2416776636763\n",
+      "source file RESULTS/MU-CZ_CTC23/train/BF-C2DL-HSC/56_RES/tracking.txt\n",
+      "source solution RESULTS/MU-CZ_CTC23/train/BF-C2DL-HSC/56_RES/tracking.sol\n",
+      "saving RESULTS/MU-CZ_CTC23/train/BF-C2DL-HSC/56_RES/res_track.txt\n",
+      "storing res images\n"
      ]
     },
     {
      "name": "stderr",
      "output_type": "stream",
      "text": [
-      "\n"
+      "100%|███████████████████████████████████████| 20/20 [00:00<00:00, 166.44it/s]\n",
+      "rm: cannot remove '/home/xlux/PROJECTS/TRACKING/twin/global-linking/DATA/train/BF-C2DL-HSC/56_RES': No such file or directory\n"
      ]
     },
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "[mem] ctor: size=49392123904B (46GiB) -> memory_=0x7fdfa52b4010\n",
-      "Set parameter Username\n",
-      "Academic license - for non-commercial use only - expires 2024-06-21\n",
-      "[mem] finalize: size=20608 (0.0196533 MiB)\n",
-      "it=100 lb=-380.80612018652505 ub=-380.80612018652505 gap=0% t=0.0032107199999999998\n",
-      "it=200 lb=-380.80612018652505 ub=-380.80612018652505 gap=0% t=0.0062846550000000001\n",
-      "final solution: -380.8061201865252\n",
-      "source file RESULTS/embtck_lbd0.9_e2_avgdst0_mxdst42_vthr0.97_app1000000_ld80_em-2/challenge/BF-C2DL-HSC/09_RES/tracking.txt\n",
-      "source solution RESULTS/embtck_lbd0.9_e2_avgdst0_mxdst42_vthr0.97_app1000000_ld80_em-2/challenge/BF-C2DL-HSC/09_RES/tracking.sol\n",
-      "saving RESULTS/embtck_lbd0.9_e2_avgdst0_mxdst42_vthr0.97_app1000000_ld80_em-2/challenge/BF-C2DL-HSC/09_RES/res_track.txt\n",
-      "storing res images\n"
+      "/home/xlux/PROJECTS/TRACKING/twin/global-linking/RESULTS/MU-CZ_CTC23/train/BF-C2DL-HSC/56_RES\n",
+      "ERROR: procedure $rm /home/xlux/PROJECTS/TRACKING/twin/global-linking/DATA/train/BF-C2DL-HSC/56_RES failed with an output 1\n",
+      "running ./ctc_metrics/TRAMeasure DATA/train/BF-C2DL-HSC 56 4\n",
+      "TRA measure: 0.956236\n",
+      "running ./ctc_metrics/DETMeasure DATA/train/BF-C2DL-HSC 56 4\n",
+      "DET measure: 0.950000\n",
+      "running ./ctc_metrics/SEGMeasure DATA/train/BF-C2DL-HSC 56 4\n",
+      "The directory 'DATA/train/BF-C2DL-HSC/56_GT/SEG/' does not exist!\n",
+      "ERROR: procedure $./ctc_metrics/SEGMeasure DATA/train/BF-C2DL-HSC 56 4 failed with an output 255\n"
      ]
     },
     {
      "name": "stderr",
      "output_type": "stream",
      "text": [
-      "100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 31/31 [00:00<00:00, 172.27it/s]\n"
+      "100%|█████████████████████████████████| 980/980 [00:00<00:00, 1335852.43it/s]\n",
+      "100%|█████████████████████████████████| 123/123 [00:00<00:00, 1059341.67it/s]\n"
      ]
     },
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "ground truth masks are not available DATA/challenge/BF-C2DL-HSC/09_GT\n"
+      "running embedtrack with the following arguments: {'batch_size': '1', 'sequence': '01', 'data_path': 'DATA/train/BF-C2DL-HSC', 'model_path': 'EmbedTrack/models/BF-C2DL-HSC/MU-CZ_CTC23'}\n",
+      "running inference(DATA/train/BF-C2DL-HSC/01, EmbedTrack/models/BF-C2DL-HSC/MU-CZ_CTC23/best_iou_model.pth, EmbedTrack/models/BF-C2DL-HSC/MU-CZ_CTC23/config.json, batch_size=1)\n"
      ]
     },
     {
      "name": "stderr",
      "output_type": "stream",
      "text": [
-      "100%|█████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 296/296 [00:00<00:00, 1270741.03it/s]\n",
-      "100%|█████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 126/126 [00:00<00:00, 1124430.43it/s]\n",
-      "/home/xlux/.virtualenvs/et_torch/lib/python3.8/site-packages/numpy/lib/histograms.py:883: RuntimeWarning: invalid value encountered in divide\n",
-      "  return n/db/n.sum(), bin_edges\n"
+      "Traceback (most recent call last):\n",
+      "  File \"EmbedTrack/embedtrack.py\", line 69, in <module>\n",
+      "    main(args['data_path'], args['sequence'], args['model_path'], args['batch_size'])\n",
+      "  File \"EmbedTrack/embedtrack.py\", line 62, in main\n",
+      "    inference(img_path, model_path, config_file, batch_size=batch_size, overlap=0.25)\n",
+      "  File \"/home/xlux/PROJECTS/TRACKING/twin/global-linking/EmbedTrack/embedtrack/infer/infer_ctc_data.py\", line 72, in inference\n",
+      "    os.path.join(raw_data_path, os.listdir(raw_data_path)[0])\n",
+      "FileNotFoundError: [Errno 2] No such file or directory: 'DATA/train/BF-C2DL-HSC/01'\n"
      ]
     },
     {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAhYAAAGzCAYAAABzfl4TAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAvgUlEQVR4nO3dfXzN9f/H8efZ1dmZXRrZ5nJMhC8VIUmucvGbivqWRBl+fl1M+Pp+S+jbSP1G+XbxVaFvM5WEJEohvqILKZFyXdQ0ZgyzGbPZdn5/+Do/x+bizHs7Z9vjfrudW/u8P+/P+/36OOTpc2mx2+12AQAAGODl7gIAAEDlQbAAAADGECwAAIAxBAsAAGAMwQIAABhDsAAAAMYQLAAAgDEECwAAYAzBAgAAGEOwAFBMgwYN1KdPn8v2s1gsmjhxYtkXVIbWrl0ri8WitWvXursUoFIgWAAeaP369Zo4caKOHz/u7lIAwCU+7i4AQHHr16/XpEmTFBcXp9DQUHeXc1G5ubny8eF/IwD+H0csgAqsqKhIp0+fdtv8/v7+BAsATggWgIeZOHGinnjiCUlSdHS0LBaLLBaLUlJSZLFYNGLECL333ntq3ry5rFarVqxYIUmaNm2aOnTooPDwcNlsNrVu3VqLFi0qcY65c+eqbdu2CggIUFhYmDp16qTPP//8knW9/fbb8vHxcdQmFb/GYuLEibJYLNqzZ4/jaEtISIiGDBmiU6dOOY2Xm5urkSNHqkaNGgoKCtKdd96pAwcOXPF1G9OnT1fz5s0d+9CmTRvNmzfPqc+PP/6o3r17Kzg4WIGBgerWrZs2bNhw2bEBlB7/1AA8zN13361ffvlF77//vl5++WXVqFFDklSzZk1J0po1a7Rw4UKNGDFCNWrUUIMGDSRJr776qu68804NHDhQ+fn5mj9/vu69914tW7ZMsbGxjvEnTZqkiRMnqkOHDnr22Wfl5+en7777TmvWrFGPHj1KrOnNN9/UI488ovHjx+u555677D7cd999io6OVmJiojZv3qy33npL11xzjaZOneroExcXp4ULF+rBBx9U+/bttW7dOqc6L+Vf//qXRo4cqT//+c8aNWqUTp8+rZ9//lnfffedHnjgAUnS9u3bdeuttyo4OFhPPvmkfH19NWvWLHXu3Fnr1q1Tu3btrmguAC6yA/A4L774ol2S/ffff3dql2T38vKyb9++vdg2p06dclrOz8+3t2jRwt61a1dH26+//mr38vKy9+vXz15YWOjUv6ioyPFz/fr17bGxsXa73W5/9dVX7RaLxT558uRic0qyJyQkOJYTEhLskuxDhw516tevXz97eHi4Y3nTpk12SfbRo0c79YuLiys2Zknuuusue/PmzS/Zp2/fvnY/Pz/73r17HW1paWn2oKAge6dOnRxtX3zxhV2S/YsvvrjkeACuDKdCgArmtttuU7NmzYq122w2x8+ZmZnKysrSrbfeqs2bNzvalyxZoqKiIj3zzDPy8nL+42+xWIqN+cILL2jUqFGaOnWqnn766Suu8ZFHHnFavvXWW3X06FFlZ2dLkuP0zWOPPebU7/HHH7+i8UNDQ7V//35t3LixxPWFhYX6/PPP1bdvXzVs2NDRHhkZqQceeEBff/21oxYAZhEsgAomOjq6xPZly5apffv28vf3V/Xq1VWzZk3NmDFDWVlZjj579+6Vl5dXicHkQuvWrdPYsWM1duxYp+sqrkS9evWclsPCwiSdDTyStG/fPnl5eRXbl5iYmCsaf+zYsQoMDFTbtm3VuHFjxcfH65tvvnGsz8jI0KlTp9SkSZNi21533XUqKipSamqqS/sE4MoQLIAK5vwjE+d89dVXuvPOO+Xv76833nhDn332mVatWqUHHnhAdru9VPM0b95cTZo00bvvvqvff//dpW29vb1LbC9tLRe67rrrtHv3bs2fP18dO3bUhx9+qI4dOyohIcHI+ABKj2ABeKCSTktcyocffih/f3+tXLlSQ4cOVe/evdW9e/di/Ro1aqSioiLt2LHjsmPWqFFDq1evlq+vr7p166a0tDSXarqU+vXrq6ioqFhg2bNnzxWPUa1aNfXv31/Jycn6448/FBsbq+eff16nT59WzZo1FRAQoN27dxfbbteuXfLy8lLdunWvej8AFEewADxQtWrVJOmKn7zp7e0ti8WiwsJCR1tKSoqWLFni1K9v377y8vLSs88+q6KiIqd1JR1NqFOnjlavXq3c3FzdfvvtOnr0qGs7chE9e/aUJL3xxhtO7dOnTy/W99SpU9q1a5eOHDniaLuwDj8/PzVr1kx2u11nzpyRt7e3evTooaVLlyolJcXR79ChQ5o3b546duyo4OBgI/sCwBm3mwIeqHXr1pKkCRMm6P7775evr6/uuOOOi/aPjY3VSy+9pF69eumBBx7Q4cOH9frrrysmJkY///yzo19MTIwmTJigyZMn69Zbb9Xdd98tq9WqjRs3KioqSomJicXGjomJ0eeff67OnTurZ8+eWrNmzVX/pdy6dWvdc889euWVV3T06FHH7aa//PKLJOcjNt9//726dOmihIQEx/MtevTooYiICN1yyy2qVauWdu7cqddee02xsbEKCgqSJD333HNatWqVOnbsqMcee0w+Pj6aNWuW8vLy9MILL1xV/QAujiMWgAe66aabNHnyZP3000+Ki4vTgAEDlJGRcdH+Xbt2VVJSktLT0zV69Gi9//77mjp1qvr161es77PPPqvZs2crNzdXEyZM0DPPPKN9+/apW7duFx3/T3/6k5YvX65ffvlFd9xxh3Jzc696H9955x3Fx8fr008/1dixY5Wfn68FCxZIOvtEz0t5+OGHlZOTo5deeknx8fFasmSJRo4cqblz5zr6NG/eXF999ZVatGihxMRETZo0SfXr19cXX3zBMyyAMmSxm7qaCgCu0pYtW3TDDTdo7ty5GjhwoLvLAVAKHLEA4BYlHfV45ZVX5OXlpU6dOrmhIgAmcI0FALd44YUXtGnTJnXp0kU+Pj5avny5li9frv/5n//hjg2gAuNUCAC3WLVqlSZNmqQdO3YoJydH9erV04MPPqgJEybwxlSgAnMpWEycOFGTJk1yamvSpIl27dplvDAAAFDxuPzPgubNm2v16tX/PwD/sgAAAP/hcirw8fFRREREWdQCAAAqOJeDxa+//qqoqCj5+/vr5ptvVmJiYrEXDp0vLy9PeXl5juWioiIdO3ZM4eHhLj+2GAAAuIfdbteJEycUFRVV7O3I53PpGovly5crJydHTZo00cGDBzVp0iQdOHBA27Ztczzt7kIlXZcBAAAqptTUVNWpU+ei66/qrpDjx4+rfv36eumllzRs2LAS+1x4xCIrK0v16tVTamrqFT8WeNEPqZr4yQ5NvKOZmkUF675ZG0rst/Dh9moWFeL6jqDKyPzoIx1+7nld8/QEhZXwVMpzcnfu1B+DHpQk1Zv7rmzXXWd0/Mpq6ZrxSkxfo3ERXXVX1/91dzkADMrOzlbdunV1/PhxhYRc/O/aq7ryMjQ0VN
dee+0l30hotVpltVqLtQcHB19xsAgIDJKXNUABgUEKDAqWlzWgxH6BQVc+JqqmwoAAnfL2VnBAwCV/r/gGBirwP6/+Dg4MlO0Kf19d6fiVVUA1q7xt3gqoZq2S+w9UBZe7jOGqnryZk5OjvXv3KjIy8mqGAQAAlYRLweJvf/ub1q1bp5SUFK1fv179+vWTt7e3BgwYUFb1AQCACsSlUyH79+/XgAEDdPToUdWsWVMdO3bUhg0bVLNmzbKqDwAAVCAuBYv58+eXVR0AgCqssLBQZ86ccXcZVZq3t7d8fHyu+lEQPDYTAOBWOTk52r9/v3h1lfsFBAQoMjJSfn5+pR6DYAEAcJvCwkLt379fAQEBqlmzJg9OdBO73a78/HxlZGTo999/V+PGjS/5EKxLIVgAANzmzJkzstvtqlmzpmw2m7vLqdJsNpt8fX21b98+5efny9/fv1TjXNXtpgAAmMCRCs9Q2qMUTmMYqAMAAEASp0IAAB7owPFcZZ7ML7f5wqr5qXYop2JMIFgAADzKgeO56v6Pdco9U1huc9p8vbX6r7cRLgwgWAAAPErmyXzlninUK/2vV8w1gWU+357DORq9YIsyT+ZfcbCIi4vT8ePHtWTJEqf2tWvXqkuXLsrMzFRoaKj5YisAggUAwCPFXBOoFrV5Y3VFw8WbAADAGIIFAAAwhlMhAACUwrJlyxQY6HwNSGFh+V1w6qkIFgAAlEKXLl00Y8YMp7bvvvtOgwYNclNFnoFgAQBAKVSrVk0xMTFObfv373dTNZ6DaywAAIAxHLEAAHikPYdzKtU8VQXBAgDgUcKq+cnm663RC7aU25w2X2+FVfMrt/kqM4IFAMCj1A61afVfb/Pod4XMmTOnxPbOnTvLbrcbqqpiIlgAADxO7VAb7+2ooLh4EwAAGEOwAAAAxhAsAACAMQQLAABgDMECAAAYQ7AAAADGECwAAIAxPMcCAOB5jqdKp46W33wB4VJo3fKbrxIjWAAAPMvxVOn1ttKZU+U3p2+AFP894cIAggUAwLOcOno2VNz9L6nGtWU/35FfpMXDz87rYrD49ttv1bFjR/Xq1Uuffvqpoz0lJUXR0dGO5erVq6t169aaOnWqbrjhBklnH/+9bt06SZLValXDhg01YsQIPfbYYwZ2yn0IFgAAz1TjWinqendXcUlJSUl6/PHHlZSUpLS0NEVFRTmtX716tZo3b679+/dr5MiR6t27t3bt2qXQ0FBJ0vDhw/Xss8/q1KlTeueddxQfH6+wsDANGDDADXtjBhdvAgBQCjk5OVqwYIEeffRRxcbGlvhisvDwcEVERKhNmzaaNm2aDh06pO+++86xPiAgQBEREWrYsKEmTpyoxo0b6+OPPy7HvTCPYAEAQCksXLhQTZs2VZMmTTRo0CDNnj37km82tdnOvlQtP//ib2212WyXXF8RECwAACiFpKQkDRo0SJLUq1cvZWVlOa6ZuNDx48c1efJkBQYGqm3btsXWFxYWau7cufr555/VtWvXMq27rBEsAABw0e7du/X99987roXw8fFR//79lZSU5NSvQ4cOCgwMVFhYmH766SctWLBAtWrVcqx/4403FBgYKJvNpuHDh+svf/mLHn300XLdF9O4eBMAABclJSWpoKDA6WJNu90uq9Wq1157zdG2YMECNWvWTOHh4Y4LNs83cOBATZgwQTabTZGRkfLyqvj/3idYAADggoKCAr3zzjv6xz/+oR49ejit69u3r95//3316tVLklS3bl01atToomOFhIQoJiamTOstbwQLAIBnOvKLR86zbNkyZWZmatiwYQoJCXFad8899ygpKckRLKoiggUAwLMEhJ99Eubi4eU3p2/A2XmvQFJSkrp3714sVEhng8ULL7yg7Oxs0xVWGAQLAIBnCa179vHaHvqukE8++eSi69q2beu45fRSt55K0tq1a6+4vIqEYAEA8DyhdXlvRwVV8S8/BQAAHoNgAQAAjCFYAAAAYwgWAADAGIIFAAAwhmABAACMIVgAAABjeI4FAMDjHMw5qMy8zHKbL8wapsjAyHKbrzIjWAAAPMrBnIO6a+ldyi3ILbc5bT42Lb1rKeHCAIIFAMCjZOZlKrcgV4m3JqphSMMyn++3rN807qtxyszLvOJgERcXp7fffluS5OPjo+rVq6tly5YaMGCA4uLiHK8/b9CggUaPHq3Ro0dLkn766Sf9/e9/14YNG5Sdna2IiAi1a9dO06dP1zXXXOM0R2Jiop5++mlNmTJFTzzxhNO6OXPmaMiQIZIki8WiqKgo3X777Zo6dapjHIvF4ugfHBysFi1aaPLkyeratavrv0guIFgAADxSw5CGahbezN1lXFSvXr2UnJyswsJCHTp0SCtWrNCoUaO0aNEiffzxx/Lxcf4rNiMjQ926dVOfPn20cuVKhYaGKiUlRR9//LFOnjxZbPzZs2frySef1OzZs4sFC+lsWNi9e7eKior0008/aciQIUpLS9PKlSsdfZKTk9WrVy8dOXJEEyZMUJ8+fbRt2zY1bFh2gY1gAQBAKVitVkVEREiSateurRtvvFHt27dXt27dNGfOHP33f/+3U/9vvvlGWVlZeuuttxyhIzo6Wl26dCk29rp165Sbm6tnn31W77zzjtavX68OHTo49bFYLI75o6KiNHLkSP39739Xbm6ubDabJCk0NFQRERGKiIjQjBkzVLt2ba1atUoPP/yw8V+Pc7grBAAAQ7p27apWrVpp8eLFxdZFRESooKBAH3300WXffJqUlKQBAwbI19dXAwYMUFJS0mXnttlsKioqUkFBwUXXS1J+fv4V7EnpESwAADCoadOmSklJKdbevn17jR8/Xg888IBq1Kih3r1768UXX9ShQ4ec+mVnZ2vRokUaNGiQJGnQoEFauHChcnJyLjrnr7/+qpkzZ6pNmzYKCgoqtv7UqVN6+umn5e3trdtuu+3qdvAyCBYAABhkt9udLpw83/PPP6/09HTNnDlTzZs318yZM9W0aVNt3brV0ef9999Xo0aN1KpVK0nS9ddfr/r162vBggVOY2VlZSkwMFABAQFq0qSJatWqpffee8+pz4ABAxQYGKigoCB9+OGHSkpKUsuWLQ3vsTOCBQAABu3cuVPR0dEXXR8eHq57771X06ZN086dOxUVFaVp06Y51iclJWn79u3y8fFxfHbs2KHZs2c7jRMUFKQtW7Zo27ZtOnnypL788ktde+21Tn1efvllbdmyRenp6UpPT9fgwYPN7mwJuHgTAABD1qxZo61bt+ovf/nLFfX38/NTo0aNHHeFbN26VT/88IPWrl2r6tWrO/odO3ZMnTt31q5du9S0aVNJkpeXl2JiYi45fkRExGX7mEawAAB4pN+yfvPoefLy8pSenu50u2liYqL69Omjhx56qFj/ZcuWaf78+br//vt17bXXym6365NPPtFnn32m5ORkSWePVrRt21adOnUqtv1NN92kpKQkvfjii6Wqt7wQLAAAHiXMGiabj03jvhpXbnPafGwKs4a5tM2KFSsUGRkpHx8fhYWFqVWrVvrnP/+pwYMHO
x6Qdb5mzZopICBAf/3rX5Wamiqr1arGjRvrrbfe0oMPPqj8/HzNnTtXY8eOLXG+e+65R//4xz/0v//7v6Xax/JCsAAAeJTIwEgtvWupR78rZM6cOZozZ85l+51/d0jDhg315ptvXrSvn5+fjhw5ctH1Tz75pJ588klJZ5/8GRcXd8m5L3dLa1khWKDKOJOWpoKDB13uV3DwoNS8eVmWBuACkYGRvLejguKuEFRaudu3a2fT67Sz6XU6sXq19sb20ZE3ZkiS0v/+jHK3by+2zZm0NO2N7aP9Ix53tO0f8bjOpKU5xitpOwDAWQQLVFqnd+xw/JzzzTey5+ZedP05BZmZxfqdaz/Xv6TtAABnXVWwmDJliiwWi+OtbQAAoGordbDYuHGjZs2aVeZP8AIAABVHqYJFTk6OBg4cqH/9618KC3Pt9hwAAFB5lSpYxMfHKzY2Vt27d79s37y8PGVnZzt9AABA5eTy7abz58/X5s2btXHjxivqn5iYqEmTJrlcGAAAqHhcChapqakaNWqUVq1aJX9//yvaZty4cRozZoxjOTs7W3Xr1nWtSgBAlXImLU0FmeX3gCyfsDD5RkWV23yVmUvBYtOmTTp8+LBuvPFGR1thYaG+/PJLvfbaa8rLy5O3t7fTNlarVVar1Uy1AIBK79zzZEq69busWGw2Nfp0GeHCAJeCRbdu3ZzeGS9JQ4YMUdOmTTV27NhioQIAAFede55M1IsvyK9hwzKfL/+335T2xJMqyMy84mARFxent99+Ww8//LBmzpzptC4+Pl5vvPGGBg8e7Hjsd2pqqhISErRixQodOXJEkZGR6tu3r5555hmFh4fr0KFDqlOnjt59913df//9xeYbNmyYfvzxR23evFkTJ04s8RKDJk2aaNeuXa7/AhjmUrAICgpSixYtnNqqVaum8PDwYu0AAFwNv4YNZfPgx+nXrVtX8+fP18svvyybzSZJOn36tObNm6d69eo5+v3222+6+eabde211+r9999XdHS0tm/frieeeELLly/Xhg0bVKtWLcXGxmr27NnFgsXJkye1cOFCTZkyxdHWvHlzrV692qmfj49nvKXDM6oAAKCCufHGG7V3714tXrxYAwcOlCQtXrxY9erVU3R0tKNffHy8/Pz89PnnnzsCSL169XTDDTeoUaNGmjBhgmbMmKFhw4apb9+++uOPP5yCyQcffKCCggLHHNLZEBEREVFOe+qaq36k99q1a/XKK68YKAUAgIpl6NChSk5OdizPnj1bQ4YMcSwfO3ZMK1eu1GOPPeYIFedERERo4MCBWrBggex2u/7rv/5LtWrVKvbW1OTkZN19990KDQ0ty10xhneFAABQSoMGDdLXX3+tffv2ad++ffrmm280aNAgx/pff/1Vdrtd1113XYnbX3fddcrMzFRGRoa8vb0d12Wce+X53r179dVXX2no0KFO223dulWBgYFOn0ceeaTsdtQFnAoBAKCUatasqdjYWEcYiI2NVY0aNYr1OxcULmfo0KGaMmWKvvjiC3Xt2lXJyclq0KCBunbt6tSvSZMm+vjjj53agoODS78jBhEsAAC4CkOHDtWIESMkSa+//rrTupiYGFksFu3cuVP9+vUrtu3OnTsVFhammjVrSpIaN26sW2+9VcnJyercubPeeecdDR8+XBaLxWk7Pz8/xcTElNEeXR1OhQAAcBV69eql/Px8nTlzRj179nRaFx4erttvv11vvPGGci94Lkd6erree+899e/f3yk4DBs2TB9++KE+/PBDHThwQHFxceWxG8ZwxAIA4JHyf/utQszj7e2tnTt3On6+0GuvvaYOHTqoZ8+eeu6555xuN61du7aef/55p/733nuvRo4cqYcfflg9evQo8WnVBQUFSk9Pd2qzWCyqVavWVe2LCQQLAIBH8QkLk8VmU9oTT5bbnBabTT5X8bbuS13f0LhxY/3www9KSEjQfffdp2PHjikiIkJ9+/ZVQkKCqlev7tQ/ICBA999/v958881iF22es337dkVGRjq1Wa1WnT59utT7YArBAgDgUXyjotTo02Ue/a6QC28JvdCSJUucluvXr3/Zbc43a9YszZo1q8R1EydO1MSJE694rPJGsAAAeBzfqCje21FBcfEmAAAwhmABAACMIVgAAABjCBYAAMCYChUsjp7M14HjuRddvzHl2CXXAwCAsuWxwWLbgSw1eOpTbTuQpbBqfpKkF1fu1sPvbrroNpM+2aFbpqzRyu3pjm1LGg8AAJQNjw4W5/5bO9RWbP3IrjGa9WDrErf96tcMpzEuHA8AAJSNCvsci6hQW4mBAwBQ8Z04dlqnc86U23z+gb4Kqu5fbvNVZhU2WAAAKqcTx05r3sQNKsgvKrc5ffy89MDE9oQLAwgWAACPcjrnjAryi9R9SDNVj6xW5vMdO3hSq5N36HTOmSsOFnFxcXr77bclST4+PqpevbpatmypAQMGKC4uTl5eZ680aNCggUaPHq3HHntMUVFR+tvf/qannnqq2HiTJ0/Wa6+9pv3798vX19fczrkBwQIA4JGqR1ZTzXpB7i7jonr16qXk5GQVFhbq0KFDWrFihUaNGqVFixbp448/lo/P//8V6+fnp0GDBik5OblYsLDb7ZozZ44eeuihCh8qJA++eBMAAE9mtVoVERGh2rVr68Ybb9T48eO1dOlSLV++vMQXjg0bNky//PKLvv76a6f2devW6bffftOwYcPKqfKyRbAAAMCQrl27qlWrVlq8eHGxdX/605900003afbs2U7tycnJ6tChg5o2bVpeZZYpggUAAAY1bdpUKSkpJa4bNmyYPvjgA+Xk5EiSTpw4oUWLFmno0KHlWGHZIlgAAGCQ3W6XxWIpcd2AAQNUWFiohQsXSpIWLFggLy8v9e/fvzxLLFMECwAADNq5c6eio6NLXBccHKw///nPSk5OlnT2NMh9992nwMDA8iyxTBEsAAAwZM2aNdq6davuueeei/YZNmyYvv76ay1btkzr16+vNBdtnsPtpgAAj3Ts4EmPnicvL0/p6elOt5smJiaqT58+euihhy66XadOnRQTE6OHHnpITZs2VYcOHUpbukciWAAAPIp/oK98/Ly0OnlHuc3p4+cl/0DXniGxYsUKRUZGysfHR2FhYWrVqpX++c9/avDgwY4HZJXEYrFo6NChGj9+vMaNG3e1pXscggUAwKMEVffXAxPbe/S7QubMmVPisyoudLG7Q8aNG1cpQ4VEsAAAeKCg6v68t6OC4uJNAABgDMECAAAYQ7AAAADGECwAAG5nt9vdXQJk5nsgWAAA3Mbb21uSlJ+f7+ZKIEmnTp2SpKt6fTt3hQAA3MbHx0cBAQHKyMiQr6/vJZ//gLJjt9t16tQpHT58WKGhoY7AVxoECwCA21gsFkVGRur333/Xvn373F1OlRcaGqqIiIirGoNgAQBwKz8/PzVu3JjTIW7m6+t7VUcqziFYAADczsvLS/7+PBCrMuBkFgAAMIZgAQAAjCFYAAAAYwgWAADAGIIFAAAwhmABAACMIVgAAABjCBYAAMAYggUAADCG
YAEAAIwhWAAAAGMIFgAAwBiCBQAAMIZgAQAAjCFYAAAAYwgWAADAGIIFAAAwhmABAACMIVgAAABjCBYAAMAYggUAADCGYAEAAIwhWAAAAGMIFgAAwBiCBQAAMMalYDFjxgy1bNlSwcHBCg4O1s0336zly5eXVW0AAKCCcSlY1KlTR1OmTNGmTZv0ww8/qGvXrrrrrru0ffv2sqoPAABUID6udL7jjjuclp9//nnNmDFDGzZsUPPmzY0WBgAAKh6XgsX5CgsL9cEHH+jkyZO6+eabL9ovLy9PeXl5juXs7OzSTgkAADycyxdvbt26VYGBgbJarXrkkUf00UcfqVmzZhftn5iYqJCQEMenbt26V1UwAADwXC4HiyZNmmjLli367rvv9Oijj2rw4MHasWPHRfuPGzdOWVlZjk9qaupVFQwAADyXy6dC/Pz8FBMTI0lq3bq1Nm7cqFdffVWzZs0qsb/VapXVar26KgEAQIVw1c+xKCoqcrqGAgAAVF0uHbEYN26cevfurXr16unEiROaN2+e1q5dq5UrV5ZVfQAAoAJxKVgcPnxYDz30kA4ePKiQkBC1bNlSK1eu1O23315W9QEAgArEpWCRlJRUVnUAAIBKgHeFAAAAYwgWAADAGIIFAAAwhmABAACMIVgAAABjCBYAAMAYggUAADCGYAEAAIwhWAAAAGMIFgAAwBiCBQAAMIZgAQAAjCFYAAAAYwgWAADAGIIFAAAwhmABAACMIVgAAABjCBYAAMAYggUAADCGYAEAAIwhWAAAAGMIFgAAwBiCBQAAMIZgAQAAjCFYAAAAYwgWAADAGIIFAAAwhmABAACMIVgAAABjCBYAAMAYggUAADCGYAEAAIwhWAAAAGMIFgAAwBiCBQAAMIZgAQAAjCFYAAAAYwgWAADAGIIFAAAwhmABAACMIVgAAABjCBYAAMAYggUAADCGYAEAAIwhWAAAAGMIFgAAwBiCBQAAMIZgAQAAjCFYAAAAYwgWAADAGIIFAAAwhmABAACMIVgAAABjCBYAAMAYggUAADCGYAEAAIwhWAAAAGMIFgAAwBiCBQAAMIZgAQAAjCFYAAAAYwgWAADAGIIFAAAwxseVzomJiVq8eLF27dolm82mDh06aOrUqWrSpElZ1QeUWuGxzBJ/Pr/tTFraVc1xJi1NZw4edCwX/OfnkuY7f5vz+UZFXVUNAOBJXAoW69atU3x8vG666SYVFBRo/Pjx6tGjh3bs2KFq1aqVVY2Ay86kpSnj5ZcdyydWrizWJ+Pll536XKn0vz/j9N+SnD92gw8Xyda8uaOuvbF9ZM/NlSRZbDY1+nQZ4QJApeFSsFixYoXT8pw5c3TNNddo06ZN6tSpU4nb5OXlKS8vz7GcnZ1dijIB1xRknj1iEDH5WUmXDgElqfPadPk3a6aCzEyl3PPnYuuPL1p0xWOd3rHDESwKMjMdoUKS7Lm5KsjMJFgAqDSu6hqLrKwsSVL16tUv2icxMVEhISGOT926da9mSsAl/s2ayb9ZM5e384mM5C97ACiFUgeLoqIijR49WrfccotatGhx0X7jxo1TVlaW45OamlraKQEAgIdz6VTI+eLj47Vt2zZ9/fXXl+xntVpltVpLOw0AAKhAShUsRowYoWXLlunLL79UnTp1TNcEAAAqKJeChd1u1+OPP66PPvpIa9euVXR0dFnVBQAAKiCXgkV8fLzmzZunpUuXKigoSOnp6ZKkkJAQ2Wy2MikQAABUHC5dvDljxgxlZWWpc+fOioyMdHwWLFhQVvUBAIAKxOVTIQAAABfDu0IAAIAxBAsAAGBMqZ9jUdaOnsyXJO05nKOwan4ubZt2/LTTGBf+DAAAyoZHHbHYdiBLDZ76VCu3p+vFlbslSW99/bsefndTif0PHM8tsX3NrsOSpBdX7i423lOLt2rbgawS572w/XJ1Xml/AACqCo8LFpL01a8ZkqQpd/9Jr/S/3rF+1oOtNbJrjGM58z9HIZ7o2URT7v7TRcc9N96F81y47EqwcKU/AABVhceeCpGkFrVDnJZrh9ocYeJ84S6eKgEAAGXDo45YAACAio1gAQAAjCFYAAAAYwgWAADAGIIFAAAwhmABAACMIVgAAABjCBYAAMAYggUAADCGYAEAAIwhWAAAAGMIFgAAwBiCBQAAMIZgAQAAjCFYAAAAYwgWAADAGIIFAAAwhmABAACMIVgAAABjCBYAAMAYggUAADCGYAEAAIwhWAAAAGMIFgAAwBiCBQAAMIZgAQAAjCFYAAAAYwgWAADAGIIFAAAwhmABAACMIVgAAABjCBYAAMAYggUAADCGYAEAAIwhWAAAAGMIFgAAwBiCBQAAMIZgAQAAjCFYAAAAYwgWAADAGIIFAAAwhmABAACMIVgAAABjCBYAAMAYggUAADCGYAEAAIwhWAAAAGMIFgAAwBiCBQAAMIZgAQAAjCFYAAAAYwgWAADAGIIFAAAwhmABAACMIVgAAABjCBYAAMAYggUAADDG5WDx5Zdf6o477lBUVJQsFouWLFlSBmUBAICKyOVgcfLkSbVq1Uqvv/56WdQDAAAqMB9XN+jdu7d69+59xf3z8vKUl5fnWM7OznZ1SgAAUEGU+TUWiYmJCgkJcXzq1q1b1lMCAAA3KfNgMW7cOGVlZTk+qampZT0lAABwE5dPhbjKarXKarWW9TQAAMADcLspAAAwhmABAACMcflUSE5Ojvbs2eNY/v3337VlyxZVr15d9erVM1ocAACoWFwOFj/88IO6dOniWB4zZowkafDgwZozZ46xwgAAQMXjcrDo3Lmz7HZ7WdQCAAAqOK6xAAAAxhAsAACAMQQLAABgDMECAAAYQ7AAAADGECwAAIAxBAsAAGAMwQIAABhDsAAAAMYQLAAAgDEECwAAYAzBAgAAGEOwAAAAxhAsAACAMQQLAABgDMECAAAYQ7AAAADGECwAAIAxBAsAAGAMwQIAABhDsAAAAMYQLAAAgDEECwAAYAzBAgAAGEOwAAAAxhAsAACAMQQLAABgDMECAAAYQ7AAAADGECwAAIAxBAsAAGAMwQIAABhDsAAAAMYQLAAAgDEECwAAYAzBAgAAGEOwAAAAxhAsAACAMQQLAABgDMECAAAYQ7AAAADGECwAAIAxBAsAAGAMwQIAABhDsAAAAMYQLAAAgDEECwAAYAzBAgAAGEOwAAAAxhAsAACAMQQLAABgDMECAAAYQ7AAAADGECwAAIAxBAsAAGAMwQIAABhDsAAAAMYQLAAAgDEECwAAYAzBAgAAGEOwAAAAxhAsAACAMaUKFq+//roaNGggf39/tWvXTt9//73pugAAQAXkcrBYsGCBxowZo4SEBG3evFmtWrVSz549dfjw4bKoDwAAVCAuB4uXXnpJw4cP15AhQ9SsWTPNnDlTAQEBmj17dlnUBwAAKhAfVzrn5+dr06ZNGjdunKPNy8tL3bt317ffflviNnl5ecrLy3MsZ2VlSZKys7OL9T2Vc0JFeaf0e1qGivJOKefE2T5FeackSTknsh19TuWccKw7/+eSnBvvnG0p6doQbHFaLso
7Vaz9Ys71P5VzosT9gPvl5uQop7BQ2Tk5kqScwkKXts/OydGZ7GzHOEe2bVPezl2OcXL/+EOFVzjm0e07lF/vO0lSfkpKsVrOzVUejmTs1JHMvWU2/o60nSrMLdSOtJ2q+8O8MpmjRlgj1ah5XZmMDeDizv19Z7fbL93R7oIDBw7YJdnXr1/v1P7EE0/Y27ZtW+I2CQkJdkl8+PDhw4cPn0rwSU1NvWRWcOmIRWmMGzdOY8aMcSwXFRXp2LFjCg8Pl8Vy+aMDnig7O1t169ZVamqqgoOD3V1OlcZ34Vn4PjwH34XnqCzfhd1u14kTJxQVFXXJfi4Fixo1asjb21uHDh1yaj906JAiIiJK3MZqtcpqtTq1hYaGujKtxwoODq7Qv0kqE74Lz8L34Tn4LjxHZfguQkJCLtvHpYs3/fz81Lp1a/373/92tBUVFenf//63br75ZtcrBAAAlYrLp0LGjBmjwYMHq02bNmrbtq1eeeUVnTx5UkOGDCmL+gAAQAXicrDo37+/MjIy9Mwzzyg9PV3XX3+9VqxYoVq1apVFfR7JarUqISGh2CkelD++C8/C9+E5+C48R1X7Liz2y943AgAAcGV4VwgAADCGYAEAAIwhWAAAAGMIFgAAwBiCBQAAMIZgYUheXp6uv/56WSwWbdmyxd3lVEkpKSkaNmyYoqOjZbPZ1KhRIyUkJCg/P9/dpVUJr7/+uho0aCB/f3+1a9dO33//vbtLqnISExN10003KSgoSNdcc4369u2r3bt3u7ssSJoyZYosFotGjx7t7lLKHMHCkCeffPKyz09H2dq1a5eKioo0a9Ysbd++XS+//LJmzpyp8ePHu7u0Sm/BggUaM2aMEhIStHnzZrVq1Uo9e/bU4cOH3V1albJu3TrFx8drw4YNWrVqlc6cOaMePXro5MmT7i6tStu4caNmzZqlli1buruU8uHK201Rss8++8zetGlT+/bt2+2S7D/++KO7S8J/vPDCC/bo6Gh3l1HptW3b1h4fH+9YLiwstEdFRdkTExPdWBUOHz5sl2Rft26du0upsk6cOGFv3LixfdWqVfbbbrvNPmrUKHeXVOY4YnGVDh06pOHDh+vdd99VQECAu8vBBbKyslS9enV3l1Gp5efna9OmTerevbujzcvLS927d9e3337rxsqQlZUlSfwZcKP4+HjFxsY6/fmo7Mr8temVmd1uV1xcnB555BG1adNGKSkp7i4J59mzZ4+mT5+uadOmubuUSu3IkSMqLCws9lj/WrVqadeuXW6qCkVFRRo9erRuueUWtWjRwt3lVEnz58/X5s2btXHjRneXUq44YlGCp556ShaL5ZKfXbt2afr06Tpx4oTGjRvn7pIrtSv9Ps534MAB9erVS/fee6+GDx/upsoB94mPj9e2bds0f/58d5dSJaWmpmrUqFF677335O/v7+5yyhXvCilBRkaGjh49esk+DRs21H333adPPvlEFovF0V5YWChvb28NHDhQb7/9dlmXWiVc6ffh5+cnSUpLS1Pnzp3Vvn17zZkzR15e5OeylJ+fr4CAAC1atEh9+/Z1tA8ePFjHjx/X0qVL3VdcFTVixAgtXbpUX375paKjo91dTpW0ZMkS9evXT97e3o62wsJCWSwWeXl5KS8vz2ldZUKwuAp//PGHsrOzHctpaWnq2bOnFi1apHbt2qlOnTpurK5qOnDggLp06aLWrVtr7ty5lfYPrqdp166d2rZtq+nTp0s6exi+Xr16GjFihJ566ik3V1d12O12Pf744/roo4+0du1aNW7c2N0lVVknTpzQvn37nNqGDBmipk2bauzYsZX69BTXWFyFevXqOS0HBgZKkho1akSocIMDBw6oc+fOql+/vqZNm6aMjAzHuoiICDdWVvmNGTNGgwcPVps2bdS2bVu98sorOnnypIYMGeLu0qqU+Ph4zZs3T0uXLlVQUJDS09MlSSEhIbLZbG6urmoJCgoqFh6qVaum8PDwSh0qJIIFKpFVq1Zpz5492rNnT7Fgx4G5stW/f39lZGTomWeeUXp6uq6//nqtWLGi2AWdKFszZsyQJHXu3NmpPTk5WXFxceVfEKokToUAAABjuKoNAAAYQ7AAAADGECwAAIAxBAsAAGAMwQIAABhDsAAAAMYQLAAAgDEECwAAYAzBAgAAGEOwAAAAxhAsAACAMf8HkKzGM7N01QMAAAAASUVORK5CYII=",
-      "text/plain": [
-       "<Figure size 640x480 with 1 Axes>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "ERROR: procedure $python3 EmbedTrack/embedtrack.py --sequence 01 --data_path DATA/train/BF-C2DL-HSC --model_path EmbedTrack/models/BF-C2DL-HSC/MU-CZ_CTC23 --batch_size 1 failed with an output 1\n",
+      "creating graph\n"
+     ]
     },
     {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAhYAAAGzCAYAAABzfl4TAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAwqUlEQVR4nO3dfVxUZf7/8ffAwDDIrWgC3gKapK5WmpbbjZqlrm5rtWWupqg/1wpL110ttBK1vljZZpt50zeE7tWs1Cw13VarNct0bb1tM29CCW8SQZBA4Pz+cJ2vI6AOnGFm4PV8POaR5zrXOddnnMS3c65zHYthGIYAAABM4OfpAgAAQN1BsAAAAKYhWAAAANMQLAAAgGkIFgAAwDQECwAAYBqCBQAAMA3BAgAAmIZgAQAATEOwAFBBq1atNGDAgEv2s1gsSk1NdX9BAHwGwQLwQhs3blRqaqpOnjzp6VK8UnZ2tlJTU7Vt27Zqn2Pu3LnKzMw0rSYAZxEsAC+0ceNGTZs2zeuDRVFRkR5//PFaHzc7O1vTpk0jWABeiGAB+LDy8nL98ssvHhs/KChIVqvVY+MD8D4EC8DLpKamauLEiZKkuLg4WSwWWSwWHThwQBaLRWPHjtVbb72l9u3by2azafXq1ZKkWbNmqXv37oqKipLdblfnzp21dOnSSsd488031bVrVwUHBysyMlI333yzPvnkk4vW9dprr8lqtTpqkyrOsUhNTZXFYtHevXuVlJSkiIgIhYeHa8SIETp9+rTT+YqKivTII4+oUaNGCg0N1R133KHDhw9fct7G+vXrdd1110mSRowY4fj9yczM1O7du2W32zVs2DCnY7744gv5+/vr0UcflXR2DsnOnTu1YcMGx/E9evS46PsHcHn4pwbgZe666y795z//0TvvvKMXXnhBjRo1kiQ1btxYkvTpp59qyZIlGjt2rBo1aqRWrVpJkl588UXdcccdGjJkiEpKSrRo0SLdc889Wrlypfr37+84/7Rp05Samqru3btr+vTpCgwM1FdffaVPP/1Ut99+e6U1vfLKK3rggQc0efJkPfXUU5d8D/fee6/i4uKUlpamrVu36tVXX9UVV1yhZ555xtEnKSlJS5Ys0f3336/rr79eGzZscKqzKldddZWmT5+uJ598Un/84x910003SZK6d++u+Ph4zZgxQxMnTtTvf/973XHHHSosLFRSUpISExM1ffp0SdLs2bP18MMPKyQkRFOmTJEkNWnS5JJjA7gMBgCv89xzzxmSjP379zu1SzL8/PyMnTt3Vjjm9OnTTtslJSVGhw4djF69ejnavv/+e8PPz8+48847jbKyMqf+5eXljl+3bNnS6N+/v2EYhvHiiy8aFovFmDFjRoUxJRlTp051bE+dOtWQZIwcOdKp35133mlERUU5trds2WJIMsaPH+/ULykpqcI5K7N582ZDkpGRkVFhX1lZmXHjjTcaTZo0MY4fP24kJycbVqvV2Lx5s1O/9u3bG7fccstFxwHgOi6FAD7mlltuUbt27Sq02+12x69zc3OVl5enm266SVu3bnW0L1u2TOXl5XryySfl5+f8x99isVQ457PPPqtx48bpmWeecWmS5gMPPOC0fdNNN+nnn39Wfn6+JDku3zz00ENO/R5++OHLHqMqfn5+yszMVEFBgfr166e5c+cqJSVFXbp0qfG5AVwal0IAHxMXF1dp+8qVK/XUU09p27ZtKi4udrSfHxh++OEH+fn5VRpMLrRhwwZ99NFHevTRR53mVVyOFi1aOG1HRkZKOht4wsLCdPDgQfn5+VV4L61bt3ZpnKokJCQ45qp06NBBTzzxhCnnBXBpfGMB+Jjzv5k45/PPP9cdd9yhoKAgzZ07Vx9//LHWrl2rP/zhDzIMo1rjtG/fXm3bttUbb7yh/fv3u3Ssv79/pe3VraU6zk1Gzc7O1s8//1xr4wL1HcEC8EKVXZa4mPfee09BQUFas2aNRo4cqX79+ql3794V+iUkJKi8vFy7du265DkbNWqkdevWKSAgQLfeequys7NdquliWrZsqfLy8gqBZe/evZd1/KV+f+bPn6+1a9fq6aefVklJicaMGePyOQBUD8EC8EINGjSQpMteIMvf318Wi0VlZWWOtgMHDmjZsmVO/QYOHCg/Pz9Nnz5d5eXlTvsq+zahWbNmWrdunYqKinTbbbeZ9i//Pn36SDq7SNX5XnrppQp9T58+rT179uj48eOOtov9/uzfv18TJ07U3XffrcmTJ2vWrFlasWKFXn/9dad+DRo08PoFyABfRLAAvFDnzp0lSVOmTNEbb7yhRYsWqbCwsMr+/fv31+nTp9W3b1/Nnz9f06dPV7du3SrMWWjdurWmTJmiDz74QDfddJOef/55zZkzR8OHD9fkyZMrPXfr1q31ySefKCcnR3369HFMwKzp+7v77rs1e/ZsDRs2THPnztWgQYMcK2me/23C119/rauuukpz5sxxtCUkJCgiIkLz589Xenq6Fi1apP3798swDI0cOVJ2u13z5s2TJI0ZM0a33Xabxo0b5/StS+fOnfXvf/9bTz31lBYtWqRPP/20xu8LgLjdFPBWM2bMMJo2bWr4+fk5bj2VZCQnJ1faPz093WjTpo1hs9mMxMREIyMjw3H754UWLlxoXHPNNYbNZjMiIyONW265xVi7dq1j//m3m57z1VdfGaGhocbNN9/suLVVVdxueuzYMadjMzIyKtw+W1hYaCQnJxsNGzY0QkJCjIEDBxrfffedIcmYOXOmo98//vGPSm9BXb58udGuXTvDarU6bj198cUXDUnGe++959T3xx9/NMLCwozf/OY3jracnByjf//+RmhoqCGJW08Bk1gMoxZnUwHARWzbtk3XXHON3nzzTQ0ZMsTT5QCoBi6FAPCIoqKiCm2zZ8+Wn5+fbr75Zg9UBMAMrGMBwCOeffZZbdmyRT179pTVatWqVau0atUq/fGPf1Tz5s09XR6AauJSCACPWLt2raZNm6Zdu3apoKBALVq00P33368pU6bwxFTAh7kULFJTUzVt2jSntrZt22rPnj2mFwYAAHyPy/8saN++vdatW/d/J+BfFgAA4L9cTgVWq1XR0dHuqAUAAPg4l4PF999/r9jYWAUFBemGG25QWlpahQcOna+4uNjpgUjl5eU6ceKEoqKiWFIXAAAfYRiGTp06pdjY2ApPRz6fS3MsVq1apYKCArVt21Y//fSTpk2bpsOHD2vHjh0KDQ2t9JjK5mUAAADflJWVpWbNmlW5v0Z3hZw8eVItW7bUX//6V40aNarSPhd+Y5GXl6cWLVooKytLYWFhlzXO0m+ylPrhLqX+tp3axYbp3gWbKu23ZMz1ahcb7vobAYD/Op51Su8/v1WSdNefr1Wj5pX/owmob/Lz89W8eXOdPHlS4eFV/11bo5mXERERuvLKKy/6REKbzSabzVahPSws7LKDRXBIqPxswQoOCVVIaJj8bMGV9gsJvfxzAkBlikMtsgeefchZaGiYwsIIFsD5LjWNoUYrbxYUFOiHH3
5QTExMTU4DAADqCJeCxV/+8hdt2LBBBw4c0MaNG3XnnXfK399fgwcPdld9AADAh7h0KeTQoUMaPHiwfv75ZzVu3Fg33nijNm3apMaNG7urPgAA4ENcChaLFi1yVx0AgHqsrKxMZ86c8XQZ9Zq/v7+sVmuNl4Jg2UwAgEcVFBTo0KFD4tFVnhccHKyYmBgFBgZW+xwECwCAx5SVlenQoUMKDg5W48aNWTjRQwzDUElJiY4dO6b9+/erTZs2F10E62IIFgAAjzlz5owMw1Djxo1lt9s9XU69ZrfbFRAQoIMHD6qkpERBQUHVOk+NbjcFAMAMfFPhHar7LYXTOUyoAwAAQBKXQgAAXujwySLlFpbU2niRDQLVNIJLMWYgWAAAvMrhk0Xq/fwGFZ0pq7Ux7QH+WvfnWwgXJiBYAAC8Sm5hiYrOlGn2oKvV+ooQt4+392iBxi/eptzCkssOFklJSTp58qSWLVvm1L5+/Xr17NlTubm5ioiIML9YH0CwAAB4pdZXhKhDU55Y7WuYvAkAAExDsAAAAKbhUggAANWwcuVKhYQ4zwEpK6u9CafeimABAEA19OzZU/PmzXNq++qrrzR06FAPVeQdCBYAAFRDgwYN1Lp1a6e2Q4cOeaga78EcCwAAYBq+sQAAeKW9Rwvq1Dj1BcECAOBVIhsEyh7gr/GLt9XamPYAf0U2CKy18eoyggUAwKs0jbBr3Z9v8epnhWRmZlba3qNHDxmGYVJVvolgAQDwOk0j7Dy3w0cxeRMAAJiGYAEAAExDsAAAAKYhWAAAANMQLAAAgGkIFgAAwDQECwAAYBrWsQAAeJ+TWdLpn2tvvOAoKaJ57Y1XhxEsAADe5WSW9HJX6czp2hszIFhK/ppwYQKCBQDAu5z++WyouOt/pUZXun+84/+R3h99dlwXg8WXX36pG2+8UX379tVHH33kaD9w4IDi4uIc2w0bNlTnzp31zDPP6JprrpF0dvnvDRs2SJJsNpvi4+M1duxYPfTQQya8Kc8hWAAAvFOjK6XYqz1dxUWlp6fr4YcfVnp6urKzsxUbG+u0f926dWrfvr0OHTqkRx55RP369dOePXsUEREhSRo9erSmT5+u06dP6/XXX1dycrIiIyM1ePBgD7wbczB5EwCAaigoKNDixYv14IMPqn///pU+mCwqKkrR0dHq0qWLZs2apSNHjuirr75y7A8ODlZ0dLTi4+OVmpqqNm3aaMWKFbX4LsxHsAAAoBqWLFmixMREtW3bVkOHDtXChQsv+mRTu/3sQ9VKSqp+aqvdbr/ofl9AsAAAoBrS09M1dOhQSVLfvn2Vl5fnmDNxoZMnT2rGjBkKCQlR165dK+wvKyvTm2++qX//+9/q1auXW+t2N4IFAAAu+u677/T111875kJYrVYNGjRI6enpTv26d++ukJAQRUZG6ttvv9XixYvVpEkTx/65c+cqJCREdrtdo0eP1p/+9Cc9+OCDtfpezMbkTQAAXJSenq7S0lKnyZqGYchms2nOnDmOtsWLF6tdu3aKiopyTNg835AhQzRlyhTZ7XbFxMTIz8/3/71PsAAAwAWlpaV6/fXX9fzzz+v222932jdw4EC988476tu3rySpefPmSkhIqPJc4eHhat26tVvrrW0ECwCAdzr+H68cZ+XKlcrNzdWoUaMUHh7utO/uu+9Wenq6I1jURwQLAIB3CY46uxLm+6Nrb8yA4LPjXob09HT17t27QqiQzgaLZ599Vvn5+WZX6DMIFgAA7xLR/Ozy2l76rJAPP/ywyn1du3Z13HJ6sVtPJWn9+vWXXZ4vIVgAALxPRHOe2+GjfH/6KQAA8BoECwAAYBqCBQAAMA3BAgAAmIZgAQAATEOwAAAApiFYAAAA07COBQDA6/xU8JNyi3NrbbxIW6RiQmJqbby6jGABAPAqPxX8pN8t/52KSotqbUy71a7lv1tOuDABwQIA4FVyi3NVVFqktJvSFB8e7/bx9uXtU8rnKcotzr3sYJGUlKTXXntNkmS1WtWwYUN17NhRgwcPVlJSkuPx561atdL48eM1fvx4SdK3336rJ554Qps2bVJ+fr6io6PVrVs3vfTSS7riiiucxkhLS9Pjjz+umTNnauLEiU77MjMzNWLECEmSxWJRbGysbrvtNj3zzDOO81gsFkf/sLAwdejQQTNmzFCvXr1c/01yAcECAOCV4sPj1S6qnafLqFLfvn2VkZGhsrIyHTlyRKtXr9a4ceO0dOlSrVixQlar81+xx44d06233qoBAwZozZo1ioiI0IEDB7RixQoVFhZWOP/ChQs1adIkLVy4sEKwkM6Ghe+++07l5eX69ttvNWLECGVnZ2vNmjWOPhkZGerbt6+OHz+uKVOmaMCAAdqxY4fi490X2AgWAABUg81mU3R0tCSpadOmuvbaa3X99dfr1ltvVWZmpv7f//t/Tv3/+c9/Ki8vT6+++qojdMTFxalnz54Vzr1hwwYVFRVp+vTpev3117Vx40Z1797dqY/FYnGMHxsbq0ceeURPPPGEioqKZLfbJUkRERGKjo5WdHS05s2bp6ZNm2rt2rUaM2aM6b8f53BXCAAAJunVq5c6deqk999/v8K+6OholZaW6oMPPrjkk0/T09M1ePBgBQQEaPDgwUpPT7/k2Ha7XeXl5SotLa1yvySVlJRcxjupPoIFAAAmSkxM1IEDByq0X3/99Zo8ebL+8Ic/qFGjRurXr5+ee+45HTlyxKlffn6+li5dqqFDh0qShg4dqiVLlqigoKDKMb///nvNnz9fXbp0UWhoaIX9p0+f1uOPPy5/f3/dcsstNXuDl0CwAADARIZhOE2cPN/TTz+tnJwczZ8/X+3bt9f8+fOVmJio7du3O/q88847SkhIUKdOnSRJV199tVq2bKnFixc7nSsvL08hISEKDg5W27Zt1aRJE7311ltOfQYPHqyQkBCFhobqvffeU3p6ujp27GjyO3ZGsAAAwES7d+9WXFxclfujoqJ0zz33aNasWdq9e7diY2M1a9Ysx/709HTt3LlTVqvV8dq1a5cWLlzodJ7Q0FBt27ZNO3bsUGFhoT777DNdeeWVTn1eeOEFbdu2TTk5OcrJydHw4cPNfbOVYPImAAAm+fTTT7V9+3b96U9/uqz+gYGBSkhIcNwVsn37dn3zzTdav369GjZs6Oh34sQJ9ejRQ3v27FFiYqIkyc/PT61bt77o+aOjoy/Zx2wECwCAV9qXt8+rxykuLlZOTo7T7aZpaWkaMGCAhg0bVqH/ypUrtWjRIt1333268sorZRiGPvzwQ3388cfKyMiQdPbbiq5du+rmm2+ucPx1112n9PR0Pffcc9Wqt7YQLAAAXiXSFim71a6Uz1NqbUy71a5IW6RLx6xevVoxMTGyWq2KjIxUp06d9Le//U3Dhw93LJB1vnbt2ik4OFh//vOflZWVJZvNpjZt2ujVV1/V/fffr5KSEr355pt69NFHKx3v7rvv1vPPP6//+Z//qdZ7rC0ECwCAV4kJidHy3y336meFZGZmKjMz85L9zr87JD4+Xq+88kqVfQMDA3X8+PEq9
0+aNEmTJk2SdHblz6SkpIuOfalbWt2FYAEA8DoxITE8t8NHcVcIAAAwDcECAACYpkbBYubMmbJYLI6ntgEAgPqt2sFi8+bNWrBggdtX8AIAAL6jWsGioKBAQ4YM0f/+7/8qMtK123MAAEDdVa1gkZycrP79+6t3796X7FtcXKz8/HynFwAAqJtcvt100aJF2rp1qzZv3nxZ/dPS0jRt2jSXCwMAAL7HpWCRlZWlcePGae3atQoKCrqsY1JSUjRhwgTHdn5+vpo3b+5alQCAeuVMdrZKc2tvgSxrZKQCYmNrbby6zKVgsWXLFh09elTXXnuto62srEyfffaZ5syZo+LiYvn7+zsdY7PZZLPZzKkWAFDnncnO1g/9B8goKqq1MS12uxI+Wkm4MIFLweLWW291ema8JI0YMUKJiYl69NFHK4QKAABcVZqbK6OoSLHPPavA+Hi3j1eyb5+yJ05SaW7uZQeLpKQkvfbaaxozZozmz5/vtC85OVlz587V8OHDHct+Z2VlaerUqVq9erWOHz+umJgYDRw4UE8++aSioqJ05MgRNWvWTG+88Ybuu+++CuONGjVK//rXv7R161alpqZWOsWgbdu22rNnj+u/ASZzKViEhoaqQ4cOTm0NGjRQVFRUhXYAAGoiMD5e9vbtPV1GlZo3b65FixbphRdekN1ulyT98ssvevvtt9WiRQtHv3379umGG27QlVdeqXfeeUdxcXHauXOnJk6cqFWrVmnTpk1q0qSJ+vfvr4ULF1YIFoWFhVqyZIlmzpzpaGvfvr3WrVvn1M9q9Y6ndHhHFQAA+Jhrr71WP/zwg95//30NGTJEkvT++++rRYsWiouLc/RLTk5WYGCgPvnkE0cAadGiha655holJCRoypQpmjdvnkaNGqWBAwfqxx9/dAom7777rkpLSx1jSGdDRHR0dC29U9fUeEnv9evXa/bs2SaUAgCAbxk5cqQyMjIc2wsXLtSIESMc2ydOnNCaNWv00EMPOULFOdHR0RoyZIgWL14swzD0m9/8Rk2aNKnw1NSMjAzdddddioiIcOdbMQ3PCgEAoJqGDh2qL774QgcPHtTBgwf1z3/+U0OHDnXs//7772UYhq666qpKj7/qqquUm5urY8eOyd/f3zEv49wjz3/44Qd9/vnnGjlypNNx27dvV0hIiNPrgQcecN8bdQGXQgAAqKbGjRurf//+jjDQv39/NWrUqEK/c0HhUkaOHKmZM2fqH//4h3r16qWMjAy1atVKvXr1curXtm1brVixwqktLCys+m/ERAQLAABqYOTIkRo7dqwk6eWXX3ba17p1a1ksFu3evVt33nlnhWN3796tyMhINW7cWJLUpk0b3XTTTcrIyFCPHj30+uuva/To0bJYLE7HBQYGqnXr1m56RzXDpRAAAGqgb9++Kikp0ZkzZ9SnTx+nfVFRUbrttts0d+5cFV2wLkdOTo7eeustDRo0yCk4jBo1Su+9957ee+89HT58WElJSbXxNkzDNxYAAK9Usm+fT4zj7++v3bt3O359oTlz5qh79+7q06ePnnrqKafbTZs2baqnn37aqf8999yjRx55RGPGjNHtt99e6WrVpaWlysnJcWqzWCxq0qRJjd6LGQgWAACvYo2MlMVuV/bESbU2psVul7UGT+u+2PyGNm3a6JtvvtHUqVN177336sSJE4qOjtbAgQM1depUNWzY0Kl/cHCw7rvvPr3yyisVJm2es3PnTsXExDi12Ww2/fLLL9V+D2YhWAAAvEpAbKwSPlrp1c8KufCW0AstW7bMabtly5aXPOZ8CxYs0IIFCyrdl5qaqtTU1Ms+V20jWAAAvE5AbCzP7fBRTN4EAACmIVgAAADTECwAAIBpCBYAAMA0PhUsfi4s0eGTRVXu33zgxEX3AwAA9/LaYLHjcJ5aPfaRdhzOU2SDQEnSc2u+05g3tlR5zLQPd+nXMz/Vmp05jmMrOx8AAHAPrw4W5/7bNMJeYf8jvVprwf2dKz328++POZ3jwvMBAAD38Nl1LGIj7JUGDgCA7zt14hf9UnCm1sYLCglQaMOgWhuvLvPZYAEAqJtOnfhFb6duUmlJea2NaQ300x9SrydcmIBgAQDwKr8UnFFpSbl6j2inhjEN3D7eiZ8KtS5jl34pOHPZwSIpKUmvvfaaJMlqtaphw4bq2LGjBg8erKSkJPn5nZ1p0KpVK40fP14PPfSQYmNj9Ze//EWPPfZYhfPNmDFDc+bM0aFDhxQQEGDem/MAggUAwCs1jGmgxi1CPV1Glfr27auMjAyVlZXpyJEjWr16tcaNG6elS5dqxYoVslr/76/YwMBADR06VBkZGRWChWEYyszM1LBhw3w+VEhePHkTAABvZrPZFB0draZNm+raa6/V5MmTtXz5cq1atarSB46NGjVK//nPf/TFF184tW/YsEH79u3TqFGjaqly9yJYAABgkl69eqlTp056//33K+z71a9+peuuu04LFy50as/IyFD37t2VmJhYW2W6FcECAAATJSYm6sCBA5XuGzVqlN59910VFBRIkk6dOqWlS5dq5MiRtVihexEsAAAwkWEYslgsle4bPHiwysrKtGTJEknS4sWL5efnp0GDBtVmiW5FsAAAwES7d+9WXFxcpfvCwsL0+9//XhkZGZLOXga59957FRISUpsluhXBAgAAk3z66afavn277r777ir7jBo1Sl988YVWrlypjRs31plJm+dwuykAwCud+KnQq8cpLi5WTk6O0+2maWlpGjBggIYNG1blcTfffLNat26tYcOGKTExUd27d69u6V6JYAEA8CpBIQGyBvppXcauWhvTGuinoBDX1pBYvXq1YmJiZLVaFRkZqU6dOulvf/ubhg8f7lggqzIWi0UjR47U5MmTlZKSUtPSvQ7BAgDgVUIbBukPqdd79bNCMjMzK12r4kJV3R2SkpJSJ0OFRLAAAHih0IZBPLfDRzF5EwAAmIZgAQAATEOwAAAApiFYAAA8zjAMT5cAmfM5ECwAAB7j7+8vSSopKfFwJZCk06dPS1KNHt/OXSEAAI+xWq0KDg7WsWPHFBAQcNH1H+A+hmHo9OnTOnr0qCIiIhyBrzoIFgAAj7FYLIqJidH+/ft18OBBT5dT70VERCg6OrpG5yBYAAA8KjAwUG3atOFyiIcFBATU6JuKcwgWAACP8/PzU1AQC2LVBVzMAgAApiFYAAAA0xAsAACAaQgWAADANAQLAABgGoIFAAAwDcECAACYhmABAABMQ7AAAACmIVgAAADTECwAAIBpCBYAAMA0BAsAAGAaggUAADANwQIAAJiGYAEAAExDsAAAAKYhWAAAANMQLAAAgGkIFgAAwDQECwAAYBqCBQAAMA3BAgAAmIZgAQAATEOwAAAApnEpWMybN08dO3ZUWFiYwsLCdMMNN2jVqlXuqg0AAPgYl4JFs2bNNHPmTG3ZskXffPONevXqpd/97nfauXOnu+oDAAA+xOpK59/+9rdO208//bTmzZunTZs2qX379qYWBgAAfI9LweJ8ZWVlevfdd1VYWKgbbrihyn7FxcUq
Li52bOfn51d3SAAA4OVcnry5fft2hYSEyGaz6YEHHtAHH3ygdu3aVdk/LS1N4eHhjlfz5s1rVDAAAPBeLgeLtm3batu2bfrqq6/04IMPavjw4dq1a1eV/VNSUpSXl+d4ZWVl1ahgAADgvVy+FBIYGKjWrVtLkjp37qzNmzfrxRdf1IIFCyrtb7PZZLPZalYlAADwCTVex6K8vNxpDgUAAKi/XPrGIiUlRf369VOLFi106tQpvf3221q/fr3WrFnjrvoAAIAPcSlYHD16VMOGDdNPP/2k8PBwdezYUWvWrNFtt93mrvoAAIAPcSlYpKenu6sOAABQB/CsEAAAYBqCBQAAMA3BAgAAmIZgAQAATEOwAAAApiFYAAAA0xAsAACAaQgWAADANAQLAABgGoIFAAAwDcECAACYhmABAABMQ7AAAACmIVgAAADTECwAAIBpCBYAAMA0BAsAAGAaggUAADANwQIAAJiGYAEAAExDsAAAAKYhWAAAANMQLAAAgGkIFgAAwDQECwAAYBqCBQAAMA3BAgAAmIZgAQAATEOwAAAApiFYAAAA0xAsAACAaQgWAADANAQLAABgGoIFAAAwDcECAACYhmABAABMQ7AAAACmIVgAAADTECwAAIBpCBYAAMA0BAsAAGAaggUAADANwQIAAJiGYAEAAExDsAAAAKYhWAAAANMQLAAAgGkIFgAAwDQECwAAYBqCBQAAMA3BAgAAmIZgAQAATEOwAAAApiFYAAAA0xAsAACAaQgWAADANAQLAABgGoIFAAAwDcECAACYhmABAABMQ7AAAACmIVgAAADTECwAAIBpXAoWaWlpuu666xQaGqorrrhCAwcO1Hfffeeu2gAAgI9xKVhs2LBBycnJ2rRpk9auXaszZ87o9ttvV2FhobvqAwAAPsTqSufVq1c7bWdmZuqKK67Qli1bdPPNN1d6THFxsYqLix3b+fn51SgTAAD4ghrNscjLy5MkNWzYsMo+aWlpCg8Pd7yaN29ekyEBAIAXq3awKC8v1/jx4/XrX/9aHTp0qLJfSkqK8vLyHK+srKzqDgkAALycS5dCzpecnKwdO3boiy++uGg/m80mm81W3WEAAIAPqVawGDt2rFauXKnPPvtMzZo1M7smAADgo1wKFoZh6OGHH9YHH3yg9evXKy4uzl11AQAAH+RSsEhOTtbbb7+t5cuXKzQ0VDk5OZKk8PBw2e12txQIAAB8h0uTN+fNm6e8vDz16NFDMTExjtfixYvdVR8AAPAhLl8KAQAAqArPCgEAAKYhWAAAANNUex0Ld/u5sESStPdogSIbBLp0bPbJX5zOceGvAQCAe3jVNxY7Duep1WMfac3OHD235uxTU1/9Yr/GvLGl0v6HTxZV2v7pnqOSpOfWfFfhfI+9v107DudVOu6F7Zeq83L7AwBQX3hdsJCkz78/JkmaedevNHvQ1Y79C+7vrEd6tXZs5/73W4iJfdpq5l2/qvK858534TgXbrsSLFzpDwBAfeG1l0IkqUPTcKftphF2R5g4X5SLl0oAAIB7eNU3FgAAwLcRLAAAgGkIFgAAwDQECwAAYBqCBQAAMA3BAgAAmIZgAQAATEOwAAAApiFYAAAA0xAsAACAaQgWAADANAQLAABgGq9+CBlQ285kZ6s0N9exbY2MVEBsrAcrAgDfQrAA/utMdrZ+6D9ARlGRo81ityvho5WECwC4TAQL4L9Kc3NlFBUp9rlnFRgfr5J9+5Q9cZJKc3MJFgBwmQgWwAUC4+Nlb9/e02UAgE9i8iYAADANwQIAAJiGYAEAAExDsAAAAKYhWAAAANMQLAAAgGkIFgAAwDQECwAAYBqCBQAAMA3BAgAAmIZgAQAATEOwAAAApiFYAAAA0xAsAACAaQgWAADANAQLAABgGqunCwBqy5nsbJXm5jq2rZGRCoiN9WBFAFD3ECxQL5zJztYP/QfIKCpytFnsdiV8tJJwAQAmIligXijNzZVRVKTY555VYHy8SvbtU/bESSrNzSVYAICJCBaoVwLj42Vv397TZQBAncXkTQAAYBqCBQAAMA3BAgAAmIZgAQAATEOwAAAApiFYAAAA03C7Keqs81faLNm3z8PVAED9QLBAnVTVSpvWyEgPVgUAdR/BAnXShSttSjwbBABqA8ECdRorbQJA7WLyJgAAMA3BAgAAmIZgAQAATEOwAAAApiFYAAAA0xAsAACAaQgWAADANAQLAABgGoIFAAAwDcECAACYhmABAABM43Kw+Oyzz/Tb3/5WsbGxslgsWrZsmRvKAgAAvsjlYFFYWKhOnTrp5Zdfdkc9AADAh7n8dNN+/fqpX79+l92/uLhYxcXFju38/HxXhwQAAD7C7XMs0tLSFB4e7ng1b97c3UMCAAAPcXuwSElJUV5enuOVlZXl7iEBAICHuHwpxFU2m002m83dwwAAAC/A7aYAAMA0BAsAAGAaly+FFBQUaO/evY7t/fv3a9u2bWrYsKFatGhhanEAAMC3uBwsvvnmG/Xs2dOxPWHCBEnS8OHDlZmZaVphAADA97gcLHr06CHDMNxRCwAA8HHMsQAAAKYhWAAAANMQLAAAgGkIFgAAwDQECwAAYBq3L+kNeLOSffsq/XVVfayRkQqIjXV7XQDgqwgWqJeskZGy2O3KnjjJqd1it8saGVllH4vdroSPVhIuAKAKBAvUSwGxsUr4aKVKc3Od2s//RuLCPiX79il74iSV5uYSLACgCgQL1FsBsbGXDAiX0wcA8H+YvAkAAExDsAAAAKYhWAAAANMQLAAAgGkIFgAAwDQECwAAYBpuNwVq4Ex2doW1MM7HSp0A6huCBVBNZ7Kz9UP/ATKKiqrsw0qdAOobggVQTaW5uTKKihT73LMKjI+vsJ+VOgHURwQLoIYC4+Nlb9/e02UAgFdg8iYAADANwQIAAJiGYAEAAExDsAAAAKYhWAAAANMQLAAAgGm43RRwUcm+fU7/BQD8H4IFcJmskZGy2O3KnjjJ0Wax22WNjPRgVQDgXQgWwGUKiI1VwkcrnZ4NwrNAAMAZwQJwQUBsLEECAC6CyZsAAMA0BAsAAGAaggUAADANwQIAAJiGYAEAAExDsAAAAKbhdlMAALzEmexsl9bKcbV/bSBYAADgBc5kZ+uH/gNkFBU52ix2uxI+WllpWHC1f20hWAAA4AVKc3NlFBUp9rlnFRgfr5J9+5Q9cZJKc3MrDQqu9q8tBAsAALxIYHy87O3bu62/uzF5EwAAmIZgAQAATEOwAAAApiFYAAAA0xAsAACAaQgWAADANNxuCtQib1wlD4DvOP9nSMm+fW4f43xFBQWXdTzBAqgl3rpKHgDfUNXPEGtkpFvHOKegrOyyzkGwAGqJt66SB8A3XPgzRDL/W8/Kxjgnv6BAuv76S56DYAHUMm9bJQ+Ab6mNnyGVjXEmP/+yjmXyJgAAMA3BAgAAmIZgAQAATEOwAAAApiFYAAAA0xAsAACAabjdFHCzc6vjuWuVPAC+ozqr77r6M+Ri/S4c78J6zPg5RbAA3MQaGSmL3a7siZMcbWavkgfAd7i6+q6rP0M
q63+h88erapXNmv6cIlgAbhIQG6uEj1bybBAAklxffdfVnyGV9T/fheNVtcpmTX9OESwANwqIjSVIAHDiysqZrv4Mqc7PHLNX8mTyJgAAMA3BAgAAmIZgAQAATFOtYPHyyy+rVatWCgoKUrdu3fT111+bXRcAAPBBLgeLxYsXa8KECZo6daq2bt2qTp06qU+fPjp69Kg76gMAAD7E5WDx17/+VaNHj9aIESPUrl07zZ8/X8HBwVq4cKE76gMAAD7EpdtNS0pKtGXLFqWkpDja/Pz81Lt3b3355ZeVHlNcXKzi4mLHdl5eniQpPz+/Qt/TBadUXnxa+7OPqbz4tApOne1TXnxaklRwKt/R53TBKce+839dmXPnO2fHgRxtCrM4bZcXn67QXpVz/U8XnKr0fcDzigoKVFBWpvyCAp3x0s/oXI3Hd+xQYEGBp8sxTV7uAZ08leW28xf/YtWZX9x3p3xhYaCKSmIkSZvef0cNGpS4ZZyAoFLZgkrdcm54qcPHZSkr0/Zdq6Sib6X92We3174h7WpUcb/ZajheQcHZv0cNw7h4R8MFhw8fNiQZGzdudGqfOHGi0bVr10qPmTp1qiGJFy9evHjx4lUHXllZWRfNCm5fICslJUUTJkxwbJeXl+vEiROKioqSxXLpbwe8UX5+vpo3b66srCyFhYV5upx6jc/Cu/B5eA8+C+9RVz4LwzB06tQpxV5iAS6XgkWjRo3k7++vI0eOOLUfOXJE0dHRlR5js9lks9mc2iIiIlwZ1muFhYX59P8kdQmfhXfh8/AefBbeoy58FuHh4Zfs49LkzcDAQHXu3Fl///vfHW3l5eX6+9//rhtuuMH1CgEAQJ3i8qWQCRMmaPjw4erSpYu6du2q2bNnq7CwUCNGjHBHfQAAwIe4HCwGDRqkY8eO6cknn1ROTo6uvvpqrV69Wk2aNHFHfV7JZrNp6tSpFS7xoPbxWXgXPg/vwWfhPerbZ2ExLnnfCAAAwOXhWSEAAMA0BAsAAGAaggUAADANwQIAAJiGYAEAAExDsDBJcXGxrr76alksFm3bts3T5dRLBw4c0KhRoxQXFye73a6EhARNnTpVJSXueYgUnL388stq1aqVgoKC1K1bN3399deeLqneSUtL03XXXafQ0FBdccUVGjhwoL777jtPlwVJM2fOlMVi0fjx4z1ditsRLEwyadKkS66fDvfas2ePysvLtWDBAu3cuVMvvPCC5s+fr8mTJ3u6tDpv8eLFmjBhgqZOnaqtW7eqU6dO6tOnj44ePerp0uqVDRs2KDk5WZs2bdLatWt15swZ3X777SosLPR0afXa5s2btWDBAnXs2NHTpdQOV55uisp9/PHHRmJiorFz505DkvGvf/3L0yXhv5599lkjLi7O02XUeV27djWSk5Md22VlZUZsbKyRlpbmwapw9OhRQ5KxYcMGT5dSb506dcpo06aNsXbtWuOWW24xxo0b5+mS3I5vLGroyJEjGj16tN544w0FBwd7uhxcIC8vTw0bNvR0GXVaSUmJtmzZot69ezva/Pz81Lt3b3355ZcerAx5eXmSxJ8BD0pOTlb//v2d/nzUdW5/bHpdZhiGkpKS9MADD6hLly46cOCAp0vCefbu3auXXnpJs2bN8nQpddrx48dVVlZWYVn/Jk2aaM+ePR6qCuXl5Ro/frx+/etfq0OHDp4up15atGiRtm7dqs2bN3u6lFrFNxaVeOyxx2SxWC762rNnj1566SWdOnVKKSkpni65Trvcz+N8hw8fVt++fXXPPfdo9OjRHqoc8Jzk5GTt2LFDixYt8nQp9VJWVpbGjRunt956S0FBQZ4up1bxrJBKHDt2TD///PNF+8THx+vee+/Vhx9+KIvF4mgvKyuTv7+/hgwZotdee83dpdYLl/t5BAYGSpKys7PVo0cPXX/99crMzJSfH/nZnUpKShQcHKylS5dq4MCBjvbhw4fr5MmTWr58ueeKq6fGjh2r5cuX67PPPlNcXJyny6mXli1bpjvvvFP+/v6OtrKyMlksFvn5+am4uNhpX11CsKiBH3/8Ufn5+Y7t7Oxs9enTR0uXLlW3bt3UrFkzD1ZXPx0+fFg9e/ZU586d9eabb9bZP7jeplu3buratateeuklSWe/hm/RooXGjh2rxx57zMPV1R+GYejhhx/WBx98oPXr16tNmzaeLqneOnXqlA4ePOjUNmLECCUmJurRRx+t05enmGNRAy1atHDaDgkJkSQlJCQQKjzg8OHD6tGjh1q2bKlZs2bp2LFjjn3R0dEerKzumzBhgoYPH64uXbqoa9eumj17tgoLCzVixAhPl1avJCcn6+2339by5csVGhqqnJwcSVJ4eLjsdruHq6tfQkNDK4SHBg0aKCoqqk6HColggTpk7dq12rt3r/bu3Vsh2PHFnHsNGjRIx44d05NPPqmcnBxdffXVWr16dYUJnXCvefPmSZJ69Ojh1J6RkaGkpKTaLwj1EpdCAACAaZjVBgAATEOwAAAApiFYAAAA0xAsAACAaQgWAADANAQLAABgGoIFAAAwDcECAACYhmABAABMQ7AAAACmIVgAAADT/H9jO0oHAo7aHwAAAABJRU5ErkJggg==",
-      "text/plain": [
-       "<Figure size 640x480 with 1 Axes>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
+     "ename": "FileNotFoundError",
+     "evalue": "[Errno 2] No such file or directory: 'DATA/train/BF-C2DL-HSC/01_DATA'",
+     "output_type": "error",
+     "traceback": [
+      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
+      "\u001b[0;31mFileNotFoundError\u001b[0m                         Traceback (most recent call last)",
+      "Cell \u001b[0;32mIn[9], line 34\u001b[0m\n\u001b[1;32m     31\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m res_path\u001b[38;5;241m.\u001b[39mexists():\n\u001b[1;32m     32\u001b[0m     os\u001b[38;5;241m.\u001b[39mmakedirs(res_path)\n\u001b[0;32m---> 34\u001b[0m tr \u001b[38;5;241m=\u001b[39m \u001b[43mGlobalTracker\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdata_path\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m     35\u001b[0m \u001b[43m                   \u001b[49m\u001b[43mres_path\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m     36\u001b[0m \u001b[43m                   \u001b[49m\u001b[43mmax_dist\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmax_dist\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m     37\u001b[0m \u001b[43m                   \u001b[49m\u001b[43mvertex_thr\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mvertex_thr\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m     40\u001b[0m \u001b[38;5;66;03m# create tracking.sol and tracking.txt\u001b[39;00m\n\u001b[1;32m     41\u001b[0m tr\u001b[38;5;241m.\u001b[39mrun_tracking(limit_dist\u001b[38;5;241m=\u001b[39mlm_dist)\n",
+      "File \u001b[0;32m~/PROJECTS/TRACKING/twin/global-linking/tracking/global_tracker.py:101\u001b[0m, in \u001b[0;36mGlobalTracker.__init__\u001b[0;34m(self, data_path, res_path, mean_dist, max_dist, vertex_thr, app_cost, edge_min_cost, normalize, n_edges, min_frame, max_frame)\u001b[0m\n\u001b[1;32m     98\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mmin_frame \u001b[38;5;241m=\u001b[39m min_frame\n\u001b[1;32m     99\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mmax_frame \u001b[38;5;241m=\u001b[39m max_frame\n\u001b[0;32m--> 101\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mgraph \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcreate_graph\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n",
+      "File \u001b[0;32m~/PROJECTS/TRACKING/twin/global-linking/tracking/global_tracker.py:110\u001b[0m, in \u001b[0;36mGlobalTracker.create_graph\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m    106\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mcreate_graph\u001b[39m(\u001b[38;5;28mself\u001b[39m):\n\u001b[1;32m    107\u001b[0m     \u001b[38;5;66;03m# create graph\u001b[39;00m\n\u001b[1;32m    108\u001b[0m     \u001b[38;5;66;03m# TODO: limit to min_frame/max_frame\u001b[39;00m\n\u001b[1;32m    109\u001b[0m     \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mcreating graph\u001b[39m\u001b[38;5;124m'\u001b[39m)\n\u001b[0;32m--> 110\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mget_graph_bk\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m    111\u001b[0m \u001b[43m                    \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdata_path\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    112\u001b[0m \u001b[43m                    \u001b[49m\u001b[43mmin_frame\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmin_frame\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    113\u001b[0m \u001b[43m                    \u001b[49m\u001b[43mmax_frame\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmax_frame\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    114\u001b[0m \u001b[43m                    \u001b[49m\u001b[43mn_edges\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mn_edges\u001b[49m\u001b[43m)\u001b[49m\n",
+      "File \u001b[0;32m~/PROJECTS/TRACKING/twin/global-linking/tracking/global_tracker.py:281\u001b[0m, in \u001b[0;36mget_graph_bk\u001b[0;34m(data_path, n_edges, min_frame, max_frame)\u001b[0m\n\u001b[1;32m    258\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124;03m'''\u001b[39;00m\n\u001b[1;32m    259\u001b[0m \u001b[38;5;124;03mcreates instance of candidate graph\u001b[39;00m\n\u001b[1;32m    260\u001b[0m \u001b[38;5;124;03m\u001b[39;00m\n\u001b[0;32m   (...)\u001b[0m\n\u001b[1;32m    277\u001b[0m \u001b[38;5;124;03m\u001b[39;00m\n\u001b[1;32m    278\u001b[0m \u001b[38;5;124;03m'''\u001b[39;00m\n\u001b[1;32m    280\u001b[0m \u001b[38;5;66;03m# create an empty graph structure\u001b[39;00m\n\u001b[0;32m--> 281\u001b[0m graph \u001b[38;5;241m=\u001b[39m \u001b[43mFlowGraph\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdata_path\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    283\u001b[0m \u001b[38;5;66;03m# compute distances\u001b[39;00m\n\u001b[1;32m    284\u001b[0m pd_edge_path \u001b[38;5;241m=\u001b[39m os\u001b[38;5;241m.\u001b[39mpath\u001b[38;5;241m.\u001b[39mjoin(data_path, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124medge_prob_distance.csv\u001b[39m\u001b[38;5;124m'\u001b[39m)\n",
+      "File \u001b[0;32m~/PROJECTS/TRACKING/twin/global-linking/tracking/graph.py:33\u001b[0m, in \u001b[0;36mFlowGraph.__init__\u001b[0;34m(self, res_path)\u001b[0m\n\u001b[1;32m     30\u001b[0m \u001b[38;5;28;01massert\u001b[39;00m res_path\u001b[38;5;241m.\u001b[39mexists(), res_path\n\u001b[1;32m     32\u001b[0m \u001b[38;5;66;03m# get datset of frames \u001b[39;00m\n\u001b[0;32m---> 33\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdataset \u001b[38;5;241m=\u001b[39m \u001b[43mDataset\u001b[49m\u001b[43m(\u001b[49m\u001b[43mres_path\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mname_pattern\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mmask\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m     36\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124;03m'''\u001b[39;00m\n\u001b[1;32m     37\u001b[0m \u001b[38;5;124;03m#########\u001b[39;00m\n\u001b[1;32m     38\u001b[0m \u001b[38;5;124;03mOUTPUTS\u001b[39;00m\n\u001b[0;32m   (...)\u001b[0m\n\u001b[1;32m     43\u001b[0m \u001b[38;5;124;03musing this index you can get vertices that it represents\u001b[39;00m\n\u001b[1;32m     44\u001b[0m \u001b[38;5;124;03m'''\u001b[39;00m\n\u001b[1;32m     45\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39medges \u001b[38;5;241m=\u001b[39m {} \u001b[38;5;66;03m# edges[e_idx] = (v_idx, v_idx)\u001b[39;00m\n",
+      "File \u001b[0;32m~/PROJECTS/TRACKING/twin/global-linking/tracking/my_utils/image.py:91\u001b[0m, in \u001b[0;36mDataset.__init__\u001b[0;34m(self, data_path, name_pattern, max_size)\u001b[0m\n\u001b[1;32m     89\u001b[0m \u001b[38;5;28;01massert\u001b[39;00m os\u001b[38;5;241m.\u001b[39mpath\u001b[38;5;241m.\u001b[39misdir(data_path), data_path\n\u001b[1;32m     90\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_path \u001b[38;5;241m=\u001b[39m data_path\n\u001b[0;32m---> 91\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_files \u001b[38;5;241m=\u001b[39m \u001b[38;5;28msorted\u001b[39m([os\u001b[38;5;241m.\u001b[39mpath\u001b[38;5;241m.\u001b[39mjoin(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_path, name) \u001b[38;5;28;01mfor\u001b[39;00m name \u001b[38;5;129;01min\u001b[39;00m \u001b[43mos\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlistdir\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_path\u001b[49m\u001b[43m)\u001b[49m \\\n\u001b[1;32m     92\u001b[0m                       \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124m.tif\u001b[39m\u001b[38;5;124m'\u001b[39m \u001b[38;5;129;01min\u001b[39;00m name \u001b[38;5;129;01mand\u001b[39;00m name_pattern \u001b[38;5;129;01min\u001b[39;00m name])\n\u001b[1;32m     93\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_max_size \u001b[38;5;241m=\u001b[39m max_size\n\u001b[1;32m     94\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_len \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mlen\u001b[39m(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_files)\n",
+      "\u001b[0;31mFileNotFoundError\u001b[0m: [Errno 2] No such file or directory: 'DATA/train/BF-C2DL-HSC/01_DATA'"
+     ]
     }
    ],
    "source": [
-    "from pathlib import Path\n",
-    "\n",
-    "\n",
-    "# TODO: add to the confing file\n",
-    "time = 4\n",
-    "nn = 2 \n",
-    "dataset_name = 'BF-C2DL-HSC'\n",
-    "subsets = ['challenge']\n",
-    "seqs = [ '09']\n",
-    "mean_dist = 0\n",
-    "max_dist = 42\n",
-    "vertex_thr = 0.97\n",
-    "lm_dist = 80\n",
-    "app_cost = 1000000\n",
-    "edge_min_cost = -2\n",
-    "\n",
-    "idx_max = 30\n",
-    "\n",
-    "\n",
-    "# lbd is a weight of a distance cost\n",
     "for subset in subsets:\n",
     "    for seq in seqs:\n",
-    "        for lbd in [0.9]: #[1.0, 0.9, 0.5, 0.1, 0.0]:\n",
-    "            experiment = f'embtck_lbd{lbd}_e{nn}_avgdst{mean_dist}_mxdst{max_dist}_vthr{vertex_thr}_app{app_cost}_ld{lm_dist}_em{edge_min_cost}'\n",
-    "            \n",
-    "            # learn data path\n",
-    "            data_path = Path('DATA',\n",
-    "                             subset,\n",
-    "                             dataset_name,\n",
-    "                             f'{seq}_DATA')\n",
     "\n",
-    "            assert data_path.exists(), data_path\n",
-    "            print(data_path)\n",
-    "            \n",
-    "            if not os.path.isdir(data_path):\n",
-    "                model_path = 'EmbedTrack/models/BF-C2DL-HSC'\n",
+    "        # learn data path\n",
+    "        data_path = Path('DATA',\n",
+    "                         subset,\n",
+    "                         dataset_name,\n",
+    "                         f'{seq}_DATA')\n",
     "\n",
-    "                assert os.path.isdir(model_path)\n",
-    "                run_embedtrack(os.path.dirname(data_path), seq, model_path)\n",
-    "            \n",
+    "        assert data_path.exists(), data_path\n",
     "\n",
-    "            # create result directory\n",
-    "            res_path = Path('RESULTS',\n",
-    "                             experiment,\n",
-    "                             subset,\n",
-    "                             dataset_name,\n",
-    "                             f'{seq}_RES')\n",
+    "        # run EmbedTrack procedure\n",
+    "        if not os.path.isdir(data_path):\n",
+    "            model_path = Path('EmbedTrack',\n",
+    "                              'models',\n",
+    "                              dataset_name,\n",
+    "                              experiment)\n",
     "\n",
+    "            assert os.path.isdir(model_path)\n",
+    "            run_embedtrack(os.path.dirname(data_path), seq, model_path)\n",
     "\n",
-    "            if not res_path.exists():\n",
-    "                os.makedirs(res_path)\n",
     "\n",
-    "            #'''\n",
-    "            tr = GlobalTracker(data_path,\n",
-    "                               res_path,\n",
-    "                               lbd=lbd,\n",
-    "                               mean_dist=mean_dist,\n",
-    "                               max_dist=max_dist,\n",
-    "                               vertex_thr=vertex_thr,\n",
-    "                               app_cost=app_cost,\n",
-    "                               edge_min_cost=edge_min_cost)\n",
+    "        # create result directory\n",
+    "        res_path = Path('RESULTS',\n",
+    "                         experiment,\n",
+    "                         subset,\n",
+    "                         dataset_name,\n",
+    "                         f'{seq}_RES')\n",
     "\n",
     "\n",
-    "            # create tracking.sol and tracking.txt\n",
-    "            tr.run_tracking(n_neighbours=nn, limit_dist=lm_dist)\n",
+    "        if not res_path.exists():\n",
+    "            os.makedirs(res_path)\n",
     "\n",
-    "            # evaluate\n",
-    "            tr.save_results_ctc()\n",
-    "            tr.evaluate_ctc()\n",
+    "        tr = GlobalTracker(data_path,\n",
+    "                           res_path,\n",
+    "                           max_dist=max_dist,\n",
+    "                           vertex_thr=vertex_thr)\n",
     "\n",
-    "            tr.solution_stats(plot=True)\n",
     "\n",
-    "            # TODO: check if Blender is ready\n",
-    "            tv = TrackingVisualizer(data_path,\n",
-    "                                    res_path,\n",
-    "                                    time=time,  \n",
-    "                                    idx_min=0,\n",
-    "                                    idx_max=10000)\n",
+    "        # create tracking.sol and tracking.txt\n",
+    "        tr.run_tracking(limit_dist=lm_dist)\n",
     "\n",
-    "            #tv.display_gt(time=time)\n",
-    "            tv.display_candidate_graph(time=time)\n",
-    "            tv.display_solution(time=time)\n",
-    "            tv.display_mistakes(time=time+1)\n",
+    "        # evaluate\n",
+    "        tr.save_results_ctc()\n",
+    "        tr.evaluate_ctc()\n",
+    "        tr.solution_stats()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "88691d44-c2cb-4780-9789-1d47f5dda8ba",
+   "metadata": {},
+   "source": [
+    "### 4. See results\n",
     "\n",
-    "            time +=2\n",
-    "\n"
+    "Results are stored in a `RESULTS` folder."
    ]
   },
   {
diff --git a/tracking/blender/blender_tools.py b/tracking/blender/blender_tools.py
deleted file mode 100644
index c6944e3..0000000
--- a/tracking/blender/blender_tools.py
+++ /dev/null
@@ -1,1420 +0,0 @@
-"""
-Author: Filip Lux (2023), Masaryk University
-Licensed under MIT License
-"""
-
-from grpc import insecure_channel, RpcError
-from copy import copy
-import os
-from tqdm import tqdm
-from skimage import io, measure
-import numpy as np
-import math
-from pathlib import Path
-
-
-from . import buckets_with_graphics_pb2 as buckets_with_graphics_pb2
-from . import buckets_with_graphics_pb2_grpc as  buckets_with_graphics_pb2_grpc
-
-
-# GLOBAL PARAMETERS
-COLOR = {'red':    0xF54731,      # color of objects
-         'green':  0xB4FA41,
-         'blue':   0x47CDFF,
-         'gray':   0xEEEEEE,
-         'yellow': 0xF5BC00,
-         'pink':   0xD6559E,
-         'black':  0x000000}
-
-CONFIG = {
-    'x_scale' : .1,           # scale of the axis x
-    'x_shift' : 500,          # shift of the axis x
-    'y_scale' : .1,           # scale of the axis y
-    'y_shift' : 500,          # shift of the axis y
-    'z_scale' : .2,           # scale of the axis z
-    'z_shift' : 1000          # shift of the axis z
-    }
-
-class TrackingVisualizer:
-    
-    '''
-    Tool to send objects from Python to Blender,
-    developed to visualize cell tracking graphs in 3D.
-    '''
-    
-    def __init__(self,
-                data_path,
-                res_path,
-                time,
-                idx_min=0,
-                idx_max=10000):
-        
-    
-        self.data_path = data_path
-        self.res_path = res_path
-        self.time = time
-        self.idx_min = idx_min
-        self.idx_max = idx_max
-        
-        # TODO: load from config
-        self.sequence = os.path.basename(data_path)[:2]
-        
-        
-    def display_gt(self,
-                   time=None):
-        
-        data_dir = os.path.dirname(self.data_path)
-        
-        # test if gt exists
-        gt_path = os.path.join(data_dir,
-                               self.sequence + '_GT')
-        if not os.path.isdir(gt_path):
-            print('The sequence does not have GT files')
-            print(gt_path)
-            return
-        
-        time = time if time is not None else self.time
-
-        # send gt to blender
-        send_gt(data_dir,
-                self.sequence,
-                time=self.time,
-                first_frame=self.idx_min,
-                last_frame=self.idx_max)
-        
-    def display_candidate_graph(self,
-                                time=None):
-        
-        # test if tracking.txt exists
-        tracking_path = os.path.join(self.res_path,
-                                     'tracking.txt')
-        if not os.path.isfile(tracking_path):
-            print(f'The tracking file {tracking_path} does not exist')
-            return
-        
-        time = time if time is not None else self.time
-
-        # send candidate graph to blender
-        show_libct(tracking_path,
-                   time=time,
-                   min_idx=self.idx_min,
-                   max_idx=self.idx_max)
-    
-    def display_solution(self,
-                         time=None):
-        '''
-        Displays the tracks chosen by the tracker.
-        '''
-        
-        # test if tracking.sol exists
-        sol_path = os.path.join(self.res_path,
-                                     'tracking.sol')
-        if not os.path.isfile(sol_path):
-            print(f'The solution file {sol_path} does not exist')
-            return
-        
-        time = time if time is not None else self.time
-        show_libct_solution(sol_path,
-                            time=time,
-                            min_idx=self.idx_min,
-                            max_idx=self.idx_max)
-    
-    def display_mistakes(self,
-                         time=None):
-        '''
-        Displays errors after the CTC evaluation.
-        '''
-        
-        # test if TRA_log.txt exists
-        tra_path = os.path.join(self.res_path,
-                                     'TRA_log.txt')
-        if not os.path.isfile(tra_path):
-            print(f'The tracking log file {tra_path} does not exist')
-            return
-        
-        time = time if time is not None else self.time
-        send_res(self.data_path,
-                 self.res_path,
-                 time=time,
-                 first_frame=self.idx_min,
-                 last_frame=self.idx_max)
-        
-
-def choose_color_and_size(prob):
-
-    '''
-    Maps a cost value to a display color and sphere size.
-    Color thresholds (matching the branches below):
-    (-inf, -2] green
-    (-2, -1]  yellow
-    (-1, 0]   pink
-    (0, inf)  red
-    '''
-
-    size = -prob / 5
-    size = max(size, .05)
-
-    # four colors
-    if prob <= -2:
-        return COLOR['green'], size
-    elif prob <= -1:
-        return COLOR['yellow'],  size
-    elif prob <= 0:
-        return COLOR['pink'], size
-    else:
-        return COLOR['red'], size
-
-def transform_coo(x, y, z):
-    '''
-    transforms image coordinates to blender coordinates
-    '''
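-    # Illustrative example (not from the original code), using the CONFIG above:
-    # transform_coo(500, 500, 1000) -> (0.0, 0.0, 0.0), since x and y are scaled
-    # by 0.1 around shift 500 and z is scaled by 0.2 around shift 1000.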
-    x = (x - CONFIG['x_shift']) * CONFIG['x_scale']
-    y = (y - CONFIG['y_shift']) * CONFIG['y_scale']
-    z = (z - CONFIG['z_shift']) * CONFIG['z_scale']
-    
-    return x, y, z
-
-
-def label2rgb(label):
-    '''
-    maps an integer label to rgb color
-    '''
-    
-    np.random.seed(label)
-    color = list(np.random.choice(range(256), size=3))
-    color_rgb = color[2] * 256 * 256 + color[1] * 256 + color[0]
-    
-    return color_rgb
-
-
-def get_linear_color(label):
-    
-    # clip to (0, 1)
-    #label = np.minimum(np.maximum(label, 0), 1)
-
-    color = [210, int(255*label), 0]
-    color_rgb = color[0] + color[1] * 256 + color[2] * 256 * 256
-        
-    return color_rgb
-
-
-def get_label(string):
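-    # Illustrative inputs (inferred from the replacements below; values made up):
-    #   'T=12 Label=5'     -> (12, 5)
-    #   'T=12 GT_label=5'  -> (12, 5)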
-    
-    string = string.replace(']', '').replace('[', '')
-    
-    time, label = string.strip().split(' ')
-    time = time.replace('T=', '')
-    label = label.replace('Label=', '').replace('GT_label=', '')
-    
-    return int(time), int(label)
-    
-
-
-def read_det_log(txt_path):
-        
-    
-    with open(txt_path, 'r') as f:
-
-        lines = f.readlines()[:-2]
-        sep = []
-        
-        for idx, line in enumerate(lines):
-            if line[0] == '-':
-                sep.append(idx)
-        print(sep)
-        
-        secs = np.split(lines, sep)
-        res = [sec[1:] for sec in secs[1:]]
-        
-        print(len(res))
-        
-        lines_split, lines_fn, lines_fp = res
-        
-        
-        '----------Splitting Operations (Penalty=5)----------'
-        splitting_op = {}
-        for line in lines_split:
-            
-            time, label = get_label(line)
-            
-            splitting_op[time] = splitting_op.get(time, [])
-            splitting_op[time].append(label)
-
-        
-        'GT - list'
-        '----------False Negative Vertices (Penalty=10)----------'
-        fn_vertices = []
-        for line in lines_fn:
-            
-            time, label = get_label(line)
-            fn_vertices.append((time, label))
-            
-        
-        '----------False Positive Vertices (Penalty=1)----------'
-        fp_vertices = {}
-        for line in lines_fp:
-            
-            time, label = get_label(line)
-            
-            fp_vertices[time] = fp_vertices.get(time, [])
-            fp_vertices[time].append(label)
-
-            
-        
-        res = splitting_op, fn_vertices, fp_vertices
-        
-        return res
-    
-    
-def read_tra_log(txt_path):
-        
-    # for each time frame, list the daughter labels (time t) and their mother labels (time t-1)
-    
-    keys = set()
-    t0_init = set()
-    
-    tags = []
-    with open(txt_path, 'r') as f:
-
-        lines = f.readlines()
-        sep = []
-        
-        for idx, line in enumerate(lines):
-            if line[0] == '-':
-                sep.append(idx)
-        print(sep)
-        
-        secs = np.split(lines, sep)
-        res = [sec[1:] for sec in secs[1:]]
-        
-        print(len(res))
-        
-        lines_split, lines_fn, lines_fp, lines_remove, lines_add, lines_sem = res
-        
-        
-        '----------Splitting Operations (Penalty=5)----------'
-        splitting_op = {}
-        for line in lines_split:
-            
-            time, label = get_label(line)
-            
-            splitting_op[time] = splitting_op.get(time, [])
-            splitting_op[time].append(label)
-            
-            keys.add(time)
-        
-        'GT - list'
-        '----------False Negative Vertices (Penalty=10)----------'
-        fn_vertices = []
-        for line in lines_fn:
-            
-            time, label = get_label(line)
-            fn_vertices.append((time, label))
-            
-        
-        '----------False Positive Vertices (Penalty=1)----------'
-        fp_vertices = {}
-        for line in lines_fp:
-            
-            time, label = get_label(line)
-            
-            fp_vertices[time] = fp_vertices.get(time, [])
-            fp_vertices[time].append(label)
-            
-            keys.add(time)
-            
-        
-        '----------Redundant Edges To Be Deleted (Penalty=1)----------'
-        remove_edges = {}
-        for line in lines_remove:
-            
-            start, end = line.strip().split(' -> ')
-            time0, label0 = get_label(start)
-            time1, label1 = get_label(end)
-            
-            if not time0 + 1 == time1:
-                print(f'Redundant edge: {line}')
-                continue
-            
-            remove_edges[time1] = remove_edges.get(time1, [])
-            remove_edges[time1].append((label0, label1))
-            
-            keys.add(time0)
-            keys.add(time1)
-            t0_init.add(time0)
-            
-        'GT - list'
-        '----------Edges To Be Added (Penalty=1.5)----------'
-        add_edges = []
-        skip_edges = []
-        for line in lines_add:
-            
-            start, end = line.strip().split(' -> ')
-            time0, label0 = get_label(start)
-            time1, label1 = get_label(end)
-            
-            #assert time0 + 1 == time1, f'{time0} {time1}, {line}'
-            if not time0 + 1 == time1:
-                skip_edges.append((time0, label0, time1, label1))
-                t0_init.add(time0)
-            else:
-                add_edges.append((time0, label0, time1, label1))
-            
-            
-            
-            
-        '----------Edges with Wrong Semantics (Penalty=1)----------'
-        semantic_edges = {}
-        for line in lines_sem:
-            
-            if line[0] == '=':
-                break
-            
-            start, end = line.strip().split(' -> ')
-            time0, label0 = get_label(start)
-            time1, label1 = get_label(end)
-            
-            if not time0 + 1 == time1:
-                print(f'Wrong semantics: {line}')
-                continue
-                
-            semantic_edges[time1] = semantic_edges.get(time1, [])
-            semantic_edges[time1].append((label0, label1))
-            
-            keys.add(time0)
-            keys.add(time1)
-            t0_init.add(time0)
-            
-        
-        print(lines_sem[-1])
-        
-        res = {}
-        res['splitting_op'] = splitting_op
-        res['fn_vertices'] = fn_vertices
-        res['fp_vertices'] = fp_vertices
-        res['remove_edges'] = remove_edges
-        res['add_edges'] = add_edges
-        res['semantic_edges'] = semantic_edges
-        res['skip_edges'] = skip_edges
-        
-        # deprecated
-        # return as a dictionary
-        #res = (splitting_op, fn_vertices, fp_vertices, remove_edges, add_edges, semantic_edges), 
-        
-        return res, (keys, t0_init)
-    
-    
-
-
-class BlenderViewer():
-    
-    def __init__(self,
-                 client_name,
-                 clientURL="localhost:9085",
-                 serverURL="localhost:9083"):
-        
-        self.clientName = client_name
-        self.clientURL = clientURL
-        self.serverURL = serverURL
-        
-        self.comm = buckets_with_graphics_pb2_grpc.ClientToServerStub( insecure_channel(serverURL) )
-        
-        self.sphere_buckets = {}
-        self.line_buckets = {}
-        
-        # self.__greetings()
-        
-        
-    def __greetings(self):
-        clientGreeting = buckets_with_graphics_pb2.ClientHello()
-        clientGreeting.clientID.clientName = self.clientName
-        clientGreeting.returnURL = self.clientURL
-        self.comm.introduceClient(clientGreeting)
-        
-    def put_sphere(self, coo0, object_id=128, color=None, size=0.4):
-        x0, y0, z0, t0 = coo0
-        
-        if color is None:
-            color = label2rgb(object_id)
-        
-        sphParams = buckets_with_graphics_pb2.SphereParameters()
-        sphParams.centre.x = x0
-        sphParams.centre.y = y0
-        sphParams.centre.z = z0
-        sphParams.radius = size
-        sphParams.colorXRGB = color
-        
-        # create bucket if not exists
-        if object_id not in self.sphere_buckets.keys():
-            
-            bucket = buckets_with_graphics_pb2.BucketOfSpheres()
-            bucket.clientID.clientName = self.clientName
-            bucket.bucketID = object_id
-
-            bucket.time = t0
-            
-            self.sphere_buckets[object_id] = bucket
-            
-        # start bucket
-        self.sphere_buckets[object_id].spheres.append(sphParams)
-                  
-        #self.sphere_buckets[object_id].spheres.append(sphParams)
-
-        
-    def put_vector(self, coo0, coo1, object_id=128, color=None, radius=1):
-        
-        # coordinates
-        x0, y0, z0, t0 = coo0
-        x1, y1, z1, t1 = coo1
-        
-        if color is None:
-            color = label2rgb(object_id)
-                
-        #
-        line = buckets_with_graphics_pb2.LineParameters()
-        line.startPos.x = x0
-        line.startPos.y = y0
-        line.startPos.z = z0
-        line.endPos.x = x1
-        line.endPos.y = y1
-        line.endPos.z = z1
-        line.radius = radius
-        line.colorXRGB = color
-        
-        # create bucket if not exists
-        if object_id not in self.line_buckets.keys():
-        
-        
-            bucket = buckets_with_graphics_pb2.BucketOfLines()
-            bucket.clientID.clientName = self.clientName
-            bucket.bucketID = object_id
-
-            bucket.time = t0
-            
-            self.line_buckets[object_id] = bucket
-            
-        self.line_buckets[object_id].lines.append(line)
-
-        
-    
-    def send_buckets(self):
-        
-        self.__greetings()
-        
-        # send spheres
-        sphere_list = []
-        for key in self.sphere_buckets.keys():
-            sphere_list.append(self.sphere_buckets[key])
-            
-        self.comm.addSpheres(iter(sphere_list))
-        self.sphere_buckets = {}
-        
-        
-        # send lines
-        line_list = []
-        for key in self.line_buckets.keys():
-            line_list.append(self.line_buckets[key])
-            
-        self.comm.addLines(iter(line_list))
-        self.line_buckets = {}
-        
-
-def read_man_gt(path,
-                seq,
-                first_frame=0,
-                last_frame=10000):
-    
-        
-    # gt path
-    gt_path = os.path.join(path, f'{seq}_GT', 'TRA')
-    assert os.path.isdir(gt_path), gt_path
-    
-     # list of available files
-    tif_files = [os.path.join(gt_path, file) for file in os.listdir(gt_path) if '.tif' in file]
-    tif_files.sort()
-
-    gt_coordinates = {}
-    for frame_idx, file_path in enumerate(tqdm(tif_files)):
-        if not (first_frame < frame_idx < last_frame):
-            continue
-        
-        
-        gt = io.imread(file_path)
-        
-        frame_dict = {}
-        
-        regions = measure.regionprops(gt)
-        
-        for reg in regions:
-            frame_dict[reg.label] = reg.centroid
-            
-        gt_coordinates[frame_idx] = frame_dict
-        
-    return gt_coordinates
-
-
-def send_det(path, suffix, seq, time, first_frame=0, last_frame=10000, dz=1, gt_dict=None):
-    
-    
-    name = os.path.basename(path)
-    
-    new_label = 3000
-    blender = BlenderViewer(f'{name}_{seq}_{suffix}')
-
-    
-    # tracking path
-    res_path = os.path.join(f'{path}_{suffix}', f'{seq}_RES')
-    assert os.path.isdir(res_path), res_path
-    
-    # gt path
-    gt_path = os.path.join(f'{path}', f'{seq}_GT', 'TRA')
-    assert os.path.isdir(gt_path), gt_path
-
-
-    # list of available files
-    tif_files = [os.path.join(res_path, file) for file in os.listdir(res_path) if '.tif' in file]
-    tif_files.sort()
-    log_file = os.path.join(res_path, 'DET_log.txt')
-    
-    
-    
-    if not os.path.isfile(log_file):
-        print(f'DET log file {log_file} does not exist. Run DETMeasure first')
-        return
-    
-    res = read_det_log(log_file)
-   
-    # result of DETMeasure
-    splitting_op, fn_vertices, fp_vertices = res
-    
-    
-    if gt_dict is None:
-        gt_dict = read_man_gt(path, seq, first_frame, last_frame)
-        
-    detections = {}
-    
-    # TODO: iterate only over the <keys>
-    for frame_idx in tqdm(range(first_frame, np.minimum(len(tif_files), last_frame))):
-
-        
-        # read gt and res
-        gt = io.imread(tif_files[frame_idx])
-        if gt.dtype != int:
-            gt = gt.astype(int)
-            #print(tif_files[frame_idx])
-        
-        regions = measure.regionprops(gt)
-        
-        # get regions
-        regions_lab = {}
-        for reg in regions:
-            regions_lab[int(reg.label)] = reg
-            
-        
-        # split
-        for label in splitting_op.get(frame_idx, []):
-            
-            reg = regions_lab[label]
-            
-            x1, y1 = reg.centroid
-            x1, y1, t1 = transform_coo(x1, y1, frame_idx)
-            
-            blender.put_sphere((x1, y1, t1, time),
-                   object_id=1,
-                   color=COLOR['red'])
-            
-        # fp_vertices
-        for label in fp_vertices.get(frame_idx, []):
-                        
-            reg = regions_lab[label]
-            
-            x1, y1 = reg.centroid
-            x1, y1, t1 = transform_coo(x1, y1, frame_idx)
-            
-            blender.put_sphere((x1, y1, t1, time),
-                   object_id=2,
-                   color=COLOR['yellow'],
-                   size=0.2)
-        
-    #fn_vertices
-    for frame_idx, idx in fn_vertices:
-
-        gt_name = os.path.join(gt_path, f'man_track{frame_idx:04}.tif')
-        assert os.path.isfile(gt_name), gt_name
-
-        gt = io.imread(gt_name)
-
-        x1, y1 = gt_dict[frame_idx][idx]
-        x1, y1, t1 = transform_coo(x1, y1, frame_idx)
-
-        #print('sphere', time1, label1, x1, y1, t1)
-        blender.put_sphere((x1, y1, t1, time),
-               object_id=5,
-               color=COLOR['blue'], size=.2)
-
-       
-    
-
-    
-    blender.send_buckets()
-    
-    
-    
-def init_tracks(tif_files, first_frame, dz=1):
-    
-    tracks = {}
-    
-    frame_idx = first_frame-1
-    
-    if frame_idx >= 0:
-    
-        gt = io.imread(tif_files[frame_idx])
-        regions = measure.regionprops(gt)
-
-        for reg in regions:
-
-            label = reg.label
-            x, y = reg.centroid
-            x, y, t = transform_coo(x, y, frame_idx)
-
-            tracks[label] = x, y, t
-        
-    return tracks
-        
-
-def send_gt(path,
-            seq,
-            time,
-            first_frame=0,
-            last_frame=10000,
-            individual_tracks=False,
-            radius=.5):
-
-    """
-    Displays the ground-truth (GT) tracks in Blender
-
-
-    Parameters
-    ----------
-    path : str
-        path to the dataset directory that contains the '{seq}_GT' folder
-    seq : str
-        sequence, e.g. '01', '02', ..
-    time : int
-        Blender timepoint 
-    first_frame : int 
-        first frame index that is sent to Blender
-    last_frame : int 
-        last frame index that is sent to Blender
-    individual_tracks : bool
-        if True, assign a unique ID to each track
-    radius : float
-        radius of the spheres
-
-    Returns
-    -------
-
-    None
-
-    """
-
-    first_frame = first_frame + 1
-    
-    
-    new_label = 3000
-    blender = BlenderViewer(f'BF-C2DL-HSC_gt')
-
-    
-    # tracking path
-    img_path = os.path.join(path, seq)
-    assert os.path.isdir(img_path), img_path
-    
-    # gt path
-    gt_path = os.path.join(path, f'{seq}_GT', 'TRA')
-    assert os.path.isdir(gt_path), gt_path
-    
-
-    # list of available files
-    tif_files = [os.path.join(gt_path, file) for file in os.listdir(gt_path) if '.tif' in file]
-    tif_files.sort()
-    txt_file = os.path.join(gt_path, 'man_track.txt')
-    assert os.path.isfile(txt_file), txt_file
-    
-    # analyze mother and daughter cells
-    daughters = get_daughters_dict(txt_file)
-    
-    # fixed label
-    object_id = 1
-    
-
-    tracks = init_tracks(tif_files, first_frame, dz=CONFIG['z_scale'])
-    
-    for frame_idx in tqdm(range(first_frame, np.minimum(len(tif_files), last_frame))):
-        
-        # read gt and res
-        gt = io.imread(tif_files[frame_idx])
-        regions = measure.regionprops(gt)
-        
-        for reg in regions:
-            
-            x1, y1 = reg.centroid
-            x1, y1, t1 = transform_coo(x1, y1, frame_idx)
-            
-            label = reg.label
-            
-            if individual_tracks:
-                object_id = label
-            
-            
-            if label in tracks.keys():
-                
-                # find previous stage
-                x0, y0, t0 = tracks[label]
-                
-                # put gray line
-                blender.put_vector((x0, y0, t0, time),
-                                   (x1, y1, t1, time),
-                                   object_id=object_id,
-                                   color=COLOR['gray'],
-                                   radius=radius)
-                
-                # put sphere
-                blender.put_sphere((x0, y0, t0, time),
-                                   object_id=2,
-                                   color=COLOR['gray'],
-                                   size=.15)
-                
-            
-            
-            else:
-                assert label in daughters[frame_idx].keys(), f'{label} {daughters[frame_idx].keys()}'
-                
-                m_idx = daughters[frame_idx][label]
-                x0, y0, t0 = tracks[m_idx]
-                
-                # put gray line
-                blender.put_vector((x0, y0, t0, time),
-                                   (x1, y1, t1, time),
-                                   object_id=object_id,
-                                   color=COLOR['gray'],
-                                   radius=radius)
-                
-                # put sphere
-                blender.put_sphere((x0, y0, t0, time),
-                                   object_id=object_id,
-                                   color=COLOR['gray'],
-                                   size=.15)
-                
-            # update tracks    
-            tracks[label] = x1, y1, t1
-                
-            
-    blender.send_buckets()
-    
-    
-
-
-
-def send_res(data_path,
-             res_path,
-             time,
-             first_frame=0,
-             last_frame=10000,
-             gt_dict=None):
-
-    """
-    Displays the mistakes of a RES sequence (from TRA_log.txt) in Blender
-
-
-    Parameters
-    ----------
-    data_path : str
-        path to the input data sequence
-    res_path : str
-        path to the result directory that contains TRA_log.txt
-    time : int
-        timepoint 
-    first_frame : int 
-        first frame index that is sent to Blender
-    last_frame : int 
-        last frame index that is sent to Blender
-    gt_dict : None, dict
-        deprecated
-        dictionary describing the res_tra.txt file
-
-    Returns
-    -------
-
-    None
-
-    """
-    
-    name = os.path.basename(res_path)
-    
-    new_label = 3000
-    blender = BlenderViewer(f'{res_path}')
-
-    # tracking path
-    assert os.path.isdir(res_path), res_path
-
-    # list of available files
-    tif_files = [os.path.join(res_path, file) for file in os.listdir(res_path) if '.tif' in file]
-    tif_files.sort()
-    tra_path = os.path.join(res_path, 'TRA_log.txt')
-    
-    if not os.path.isfile(tra_path):
-        print(f'TRA log file {tra_path} does not exist. Run TRAMeasure procedure first.')
-
-        # TODO: run TRA measure rather than stop the procedure
-        return
-    
-    res, (keys, t0_init) = read_tra_log(tra_path)
-   
-    # result of TRAMeasure
-    splitting_op = res['splitting_op']
-    fn_vertices = res['fn_vertices']
-    fp_vertices = res['fp_vertices']
-    remove_edges = res['remove_edges']
-    add_edges = res['add_edges']
-    semantic_edges = res['semantic_edges']
-    skip_edges = res['skip_edges']
-    
-    if gt_dict is None:
-        path = os.path.dirname(data_path)
-        seq = os.path.basename(data_path)[:2]
-        gt_dict = read_man_gt(path, seq)
-        
-    detections = {}
-    
-    # initialize detections
-    if first_frame > 0:
-        frame_idx = first_frame-1
-        gt = io.imread(tif_files[frame_idx])
-        regions = measure.regionprops(gt)
-        for reg in regions:
-
-            x1, y1 = reg.centroid
-            x1, y1, t1 = transform_coo(x1, y1, frame_idx)
-
-            detections[reg.label] = x1, y1, t1
-    
-    # TODO: iterate only over the <keys>
-    for frame_idx in tqdm(range(first_frame, np.minimum(len(tif_files), last_frame))):
-                
-        if frame_idx not in keys:
-            continue
-        
-        # read gt and res
-        gt = io.imread(tif_files[frame_idx])
-        regions = measure.regionprops(gt)
-        
-        # get regions
-        regions_lab = {}
-        for reg in regions:
-            regions_lab[int(reg.label)] = reg
-            
-        
-        # split
-        for label in splitting_op.get(frame_idx, []):
-            
-            reg = regions_lab[label]
-            
-            x1, y1 = reg.centroid
-            x1, y1, t1 = transform_coo(x1, y1, frame_idx)
-            
-            blender.put_sphere((x1, y1, t1, time),
-                   object_id=1,
-                   color=COLOR['red'])
-            
-
-            
-        # fp_vertices
-        for label in fp_vertices.get(frame_idx, []):
-            
-            reg = regions_lab[label]
-            
-            x1, y1 = reg.centroid
-            x1, y1, t1 = transform_coo(x1, y1, frame_idx)
-            
-            blender.put_sphere((x1, y1, t1, time),
-                   object_id=2,
-                   color=COLOR['yellow'],
-                   size=0.2)
-            
-        # remove_edges
-        for label0, label1 in remove_edges.get(frame_idx, []):
-            
-            reg = regions_lab[label1]
-            
-            x0, y0, t0 = detections[label0]
-            
-            x1, y1 = reg.centroid
-            x1, y1, t1 = transform_coo(x1, y1, frame_idx)
-            
-            blender.put_vector((x0, y0, t0, time),
-                               (x1, y1, t1, time),
-                               object_id=3,
-                               color=COLOR['red'])
-            
-
-        
-            
-
-            
-        # semantic_edges
-        for label0, label1 in semantic_edges.get(frame_idx, []):
-            
-            if label0 != label1:
-                # extra split
-                object_id = 4
-                color = COLOR['pink']
-            else:
-                # missing split
-                object_id = 7
-                color=COLOR['green']
-            
-            reg = regions_lab[label1]
-            
-            x0, y0, t0 = detections[label0]
-            
-            x1, y1 = reg.centroid
-            x1, y1, t1 = transform_coo(x1, y1, frame_idx)
-            
-            blender.put_vector((x0, y0, t0, time),
-                               (x1, y1, t1, time),
-                               object_id=object_id,
-                               color=color)
-            
-            blender.put_sphere((x0, y0, t0, time),
-                   object_id=object_id,
-                   color=color,
-                   size=0.2)
-            
-        # initialize previous detections    
-        if frame_idx in t0_init:
-            for reg in regions:
-                
-                x1, y1 = reg.centroid
-                x1, y1, t1 = transform_coo(x1, y1, frame_idx)
-                
-                detections[reg.label] = x1, y1, t1
-                
-    # missing objects
-    
-    # skip_edges
-    print('the number of skip edges', len(skip_edges))
-    for time0, label0, time1, label1 in tqdm(skip_edges):
-        if not (first_frame <= time0 and time1 < last_frame):
-            continue
-
-        x0, y0 = gt_dict[time0][label0]
-        x0, y0, t0 = transform_coo(x0, y0, time0)
-
-        x1, y1 = gt_dict[time1][label1]
-        x1, y1, t1 = transform_coo(x1, y1, time1)
-
-        blender.put_vector((x0, y0, t0, time),
-                           (x1, y1, t1, time),
-                           object_id=6,
-                           color=COLOR['green'])
-
-    # fn_vertices
-    print('the number of FN vertices', len(fn_vertices))
-    for time1, label1 in tqdm(fn_vertices):
-        if not (first_frame <= time1 < last_frame):
-            continue
-
-        x1, y1 = gt_dict[time1][label1]        
-        x1, y1, t1 = transform_coo(x1, y1, time1)
-
-        blender.put_sphere((x1, y1, t1, time),
-               object_id=5,
-               color=COLOR['blue'], size=.2)
-        
-    # add_edges
-    print('the number of missing edges', len(add_edges))
-    for time0, label0, time1, label1 in tqdm(add_edges):
-        if not (first_frame <= time1 < last_frame - 1):
-            continue
-
-        x0, y0 = gt_dict[time0][label0]
-        x0, y0, t0 = transform_coo(x0, y0, time0)
-        
-        x1, y1 = gt_dict[time1][label1]
-        x1, y1, t1 = transform_coo(x1, y1, time1)
-
-        blender.put_vector((x0, y0, t0, time),
-                           (x1, y1, t1, time),
-                           object_id=6,
-                           color=COLOR['blue'])
-        
-        #print('vector', time1, label1, x1, y1, t1)
-       
-    
-
-    
-    blender.send_buckets()
-    
-    
-def get_daughters_dict(txt_file):
-    '''
-    Returns
-    -------
-
-    dict:
-    daughters = {frame_idx:
-                        { daughter_index : mother_index}
-                }
-
-    '''
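-    # Illustrative man_track.txt line (values made up; column meaning inferred from
-    # the parsing below): "5 10 42 3" means track 5 starts at frame 10, ends at
-    # frame 42 and has mother track 3; only the label, the starting frame and the
-    # mother label are used here, the third column is ignored.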
-
-    
-    daughters = {}
-    with open(txt_file, 'r') as f:
-
-        for line in f.readlines():
-            d_idx, f_idx, _, m_idx = line.strip().split(' ')
-            
-            f_idx = int(f_idx)
-            d_idx = int(d_idx)
-            m_idx = int(m_idx)
-            
-            daughters[f_idx] = daughters.get(f_idx, {})
-            daughters[f_idx][d_idx] = m_idx
-            
-    return daughters
-
-
-def show_libct(file_path,
-               z_shift=0,
-               description='',
-               time=10,
-               object_id=5,
-               min_idx=0,
-               max_idx=2000):
-    '''
-    Displays the candidate graph in Blender
-
-    Parameters
-    ----------
-
-    file_path : str
-        path to the tracking.txt file, in the "libct" format
-    z_shift : int
-        shift of the position along the z axis, for consistency
-        with the libct solver that expects the tracking at a zero level
-    description : str
-        string identifier of the Blender objects
-    time : int
-        timepoint in Blender at which the objects are displayed
-    object_id : int
-        Not used
-    min_idx : int
-        index of the starting frame
-    max_idx : int
-        index of the first missing frame
-        (displays frames {min_idx, min_idx + 1, ..., max_idx - 1})
-
-    '''
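-    # Illustrative tracking.txt records (format inferred from the parser below;
-    # ids and costs are made up):
-    #   H 3 10000000042 -2.1 512.0 480.0                # frame, vertex id, cost, x, y
-    #   APP 1 10000000042 1000000.0                     # event id, vertex id, cost
-    #   DISAPP 2 10000000042 1000000.0                  # event id, vertex id, cost
-    #   MOVE 3 10000000042 10000000043 -1.3             # event id, left vertex, right vertex, cost
-    #   DIV 4 10000000042 10000000043 10000000044 0.7   # event id, mother, daughter 1, daughter 2, cost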
-    
-    
-    assert os.path.isfile(file_path), file_path
-    if min_idx >= max_idx:
-        print('Nothing to display')
-        return
-    
-    blender = BlenderViewer(f'{file_path}_{description}')
-    
-    e_id = 5
-    e_count = 1
-    
-    vertices = {}
-    
-    print(f'range {min_idx} <= frame_idx < {max_idx}')
-    
-    with open(file_path, 'r') as f:
-        
-        # show vertices
-        for line in tqdm(f.readlines()):
-            key = line.split(' ')[0]
-            if key == 'H':
-                
-                _, frame_idx, unique_id, cost, x, y = line.split(' ')
-                
-                # test range
-                if not (min_idx <= int(frame_idx) < max_idx):
-                    continue
-                    
-                # transform coordinates
-                x, y, z = transform_coo(float(x), float(y), int(frame_idx) + z_shift)
-                # save vertex
-                vertices[unique_id] = (x, y, z)
-                
-                
-                
-                color, size = choose_color_and_size(float(cost))
-                blender.put_sphere((x, y, z, time),
-                   object_id=1,
-                   color=color,
-                   size=size * .1)
-                
-
-                
-            elif key == 'APP':
-                
-                _, event_id, vertex_id, cost = line.split(' ')
-                if vertex_id not in vertices.keys():
-                    continue
-                
-                x1, y1, frame_idx = vertices[vertex_id]
-                x2, y2 = x1 + 20, y1 + 20
-                
-                # filter out forbidden APP events
-                if float(cost) > 1:
-                    continue
-                
-                color, size = choose_color_and_size(float(cost))
-                blender.put_vector(
-                           (x1, y1, frame_idx, time),
-                           (x2, y2, frame_idx, time),
-                           object_id=2,
-                           color=color,
-                           radius=size)
-                
-                
-            elif key == 'DISAPP':
-                _, event_id, vertex_id, cost = line.split(' ')
-                if vertex_id not in vertices.keys():
-                    continue
-                    
-                x1, y1, z = vertices[vertex_id]
-                x2, y2 = x1 + 20, y1 - 20
-                
-                # filter out forbidden DISAPP events
-                if float(cost) > 1:
-                    continue
-                
-                color, size = choose_color_and_size(float(cost))
-                blender.put_vector(
-                           (x1, y1, z, time),
-                           (x2, y2, z, time),
-                           object_id=3,
-                           color=color,
-                           radius=size)
-                                
-            elif key == 'MOVE':
-                _, event_id, left_id, right_id, cost = line.split(' ')
-                if any([v_id not in vertices.keys() for v_id in [left_id, right_id]]):
-                    continue
-                
-                x1, y1, z1 = vertices[left_id]
-                x2, y2, z2 = vertices[right_id]
-                
-                
-                color, size = choose_color_and_size(float(cost))
-                blender.put_vector(
-                           (x1, y1, z1, time),
-                           (x2, y2, z2, time),
-                           object_id=5,
-                           color=color,
-                           radius=size)
-                
-            elif key == 'DIV':
-                _, event_id, mother_id, left_id, right_id, cost = line.split(' ')
-                if any([v_id not in vertices.keys() for v_id in [mother_id, left_id, right_id]]):
-                    continue
-                    
-                x1, y1, frame_idx1 = vertices[left_id]
-                x2, y2, frame_idx2 = vertices[right_id]
-                
-                prob = math.exp(-float(cost))
-                color, size = choose_color_and_size(float(cost))
-                blender.put_vector(
-                           (x1, y1, frame_idx1, time),
-                           (x2, y2, frame_idx2, time),
-                           object_id=4,
-                           color=0x1122FF,
-                           radius=size)
-                
-                
-            else:
-                assert False, line
-                
-                
-            # limit number of vectors in a bucket
-            e_count += 1
-            if e_count % 50000 == 0:
-                e_id += 1
-                print('sending buckets')
-                blender.send_buckets()
-                #blender = BlenderViewer(f'{file_path}_{description}_{e_id}')
-                
-                
-    blender.send_buckets()
-                
-    return
-
-
-def show_libct_solution(sol_path,
-               z_shift=0,
-               time=10,
-               object_id=5,
-               min_idx=0,
-               max_idx=2000):
-    '''
-    Displays the tracking solution (tracking.sol) in Blender
-
-    Parameters
-    ----------
-
-    sol_path : str
-        path to the tracking.sol file; the matching tracking.txt
-        is expected in the same directory
-    z_shift : int
-        shift of the position along the z axis, for consistency
-        with the libct solver that expects the tracking at a zero level
-    time : int
-        timepoint in Blender at which the objects are displayed
-    object_id : int
-        Not used
-
-    '''
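-    # Note (inferred from the parsing below): every tracking.sol record is
-    # "<KEY> <packed_id>", where the packed id concatenates 11-character vertex
-    # ids, e.g. MOVE packs <prefix><left_id><right_id> and DIV packs
-    # <prefix><mother_id><daughter1_id><daughter2_id>.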
-    
-    tra_path = str(sol_path).replace('tracking.sol', 'tracking.txt')
-    
-    assert os.path.isfile(tra_path), tra_path
-    assert os.path.isfile(sol_path), sol_path
-    
-    # COLORS 
-    
-    # get vertex map from 'tracking.txt'
-    vertex_map = {} # vertex_id : (x, y, frame_idx)
-    with open(tra_path, 'r') as f:
-        
-        # show vertices
-        for line in tqdm(f.readlines()):
-            key = line.split(' ')[0]
-            if key == 'H':
-                
-                _, frame_idx, unique_id, cost, x, y = line.split(' ')
-                
-                # test range
-                if not (min_idx <= int(frame_idx) < max_idx):
-                    continue
-                
-                x, y, z = transform_coo(float(x), float(y), int(frame_idx) + z_shift)
-                vertex_map[unique_id] = (x, y, z)
-
-    
-    blender = BlenderViewer(f'solution_{sol_path}')
-    e_count = 1
-    
-    with open(sol_path, 'r') as f:
-        
-        # show vertices
-        for line in tqdm(f.readlines()):
-            
-            line = line.rstrip()
-            
-            key, unique_id = line.split(' ')
-            if key == 'H':
-                if unique_id not in vertex_map.keys():
-                    continue
-                
-                x, y, z = vertex_map[unique_id]
-    
-                blender.put_sphere((x, y, z, time),
-                   object_id=1,
-                   color=COLOR['black'], size=.05)
-                
-            elif key == 'APP':
-                
-                vertex_id = '1' + str(unique_id[1:])
-                if vertex_id not in vertex_map.keys():
-                    continue
-                    
-                x, y, z = vertex_map[vertex_id]
-    
-                blender.put_sphere((x, y, z, time),
-                   object_id=2,
-                   color=COLOR['yellow'], size=.2)
-                
-
-            elif key == 'DISAPP':
-
-                vertex_id = '1' + str(unique_id[1:])
-                if vertex_id not in vertex_map.keys():
-                    continue
-                    
-                x, y, z = vertex_map[vertex_id]
-    
-                blender.put_sphere((x, y, z, time),
-                   object_id=3,
-                   color=COLOR['red'], size=.2)
-        
-            elif key == 'MOVE':
-                
-                vertex1_id = str(unique_id)[1:12]
-                vertex2_id = str(unique_id)[12:]
-                if any([v_id not in vertex_map.keys() for v_id in [vertex1_id, vertex2_id]]):
-                    continue
-                
-                
-                assert len(vertex1_id) == 11, vertex1_id
-                assert len(vertex2_id) == 11, vertex2_id
-                
-                x1, y1, z1 = vertex_map[vertex1_id]
-                x2, y2, z2 = vertex_map[vertex2_id]
-                
-                blender.put_vector(
-                           (x1, y1, z1, time),
-                           (x2, y2, z2, time),
-                           object_id=4,
-                           color=COLOR['black'],
-                           radius=.3)
-                
-            elif key == 'DIV':
-                
-                mother_id = str(unique_id[1:12])
-                daughter1_id = str(unique_id[12:23])
-                daughter2_id = str(unique_id[23:34])
-                if any([v_id not in vertex_map.keys() for v_id in [mother_id, daughter1_id, daughter2_id]]):
-                    continue
-
-                x, y, frame_idx = vertex_map[mother_id]
-    
-                blender.put_sphere((x, y, frame_idx, time),
-                   object_id=5,
-                   color=COLOR['blue'], size=.2)
-
-                # show lines to daughters
-
-
-                for d_id in [daughter1_id, daughter2_id]:
-
-                    xd, yd, frame_idx_d = vertex_map[d_id]
-
-                    blender.put_vector(
-                       (x, y, frame_idx, time),
-                       (xd, yd, frame_idx_d, time),
-                       object_id=5,
-                       color=COLOR['blue'],
-                       radius=.3)
-                
-                
-            else:
-                assert False, line
-                
-                
-            # limit number of vectors in a bucket
-            e_count += 1
-            if e_count % 50000 == 0:
-                print('sending buckets')
-                blender.send_buckets()
-                #blender = BlenderViewer(f'solution_{sol_path}')
-                
-                e_count = 1
-                                
-                
-    blender.send_buckets()
-                
-    return
-    
\ No newline at end of file
diff --git a/tracking/blender/buckets_with_graphics_pb2.py b/tracking/blender/buckets_with_graphics_pb2.py
deleted file mode 100644
index 6826b5a..0000000
--- a/tracking/blender/buckets_with_graphics_pb2.py
+++ /dev/null
@@ -1,947 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: buckets_with_graphics.proto
-"""Generated protocol buffer code."""
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='buckets_with_graphics.proto',
-  package='transfers_graphics_protocol',
-  syntax='proto3',
-  serialized_options=b'\n)cz.it4i.ulman.transfers.graphics.protocol',
-  create_key=_descriptor._internal_create_key,
-  serialized_pb=b'\n\x1b\x62uckets_with_graphics.proto\x12\x1btransfers_graphics_protocol\"\x07\n\x05\x45mpty\"*\n\x14\x43lientIdentification\x12\x12\n\nclientName\x18\x01 \x01(\t\"e\n\x0b\x43lientHello\x12\x43\n\x08\x63lientID\x18\x01 \x01(\x0b\x32\x31.transfers_graphics_protocol.ClientIdentification\x12\x11\n\treturnURL\x18\x02 \x01(\t\"\xc5\x01\n\x0f\x42ucketOfSpheres\x12\x43\n\x08\x63lientID\x18\x01 \x01(\x0b\x32\x31.transfers_graphics_protocol.ClientIdentification\x12\x10\n\x08\x62ucketID\x18\x02 \x01(\x04\x12\r\n\x05label\x18\x03 \x01(\t\x12\x0c\n\x04time\x18\x04 \x01(\x04\x12>\n\x07spheres\x18\x05 \x03(\x0b\x32-.transfers_graphics_protocol.SphereParameters\"\xbf\x01\n\rBucketOfLines\x12\x43\n\x08\x63lientID\x18\x01 \x01(\x0b\x32\x31.transfers_graphics_protocol.ClientIdentification\x12\x10\n\x08\x62ucketID\x18\x02 \x01(\x04\x12\r\n\x05label\x18\x03 \x01(\t\x12\x0c\n\x04time\x18\x04 \x01(\x04\x12:\n\x05lines\x18\x05 \x03(\x0b\x32+.transfers_graphics_protocol.LineParameters\"\xc5\x01\n\x0f\x42ucketOfVectors\x12\x43\n\x08\x63lientID\x18\x01 \x01(\x0b\x32\x31.transfers_graphics_protocol.ClientIdentification\x12\x10\n\x08\x62ucketID\x18\x02 \x01(\x04\x12\r\n\x05label\x18\x03 \x01(\t\x12\x0c\n\x04time\x18\x04 \x01(\x04\x12>\n\x07vectors\x18\x05 \x03(\x0b\x32-.transfers_graphics_protocol.VectorParameters\"+\n\x08Vector3D\x12\t\n\x01x\x18\x01 \x01(\x02\x12\t\n\x01y\x18\x02 \x01(\x02\x12\t\n\x01z\x18\x03 \x01(\x02\"l\n\x10SphereParameters\x12\x35\n\x06\x63\x65ntre\x18\x01 \x01(\x0b\x32%.transfers_graphics_protocol.Vector3D\x12\x0e\n\x06radius\x18\x03 \x01(\x02\x12\x11\n\tcolorXRGB\x18\x04 \x01(\r\"\xa3\x01\n\x0eLineParameters\x12\x37\n\x08startPos\x18\x01 \x01(\x0b\x32%.transfers_graphics_protocol.Vector3D\x12\x35\n\x06\x65ndPos\x18\x02 \x01(\x0b\x32%.transfers_graphics_protocol.Vector3D\x12\x0e\n\x06radius\x18\x03 \x01(\x02\x12\x11\n\tcolorXRGB\x18\x04 \x01(\r\"\xa5\x01\n\x10VectorParameters\x12\x37\n\x08startPos\x18\x01 \x01(\x0b\x32%.transfers_graphics_protocol.Vector3D\x12\x35\n\x06\x65ndPos\x18\x02 \x01(\x0b\x32%.transfers_graphics_protocol.Vector3D\x12\x0e\n\x06radius\x18\x03 \x01(\x02\x12\x11\n\tcolorXRGB\x18\x04 \x01(\r\"\x1a\n\x0bTextMessage\x12\x0b\n\x03msg\x18\x01 \x01(\t\"\x99\x01\n\x11SignedTextMessage\x12\x43\n\x08\x63lientID\x18\x01 \x01(\x0b\x32\x31.transfers_graphics_protocol.ClientIdentification\x12?\n\rclientMessage\x18\x02 \x01(\x0b\x32(.transfers_graphics_protocol.TextMessage\"\x1c\n\nClickedIDs\x12\x0e\n\x06objIDs\x18\x01 \x03(\x04\"\x9a\x01\n\x10SignedClickedIDs\x12\x43\n\x08\x63lientID\x18\x01 \x01(\x0b\x32\x31.transfers_graphics_protocol.ClientIdentification\x12\x41\n\x10\x63lientClickedIDs\x18\x02 
\x01(\x0b\x32\'.transfers_graphics_protocol.ClickedIDs2\x96\x07\n\x0e\x43lientToServer\x12\x61\n\x0fintroduceClient\x12(.transfers_graphics_protocol.ClientHello\x1a\".transfers_graphics_protocol.Empty\"\x00\x12\x62\n\naddSpheres\x12,.transfers_graphics_protocol.BucketOfSpheres\x1a\".transfers_graphics_protocol.Empty\"\x00(\x01\x12^\n\x08\x61\x64\x64Lines\x12*.transfers_graphics_protocol.BucketOfLines\x1a\".transfers_graphics_protocol.Empty\"\x00(\x01\x12\x62\n\naddVectors\x12,.transfers_graphics_protocol.BucketOfVectors\x1a\".transfers_graphics_protocol.Empty\"\x00(\x01\x12\x63\n\x0bshowMessage\x12..transfers_graphics_protocol.SignedTextMessage\x1a\".transfers_graphics_protocol.Empty\"\x00\x12\x61\n\nfocusEvent\x12-.transfers_graphics_protocol.SignedClickedIDs\x1a\".transfers_graphics_protocol.Empty\"\x00\x12g\n\x0cunfocusEvent\x12\x31.transfers_graphics_protocol.ClientIdentification\x1a\".transfers_graphics_protocol.Empty\"\x00\x12\x62\n\x0bselectEvent\x12-.transfers_graphics_protocol.SignedClickedIDs\x1a\".transfers_graphics_protocol.Empty\"\x00\x12\x64\n\runselectEvent\x12-.transfers_graphics_protocol.SignedClickedIDs\x1a\".transfers_graphics_protocol.Empty\"\x00\x32\xe4\x03\n\x0eServerToClient\x12]\n\x0bshowMessage\x12(.transfers_graphics_protocol.TextMessage\x1a\".transfers_graphics_protocol.Empty\"\x00\x12[\n\nfocusEvent\x12\'.transfers_graphics_protocol.ClickedIDs\x1a\".transfers_graphics_protocol.Empty\"\x00\x12X\n\x0cunfocusEvent\x12\".transfers_graphics_protocol.Empty\x1a\".transfers_graphics_protocol.Empty\"\x00\x12\\\n\x0bselectEvent\x12\'.transfers_graphics_protocol.ClickedIDs\x1a\".transfers_graphics_protocol.Empty\"\x00\x12^\n\runselectEvent\x12\'.transfers_graphics_protocol.ClickedIDs\x1a\".transfers_graphics_protocol.Empty\"\x00\x42+\n)cz.it4i.ulman.transfers.graphics.protocolb\x06proto3'
-)
-
-
-
-
-_EMPTY = _descriptor.Descriptor(
-  name='Empty',
-  full_name='transfers_graphics_protocol.Empty',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  create_key=_descriptor._internal_create_key,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=60,
-  serialized_end=67,
-)
-
-
-_CLIENTIDENTIFICATION = _descriptor.Descriptor(
-  name='ClientIdentification',
-  full_name='transfers_graphics_protocol.ClientIdentification',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  create_key=_descriptor._internal_create_key,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='clientName', full_name='transfers_graphics_protocol.ClientIdentification.clientName', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=69,
-  serialized_end=111,
-)
-
-
-_CLIENTHELLO = _descriptor.Descriptor(
-  name='ClientHello',
-  full_name='transfers_graphics_protocol.ClientHello',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  create_key=_descriptor._internal_create_key,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='clientID', full_name='transfers_graphics_protocol.ClientHello.clientID', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='returnURL', full_name='transfers_graphics_protocol.ClientHello.returnURL', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=113,
-  serialized_end=214,
-)
-
-
-_BUCKETOFSPHERES = _descriptor.Descriptor(
-  name='BucketOfSpheres',
-  full_name='transfers_graphics_protocol.BucketOfSpheres',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  create_key=_descriptor._internal_create_key,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='clientID', full_name='transfers_graphics_protocol.BucketOfSpheres.clientID', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='bucketID', full_name='transfers_graphics_protocol.BucketOfSpheres.bucketID', index=1,
-      number=2, type=4, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='label', full_name='transfers_graphics_protocol.BucketOfSpheres.label', index=2,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='time', full_name='transfers_graphics_protocol.BucketOfSpheres.time', index=3,
-      number=4, type=4, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='spheres', full_name='transfers_graphics_protocol.BucketOfSpheres.spheres', index=4,
-      number=5, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=217,
-  serialized_end=414,
-)
-
-
-_BUCKETOFLINES = _descriptor.Descriptor(
-  name='BucketOfLines',
-  full_name='transfers_graphics_protocol.BucketOfLines',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  create_key=_descriptor._internal_create_key,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='clientID', full_name='transfers_graphics_protocol.BucketOfLines.clientID', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='bucketID', full_name='transfers_graphics_protocol.BucketOfLines.bucketID', index=1,
-      number=2, type=4, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='label', full_name='transfers_graphics_protocol.BucketOfLines.label', index=2,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='time', full_name='transfers_graphics_protocol.BucketOfLines.time', index=3,
-      number=4, type=4, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='lines', full_name='transfers_graphics_protocol.BucketOfLines.lines', index=4,
-      number=5, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=417,
-  serialized_end=608,
-)
-
-
-_BUCKETOFVECTORS = _descriptor.Descriptor(
-  name='BucketOfVectors',
-  full_name='transfers_graphics_protocol.BucketOfVectors',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  create_key=_descriptor._internal_create_key,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='clientID', full_name='transfers_graphics_protocol.BucketOfVectors.clientID', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='bucketID', full_name='transfers_graphics_protocol.BucketOfVectors.bucketID', index=1,
-      number=2, type=4, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='label', full_name='transfers_graphics_protocol.BucketOfVectors.label', index=2,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='time', full_name='transfers_graphics_protocol.BucketOfVectors.time', index=3,
-      number=4, type=4, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='vectors', full_name='transfers_graphics_protocol.BucketOfVectors.vectors', index=4,
-      number=5, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=611,
-  serialized_end=808,
-)
-
-
-_VECTOR3D = _descriptor.Descriptor(
-  name='Vector3D',
-  full_name='transfers_graphics_protocol.Vector3D',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  create_key=_descriptor._internal_create_key,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='x', full_name='transfers_graphics_protocol.Vector3D.x', index=0,
-      number=1, type=2, cpp_type=6, label=1,
-      has_default_value=False, default_value=float(0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='y', full_name='transfers_graphics_protocol.Vector3D.y', index=1,
-      number=2, type=2, cpp_type=6, label=1,
-      has_default_value=False, default_value=float(0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='z', full_name='transfers_graphics_protocol.Vector3D.z', index=2,
-      number=3, type=2, cpp_type=6, label=1,
-      has_default_value=False, default_value=float(0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=810,
-  serialized_end=853,
-)
-
-
-_SPHEREPARAMETERS = _descriptor.Descriptor(
-  name='SphereParameters',
-  full_name='transfers_graphics_protocol.SphereParameters',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  create_key=_descriptor._internal_create_key,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='centre', full_name='transfers_graphics_protocol.SphereParameters.centre', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='radius', full_name='transfers_graphics_protocol.SphereParameters.radius', index=1,
-      number=3, type=2, cpp_type=6, label=1,
-      has_default_value=False, default_value=float(0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='colorXRGB', full_name='transfers_graphics_protocol.SphereParameters.colorXRGB', index=2,
-      number=4, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=855,
-  serialized_end=963,
-)
-
-
-_LINEPARAMETERS = _descriptor.Descriptor(
-  name='LineParameters',
-  full_name='transfers_graphics_protocol.LineParameters',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  create_key=_descriptor._internal_create_key,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='startPos', full_name='transfers_graphics_protocol.LineParameters.startPos', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='endPos', full_name='transfers_graphics_protocol.LineParameters.endPos', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='radius', full_name='transfers_graphics_protocol.LineParameters.radius', index=2,
-      number=3, type=2, cpp_type=6, label=1,
-      has_default_value=False, default_value=float(0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='colorXRGB', full_name='transfers_graphics_protocol.LineParameters.colorXRGB', index=3,
-      number=4, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=966,
-  serialized_end=1129,
-)
-
-
-_VECTORPARAMETERS = _descriptor.Descriptor(
-  name='VectorParameters',
-  full_name='transfers_graphics_protocol.VectorParameters',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  create_key=_descriptor._internal_create_key,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='startPos', full_name='transfers_graphics_protocol.VectorParameters.startPos', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='endPos', full_name='transfers_graphics_protocol.VectorParameters.endPos', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='radius', full_name='transfers_graphics_protocol.VectorParameters.radius', index=2,
-      number=3, type=2, cpp_type=6, label=1,
-      has_default_value=False, default_value=float(0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='colorXRGB', full_name='transfers_graphics_protocol.VectorParameters.colorXRGB', index=3,
-      number=4, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1132,
-  serialized_end=1297,
-)
-
-
-_TEXTMESSAGE = _descriptor.Descriptor(
-  name='TextMessage',
-  full_name='transfers_graphics_protocol.TextMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  create_key=_descriptor._internal_create_key,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='msg', full_name='transfers_graphics_protocol.TextMessage.msg', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1299,
-  serialized_end=1325,
-)
-
-
-_SIGNEDTEXTMESSAGE = _descriptor.Descriptor(
-  name='SignedTextMessage',
-  full_name='transfers_graphics_protocol.SignedTextMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  create_key=_descriptor._internal_create_key,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='clientID', full_name='transfers_graphics_protocol.SignedTextMessage.clientID', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='clientMessage', full_name='transfers_graphics_protocol.SignedTextMessage.clientMessage', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1328,
-  serialized_end=1481,
-)
-
-
-_CLICKEDIDS = _descriptor.Descriptor(
-  name='ClickedIDs',
-  full_name='transfers_graphics_protocol.ClickedIDs',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  create_key=_descriptor._internal_create_key,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='objIDs', full_name='transfers_graphics_protocol.ClickedIDs.objIDs', index=0,
-      number=1, type=4, cpp_type=4, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1483,
-  serialized_end=1511,
-)
-
-
-_SIGNEDCLICKEDIDS = _descriptor.Descriptor(
-  name='SignedClickedIDs',
-  full_name='transfers_graphics_protocol.SignedClickedIDs',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  create_key=_descriptor._internal_create_key,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='clientID', full_name='transfers_graphics_protocol.SignedClickedIDs.clientID', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='clientClickedIDs', full_name='transfers_graphics_protocol.SignedClickedIDs.clientClickedIDs', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1514,
-  serialized_end=1668,
-)
-
-_CLIENTHELLO.fields_by_name['clientID'].message_type = _CLIENTIDENTIFICATION
-_BUCKETOFSPHERES.fields_by_name['clientID'].message_type = _CLIENTIDENTIFICATION
-_BUCKETOFSPHERES.fields_by_name['spheres'].message_type = _SPHEREPARAMETERS
-_BUCKETOFLINES.fields_by_name['clientID'].message_type = _CLIENTIDENTIFICATION
-_BUCKETOFLINES.fields_by_name['lines'].message_type = _LINEPARAMETERS
-_BUCKETOFVECTORS.fields_by_name['clientID'].message_type = _CLIENTIDENTIFICATION
-_BUCKETOFVECTORS.fields_by_name['vectors'].message_type = _VECTORPARAMETERS
-_SPHEREPARAMETERS.fields_by_name['centre'].message_type = _VECTOR3D
-_LINEPARAMETERS.fields_by_name['startPos'].message_type = _VECTOR3D
-_LINEPARAMETERS.fields_by_name['endPos'].message_type = _VECTOR3D
-_VECTORPARAMETERS.fields_by_name['startPos'].message_type = _VECTOR3D
-_VECTORPARAMETERS.fields_by_name['endPos'].message_type = _VECTOR3D
-_SIGNEDTEXTMESSAGE.fields_by_name['clientID'].message_type = _CLIENTIDENTIFICATION
-_SIGNEDTEXTMESSAGE.fields_by_name['clientMessage'].message_type = _TEXTMESSAGE
-_SIGNEDCLICKEDIDS.fields_by_name['clientID'].message_type = _CLIENTIDENTIFICATION
-_SIGNEDCLICKEDIDS.fields_by_name['clientClickedIDs'].message_type = _CLICKEDIDS
-DESCRIPTOR.message_types_by_name['Empty'] = _EMPTY
-DESCRIPTOR.message_types_by_name['ClientIdentification'] = _CLIENTIDENTIFICATION
-DESCRIPTOR.message_types_by_name['ClientHello'] = _CLIENTHELLO
-DESCRIPTOR.message_types_by_name['BucketOfSpheres'] = _BUCKETOFSPHERES
-DESCRIPTOR.message_types_by_name['BucketOfLines'] = _BUCKETOFLINES
-DESCRIPTOR.message_types_by_name['BucketOfVectors'] = _BUCKETOFVECTORS
-DESCRIPTOR.message_types_by_name['Vector3D'] = _VECTOR3D
-DESCRIPTOR.message_types_by_name['SphereParameters'] = _SPHEREPARAMETERS
-DESCRIPTOR.message_types_by_name['LineParameters'] = _LINEPARAMETERS
-DESCRIPTOR.message_types_by_name['VectorParameters'] = _VECTORPARAMETERS
-DESCRIPTOR.message_types_by_name['TextMessage'] = _TEXTMESSAGE
-DESCRIPTOR.message_types_by_name['SignedTextMessage'] = _SIGNEDTEXTMESSAGE
-DESCRIPTOR.message_types_by_name['ClickedIDs'] = _CLICKEDIDS
-DESCRIPTOR.message_types_by_name['SignedClickedIDs'] = _SIGNEDCLICKEDIDS
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-Empty = _reflection.GeneratedProtocolMessageType('Empty', (_message.Message,), {
-  'DESCRIPTOR' : _EMPTY,
-  '__module__' : 'buckets_with_graphics_pb2'
-  # @@protoc_insertion_point(class_scope:transfers_graphics_protocol.Empty)
-  })
-_sym_db.RegisterMessage(Empty)
-
-ClientIdentification = _reflection.GeneratedProtocolMessageType('ClientIdentification', (_message.Message,), {
-  'DESCRIPTOR' : _CLIENTIDENTIFICATION,
-  '__module__' : 'buckets_with_graphics_pb2'
-  # @@protoc_insertion_point(class_scope:transfers_graphics_protocol.ClientIdentification)
-  })
-_sym_db.RegisterMessage(ClientIdentification)
-
-ClientHello = _reflection.GeneratedProtocolMessageType('ClientHello', (_message.Message,), {
-  'DESCRIPTOR' : _CLIENTHELLO,
-  '__module__' : 'buckets_with_graphics_pb2'
-  # @@protoc_insertion_point(class_scope:transfers_graphics_protocol.ClientHello)
-  })
-_sym_db.RegisterMessage(ClientHello)
-
-BucketOfSpheres = _reflection.GeneratedProtocolMessageType('BucketOfSpheres', (_message.Message,), {
-  'DESCRIPTOR' : _BUCKETOFSPHERES,
-  '__module__' : 'buckets_with_graphics_pb2'
-  # @@protoc_insertion_point(class_scope:transfers_graphics_protocol.BucketOfSpheres)
-  })
-_sym_db.RegisterMessage(BucketOfSpheres)
-
-BucketOfLines = _reflection.GeneratedProtocolMessageType('BucketOfLines', (_message.Message,), {
-  'DESCRIPTOR' : _BUCKETOFLINES,
-  '__module__' : 'buckets_with_graphics_pb2'
-  # @@protoc_insertion_point(class_scope:transfers_graphics_protocol.BucketOfLines)
-  })
-_sym_db.RegisterMessage(BucketOfLines)
-
-BucketOfVectors = _reflection.GeneratedProtocolMessageType('BucketOfVectors', (_message.Message,), {
-  'DESCRIPTOR' : _BUCKETOFVECTORS,
-  '__module__' : 'buckets_with_graphics_pb2'
-  # @@protoc_insertion_point(class_scope:transfers_graphics_protocol.BucketOfVectors)
-  })
-_sym_db.RegisterMessage(BucketOfVectors)
-
-Vector3D = _reflection.GeneratedProtocolMessageType('Vector3D', (_message.Message,), {
-  'DESCRIPTOR' : _VECTOR3D,
-  '__module__' : 'buckets_with_graphics_pb2'
-  # @@protoc_insertion_point(class_scope:transfers_graphics_protocol.Vector3D)
-  })
-_sym_db.RegisterMessage(Vector3D)
-
-SphereParameters = _reflection.GeneratedProtocolMessageType('SphereParameters', (_message.Message,), {
-  'DESCRIPTOR' : _SPHEREPARAMETERS,
-  '__module__' : 'buckets_with_graphics_pb2'
-  # @@protoc_insertion_point(class_scope:transfers_graphics_protocol.SphereParameters)
-  })
-_sym_db.RegisterMessage(SphereParameters)
-
-LineParameters = _reflection.GeneratedProtocolMessageType('LineParameters', (_message.Message,), {
-  'DESCRIPTOR' : _LINEPARAMETERS,
-  '__module__' : 'buckets_with_graphics_pb2'
-  # @@protoc_insertion_point(class_scope:transfers_graphics_protocol.LineParameters)
-  })
-_sym_db.RegisterMessage(LineParameters)
-
-VectorParameters = _reflection.GeneratedProtocolMessageType('VectorParameters', (_message.Message,), {
-  'DESCRIPTOR' : _VECTORPARAMETERS,
-  '__module__' : 'buckets_with_graphics_pb2'
-  # @@protoc_insertion_point(class_scope:transfers_graphics_protocol.VectorParameters)
-  })
-_sym_db.RegisterMessage(VectorParameters)
-
-TextMessage = _reflection.GeneratedProtocolMessageType('TextMessage', (_message.Message,), {
-  'DESCRIPTOR' : _TEXTMESSAGE,
-  '__module__' : 'buckets_with_graphics_pb2'
-  # @@protoc_insertion_point(class_scope:transfers_graphics_protocol.TextMessage)
-  })
-_sym_db.RegisterMessage(TextMessage)
-
-SignedTextMessage = _reflection.GeneratedProtocolMessageType('SignedTextMessage', (_message.Message,), {
-  'DESCRIPTOR' : _SIGNEDTEXTMESSAGE,
-  '__module__' : 'buckets_with_graphics_pb2'
-  # @@protoc_insertion_point(class_scope:transfers_graphics_protocol.SignedTextMessage)
-  })
-_sym_db.RegisterMessage(SignedTextMessage)
-
-ClickedIDs = _reflection.GeneratedProtocolMessageType('ClickedIDs', (_message.Message,), {
-  'DESCRIPTOR' : _CLICKEDIDS,
-  '__module__' : 'buckets_with_graphics_pb2'
-  # @@protoc_insertion_point(class_scope:transfers_graphics_protocol.ClickedIDs)
-  })
-_sym_db.RegisterMessage(ClickedIDs)
-
-SignedClickedIDs = _reflection.GeneratedProtocolMessageType('SignedClickedIDs', (_message.Message,), {
-  'DESCRIPTOR' : _SIGNEDCLICKEDIDS,
-  '__module__' : 'buckets_with_graphics_pb2'
-  # @@protoc_insertion_point(class_scope:transfers_graphics_protocol.SignedClickedIDs)
-  })
-_sym_db.RegisterMessage(SignedClickedIDs)
-
-
-DESCRIPTOR._options = None
-
-_CLIENTTOSERVER = _descriptor.ServiceDescriptor(
-  name='ClientToServer',
-  full_name='transfers_graphics_protocol.ClientToServer',
-  file=DESCRIPTOR,
-  index=0,
-  serialized_options=None,
-  create_key=_descriptor._internal_create_key,
-  serialized_start=1671,
-  serialized_end=2589,
-  methods=[
-  _descriptor.MethodDescriptor(
-    name='introduceClient',
-    full_name='transfers_graphics_protocol.ClientToServer.introduceClient',
-    index=0,
-    containing_service=None,
-    input_type=_CLIENTHELLO,
-    output_type=_EMPTY,
-    serialized_options=None,
-    create_key=_descriptor._internal_create_key,
-  ),
-  _descriptor.MethodDescriptor(
-    name='addSpheres',
-    full_name='transfers_graphics_protocol.ClientToServer.addSpheres',
-    index=1,
-    containing_service=None,
-    input_type=_BUCKETOFSPHERES,
-    output_type=_EMPTY,
-    serialized_options=None,
-    create_key=_descriptor._internal_create_key,
-  ),
-  _descriptor.MethodDescriptor(
-    name='addLines',
-    full_name='transfers_graphics_protocol.ClientToServer.addLines',
-    index=2,
-    containing_service=None,
-    input_type=_BUCKETOFLINES,
-    output_type=_EMPTY,
-    serialized_options=None,
-    create_key=_descriptor._internal_create_key,
-  ),
-  _descriptor.MethodDescriptor(
-    name='addVectors',
-    full_name='transfers_graphics_protocol.ClientToServer.addVectors',
-    index=3,
-    containing_service=None,
-    input_type=_BUCKETOFVECTORS,
-    output_type=_EMPTY,
-    serialized_options=None,
-    create_key=_descriptor._internal_create_key,
-  ),
-  _descriptor.MethodDescriptor(
-    name='showMessage',
-    full_name='transfers_graphics_protocol.ClientToServer.showMessage',
-    index=4,
-    containing_service=None,
-    input_type=_SIGNEDTEXTMESSAGE,
-    output_type=_EMPTY,
-    serialized_options=None,
-    create_key=_descriptor._internal_create_key,
-  ),
-  _descriptor.MethodDescriptor(
-    name='focusEvent',
-    full_name='transfers_graphics_protocol.ClientToServer.focusEvent',
-    index=5,
-    containing_service=None,
-    input_type=_SIGNEDCLICKEDIDS,
-    output_type=_EMPTY,
-    serialized_options=None,
-    create_key=_descriptor._internal_create_key,
-  ),
-  _descriptor.MethodDescriptor(
-    name='unfocusEvent',
-    full_name='transfers_graphics_protocol.ClientToServer.unfocusEvent',
-    index=6,
-    containing_service=None,
-    input_type=_CLIENTIDENTIFICATION,
-    output_type=_EMPTY,
-    serialized_options=None,
-    create_key=_descriptor._internal_create_key,
-  ),
-  _descriptor.MethodDescriptor(
-    name='selectEvent',
-    full_name='transfers_graphics_protocol.ClientToServer.selectEvent',
-    index=7,
-    containing_service=None,
-    input_type=_SIGNEDCLICKEDIDS,
-    output_type=_EMPTY,
-    serialized_options=None,
-    create_key=_descriptor._internal_create_key,
-  ),
-  _descriptor.MethodDescriptor(
-    name='unselectEvent',
-    full_name='transfers_graphics_protocol.ClientToServer.unselectEvent',
-    index=8,
-    containing_service=None,
-    input_type=_SIGNEDCLICKEDIDS,
-    output_type=_EMPTY,
-    serialized_options=None,
-    create_key=_descriptor._internal_create_key,
-  ),
-])
-_sym_db.RegisterServiceDescriptor(_CLIENTTOSERVER)
-
-DESCRIPTOR.services_by_name['ClientToServer'] = _CLIENTTOSERVER
-
-
-_SERVERTOCLIENT = _descriptor.ServiceDescriptor(
-  name='ServerToClient',
-  full_name='transfers_graphics_protocol.ServerToClient',
-  file=DESCRIPTOR,
-  index=1,
-  serialized_options=None,
-  create_key=_descriptor._internal_create_key,
-  serialized_start=2592,
-  serialized_end=3076,
-  methods=[
-  _descriptor.MethodDescriptor(
-    name='showMessage',
-    full_name='transfers_graphics_protocol.ServerToClient.showMessage',
-    index=0,
-    containing_service=None,
-    input_type=_TEXTMESSAGE,
-    output_type=_EMPTY,
-    serialized_options=None,
-    create_key=_descriptor._internal_create_key,
-  ),
-  _descriptor.MethodDescriptor(
-    name='focusEvent',
-    full_name='transfers_graphics_protocol.ServerToClient.focusEvent',
-    index=1,
-    containing_service=None,
-    input_type=_CLICKEDIDS,
-    output_type=_EMPTY,
-    serialized_options=None,
-    create_key=_descriptor._internal_create_key,
-  ),
-  _descriptor.MethodDescriptor(
-    name='unfocusEvent',
-    full_name='transfers_graphics_protocol.ServerToClient.unfocusEvent',
-    index=2,
-    containing_service=None,
-    input_type=_EMPTY,
-    output_type=_EMPTY,
-    serialized_options=None,
-    create_key=_descriptor._internal_create_key,
-  ),
-  _descriptor.MethodDescriptor(
-    name='selectEvent',
-    full_name='transfers_graphics_protocol.ServerToClient.selectEvent',
-    index=3,
-    containing_service=None,
-    input_type=_CLICKEDIDS,
-    output_type=_EMPTY,
-    serialized_options=None,
-    create_key=_descriptor._internal_create_key,
-  ),
-  _descriptor.MethodDescriptor(
-    name='unselectEvent',
-    full_name='transfers_graphics_protocol.ServerToClient.unselectEvent',
-    index=4,
-    containing_service=None,
-    input_type=_CLICKEDIDS,
-    output_type=_EMPTY,
-    serialized_options=None,
-    create_key=_descriptor._internal_create_key,
-  ),
-])
-_sym_db.RegisterServiceDescriptor(_SERVERTOCLIENT)
-
-DESCRIPTOR.services_by_name['ServerToClient'] = _SERVERTOCLIENT
-
-# @@protoc_insertion_point(module_scope)
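[Editor's note] For reference, the descriptors deleted above defined the message shapes used by the Blender bridge (Vector3D, SphereParameters, BucketOfSpheres, BucketOfLines, BucketOfVectors, ...). A minimal sketch of how such messages were built with the removed module, assuming it was importable as a package from tracking/blender; concrete values are illustrative only:

    # illustrative only: field names follow the descriptors deleted above,
    # and the import path is an assumption based on the file's location
    from tracking.blender import buckets_with_graphics_pb2 as pb2

    bucket = pb2.BucketOfSpheres(bucketID=1, label="cells_t0", time=0)
    sphere = bucket.spheres.add()           # repeated SphereParameters field
    sphere.centre.x, sphere.centre.y, sphere.centre.z = 10.0, 20.0, 5.0  # Vector3D
    sphere.radius = 3.5                     # float radius
    sphere.colorXRGB = 0xFF0000             # packed 0xRRGGBB colour (uint32)
    payload = bucket.SerializeToString()    # protobuf wire format sent over gRPC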
diff --git a/tracking/blender/buckets_with_graphics_pb2_grpc.py b/tracking/blender/buckets_with_graphics_pb2_grpc.py
deleted file mode 100644
index 5847a82..0000000
--- a/tracking/blender/buckets_with_graphics_pb2_grpc.py
+++ /dev/null
@@ -1,538 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-"""Client and server classes corresponding to protobuf-defined services."""
-import grpc
-
-from . import buckets_with_graphics_pb2 as buckets__with__graphics__pb2
-
-
-class ClientToServerStub(object):
-    """Missing associated documentation comment in .proto file."""
-
-    def __init__(self, channel):
-        """Constructor.
-
-        Args:
-            channel: A grpc.Channel.
-        """
-        self.introduceClient = channel.unary_unary(
-                '/transfers_graphics_protocol.ClientToServer/introduceClient',
-                request_serializer=buckets__with__graphics__pb2.ClientHello.SerializeToString,
-                response_deserializer=buckets__with__graphics__pb2.Empty.FromString,
-                )
-        self.addSpheres = channel.stream_unary(
-                '/transfers_graphics_protocol.ClientToServer/addSpheres',
-                request_serializer=buckets__with__graphics__pb2.BucketOfSpheres.SerializeToString,
-                response_deserializer=buckets__with__graphics__pb2.Empty.FromString,
-                )
-        self.addLines = channel.stream_unary(
-                '/transfers_graphics_protocol.ClientToServer/addLines',
-                request_serializer=buckets__with__graphics__pb2.BucketOfLines.SerializeToString,
-                response_deserializer=buckets__with__graphics__pb2.Empty.FromString,
-                )
-        self.addVectors = channel.stream_unary(
-                '/transfers_graphics_protocol.ClientToServer/addVectors',
-                request_serializer=buckets__with__graphics__pb2.BucketOfVectors.SerializeToString,
-                response_deserializer=buckets__with__graphics__pb2.Empty.FromString,
-                )
-        self.showMessage = channel.unary_unary(
-                '/transfers_graphics_protocol.ClientToServer/showMessage',
-                request_serializer=buckets__with__graphics__pb2.SignedTextMessage.SerializeToString,
-                response_deserializer=buckets__with__graphics__pb2.Empty.FromString,
-                )
-        self.focusEvent = channel.unary_unary(
-                '/transfers_graphics_protocol.ClientToServer/focusEvent',
-                request_serializer=buckets__with__graphics__pb2.SignedClickedIDs.SerializeToString,
-                response_deserializer=buckets__with__graphics__pb2.Empty.FromString,
-                )
-        self.unfocusEvent = channel.unary_unary(
-                '/transfers_graphics_protocol.ClientToServer/unfocusEvent',
-                request_serializer=buckets__with__graphics__pb2.ClientIdentification.SerializeToString,
-                response_deserializer=buckets__with__graphics__pb2.Empty.FromString,
-                )
-        self.selectEvent = channel.unary_unary(
-                '/transfers_graphics_protocol.ClientToServer/selectEvent',
-                request_serializer=buckets__with__graphics__pb2.SignedClickedIDs.SerializeToString,
-                response_deserializer=buckets__with__graphics__pb2.Empty.FromString,
-                )
-        self.unselectEvent = channel.unary_unary(
-                '/transfers_graphics_protocol.ClientToServer/unselectEvent',
-                request_serializer=buckets__with__graphics__pb2.SignedClickedIDs.SerializeToString,
-                response_deserializer=buckets__with__graphics__pb2.Empty.FromString,
-                )
-
-
-class ClientToServerServicer(object):
-    """Missing associated documentation comment in .proto file."""
-
-    def introduceClient(self, request, context):
-        """*
-        client should start communication with this message;
-        in this message the client may (but need not) additionally register
-        a callback URL at which the server will be sending notifications
-
-        if this message is omitted, unmatched incoming requests will
-        be placed into the "unknown_source" collection
-        """
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details('Method not implemented!')
-        raise NotImplementedError('Method not implemented!')
-
-    def addSpheres(self, request_iterator, context):
-        """*
-        one bucket of a single type of graphics is requested to be displayed;
-        this request shall contain a burst/batch of instances that all
-        shall appear in the created bucket
-
-        since the display need not be able to distinguish among instances
-        within a bucket, only an ID of the bucket is transferred and no IDs
-        for the individual instances
-        """
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details('Method not implemented!')
-        raise NotImplementedError('Method not implemented!')
-
-    def addLines(self, request_iterator, context):
-        """Missing associated documentation comment in .proto file."""
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details('Method not implemented!')
-        raise NotImplementedError('Method not implemented!')
-
-    def addVectors(self, request_iterator, context):
-        """Missing associated documentation comment in .proto file."""
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details('Method not implemented!')
-        raise NotImplementedError('Method not implemented!')
-
-    def showMessage(self, request, context):
-        """Missing associated documentation comment in .proto file."""
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details('Method not implemented!')
-        raise NotImplementedError('Method not implemented!')
-
-    def focusEvent(self, request, context):
-        """Missing associated documentation comment in .proto file."""
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details('Method not implemented!')
-        raise NotImplementedError('Method not implemented!')
-
-    def unfocusEvent(self, request, context):
-        """Missing associated documentation comment in .proto file."""
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details('Method not implemented!')
-        raise NotImplementedError('Method not implemented!')
-
-    def selectEvent(self, request, context):
-        """Missing associated documentation comment in .proto file."""
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details('Method not implemented!')
-        raise NotImplementedError('Method not implemented!')
-
-    def unselectEvent(self, request, context):
-        """Missing associated documentation comment in .proto file."""
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details('Method not implemented!')
-        raise NotImplementedError('Method not implemented!')
-
-
-def add_ClientToServerServicer_to_server(servicer, server):
-    rpc_method_handlers = {
-            'introduceClient': grpc.unary_unary_rpc_method_handler(
-                    servicer.introduceClient,
-                    request_deserializer=buckets__with__graphics__pb2.ClientHello.FromString,
-                    response_serializer=buckets__with__graphics__pb2.Empty.SerializeToString,
-            ),
-            'addSpheres': grpc.stream_unary_rpc_method_handler(
-                    servicer.addSpheres,
-                    request_deserializer=buckets__with__graphics__pb2.BucketOfSpheres.FromString,
-                    response_serializer=buckets__with__graphics__pb2.Empty.SerializeToString,
-            ),
-            'addLines': grpc.stream_unary_rpc_method_handler(
-                    servicer.addLines,
-                    request_deserializer=buckets__with__graphics__pb2.BucketOfLines.FromString,
-                    response_serializer=buckets__with__graphics__pb2.Empty.SerializeToString,
-            ),
-            'addVectors': grpc.stream_unary_rpc_method_handler(
-                    servicer.addVectors,
-                    request_deserializer=buckets__with__graphics__pb2.BucketOfVectors.FromString,
-                    response_serializer=buckets__with__graphics__pb2.Empty.SerializeToString,
-            ),
-            'showMessage': grpc.unary_unary_rpc_method_handler(
-                    servicer.showMessage,
-                    request_deserializer=buckets__with__graphics__pb2.SignedTextMessage.FromString,
-                    response_serializer=buckets__with__graphics__pb2.Empty.SerializeToString,
-            ),
-            'focusEvent': grpc.unary_unary_rpc_method_handler(
-                    servicer.focusEvent,
-                    request_deserializer=buckets__with__graphics__pb2.SignedClickedIDs.FromString,
-                    response_serializer=buckets__with__graphics__pb2.Empty.SerializeToString,
-            ),
-            'unfocusEvent': grpc.unary_unary_rpc_method_handler(
-                    servicer.unfocusEvent,
-                    request_deserializer=buckets__with__graphics__pb2.ClientIdentification.FromString,
-                    response_serializer=buckets__with__graphics__pb2.Empty.SerializeToString,
-            ),
-            'selectEvent': grpc.unary_unary_rpc_method_handler(
-                    servicer.selectEvent,
-                    request_deserializer=buckets__with__graphics__pb2.SignedClickedIDs.FromString,
-                    response_serializer=buckets__with__graphics__pb2.Empty.SerializeToString,
-            ),
-            'unselectEvent': grpc.unary_unary_rpc_method_handler(
-                    servicer.unselectEvent,
-                    request_deserializer=buckets__with__graphics__pb2.SignedClickedIDs.FromString,
-                    response_serializer=buckets__with__graphics__pb2.Empty.SerializeToString,
-            ),
-    }
-    generic_handler = grpc.method_handlers_generic_handler(
-            'transfers_graphics_protocol.ClientToServer', rpc_method_handlers)
-    server.add_generic_rpc_handlers((generic_handler,))
-
-
- # This class is part of an EXPERIMENTAL API.
-class ClientToServer(object):
-    """Missing associated documentation comment in .proto file."""
-
-    @staticmethod
-    def introduceClient(request,
-            target,
-            options=(),
-            channel_credentials=None,
-            call_credentials=None,
-            insecure=False,
-            compression=None,
-            wait_for_ready=None,
-            timeout=None,
-            metadata=None):
-        return grpc.experimental.unary_unary(request, target, '/transfers_graphics_protocol.ClientToServer/introduceClient',
-            buckets__with__graphics__pb2.ClientHello.SerializeToString,
-            buckets__with__graphics__pb2.Empty.FromString,
-            options, channel_credentials,
-            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
-
-    @staticmethod
-    def addSpheres(request_iterator,
-            target,
-            options=(),
-            channel_credentials=None,
-            call_credentials=None,
-            insecure=False,
-            compression=None,
-            wait_for_ready=None,
-            timeout=None,
-            metadata=None):
-        return grpc.experimental.stream_unary(request_iterator, target, '/transfers_graphics_protocol.ClientToServer/addSpheres',
-            buckets__with__graphics__pb2.BucketOfSpheres.SerializeToString,
-            buckets__with__graphics__pb2.Empty.FromString,
-            options, channel_credentials,
-            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
-
-    @staticmethod
-    def addLines(request_iterator,
-            target,
-            options=(),
-            channel_credentials=None,
-            call_credentials=None,
-            insecure=False,
-            compression=None,
-            wait_for_ready=None,
-            timeout=None,
-            metadata=None):
-        return grpc.experimental.stream_unary(request_iterator, target, '/transfers_graphics_protocol.ClientToServer/addLines',
-            buckets__with__graphics__pb2.BucketOfLines.SerializeToString,
-            buckets__with__graphics__pb2.Empty.FromString,
-            options, channel_credentials,
-            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
-
-    @staticmethod
-    def addVectors(request_iterator,
-            target,
-            options=(),
-            channel_credentials=None,
-            call_credentials=None,
-            insecure=False,
-            compression=None,
-            wait_for_ready=None,
-            timeout=None,
-            metadata=None):
-        return grpc.experimental.stream_unary(request_iterator, target, '/transfers_graphics_protocol.ClientToServer/addVectors',
-            buckets__with__graphics__pb2.BucketOfVectors.SerializeToString,
-            buckets__with__graphics__pb2.Empty.FromString,
-            options, channel_credentials,
-            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
-
-    @staticmethod
-    def showMessage(request,
-            target,
-            options=(),
-            channel_credentials=None,
-            call_credentials=None,
-            insecure=False,
-            compression=None,
-            wait_for_ready=None,
-            timeout=None,
-            metadata=None):
-        return grpc.experimental.unary_unary(request, target, '/transfers_graphics_protocol.ClientToServer/showMessage',
-            buckets__with__graphics__pb2.SignedTextMessage.SerializeToString,
-            buckets__with__graphics__pb2.Empty.FromString,
-            options, channel_credentials,
-            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
-
-    @staticmethod
-    def focusEvent(request,
-            target,
-            options=(),
-            channel_credentials=None,
-            call_credentials=None,
-            insecure=False,
-            compression=None,
-            wait_for_ready=None,
-            timeout=None,
-            metadata=None):
-        return grpc.experimental.unary_unary(request, target, '/transfers_graphics_protocol.ClientToServer/focusEvent',
-            buckets__with__graphics__pb2.SignedClickedIDs.SerializeToString,
-            buckets__with__graphics__pb2.Empty.FromString,
-            options, channel_credentials,
-            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
-
-    @staticmethod
-    def unfocusEvent(request,
-            target,
-            options=(),
-            channel_credentials=None,
-            call_credentials=None,
-            insecure=False,
-            compression=None,
-            wait_for_ready=None,
-            timeout=None,
-            metadata=None):
-        return grpc.experimental.unary_unary(request, target, '/transfers_graphics_protocol.ClientToServer/unfocusEvent',
-            buckets__with__graphics__pb2.ClientIdentification.SerializeToString,
-            buckets__with__graphics__pb2.Empty.FromString,
-            options, channel_credentials,
-            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
-
-    @staticmethod
-    def selectEvent(request,
-            target,
-            options=(),
-            channel_credentials=None,
-            call_credentials=None,
-            insecure=False,
-            compression=None,
-            wait_for_ready=None,
-            timeout=None,
-            metadata=None):
-        return grpc.experimental.unary_unary(request, target, '/transfers_graphics_protocol.ClientToServer/selectEvent',
-            buckets__with__graphics__pb2.SignedClickedIDs.SerializeToString,
-            buckets__with__graphics__pb2.Empty.FromString,
-            options, channel_credentials,
-            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
-
-    @staticmethod
-    def unselectEvent(request,
-            target,
-            options=(),
-            channel_credentials=None,
-            call_credentials=None,
-            insecure=False,
-            compression=None,
-            wait_for_ready=None,
-            timeout=None,
-            metadata=None):
-        return grpc.experimental.unary_unary(request, target, '/transfers_graphics_protocol.ClientToServer/unselectEvent',
-            buckets__with__graphics__pb2.SignedClickedIDs.SerializeToString,
-            buckets__with__graphics__pb2.Empty.FromString,
-            options, channel_credentials,
-            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
-
-
-class ServerToClientStub(object):
-    """Missing associated documentation comment in .proto file."""
-
-    def __init__(self, channel):
-        """Constructor.
-
-        Args:
-            channel: A grpc.Channel.
-        """
-        self.showMessage = channel.unary_unary(
-                '/transfers_graphics_protocol.ServerToClient/showMessage',
-                request_serializer=buckets__with__graphics__pb2.TextMessage.SerializeToString,
-                response_deserializer=buckets__with__graphics__pb2.Empty.FromString,
-                )
-        self.focusEvent = channel.unary_unary(
-                '/transfers_graphics_protocol.ServerToClient/focusEvent',
-                request_serializer=buckets__with__graphics__pb2.ClickedIDs.SerializeToString,
-                response_deserializer=buckets__with__graphics__pb2.Empty.FromString,
-                )
-        self.unfocusEvent = channel.unary_unary(
-                '/transfers_graphics_protocol.ServerToClient/unfocusEvent',
-                request_serializer=buckets__with__graphics__pb2.Empty.SerializeToString,
-                response_deserializer=buckets__with__graphics__pb2.Empty.FromString,
-                )
-        self.selectEvent = channel.unary_unary(
-                '/transfers_graphics_protocol.ServerToClient/selectEvent',
-                request_serializer=buckets__with__graphics__pb2.ClickedIDs.SerializeToString,
-                response_deserializer=buckets__with__graphics__pb2.Empty.FromString,
-                )
-        self.unselectEvent = channel.unary_unary(
-                '/transfers_graphics_protocol.ServerToClient/unselectEvent',
-                request_serializer=buckets__with__graphics__pb2.ClickedIDs.SerializeToString,
-                response_deserializer=buckets__with__graphics__pb2.Empty.FromString,
-                )
-
-
-class ServerToClientServicer(object):
-    """Missing associated documentation comment in .proto file."""
-
-    def showMessage(self, request, context):
-        """Missing associated documentation comment in .proto file."""
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details('Method not implemented!')
-        raise NotImplementedError('Method not implemented!')
-
-    def focusEvent(self, request, context):
-        """Missing associated documentation comment in .proto file."""
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details('Method not implemented!')
-        raise NotImplementedError('Method not implemented!')
-
-    def unfocusEvent(self, request, context):
-        """Missing associated documentation comment in .proto file."""
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details('Method not implemented!')
-        raise NotImplementedError('Method not implemented!')
-
-    def selectEvent(self, request, context):
-        """Missing associated documentation comment in .proto file."""
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details('Method not implemented!')
-        raise NotImplementedError('Method not implemented!')
-
-    def unselectEvent(self, request, context):
-        """Missing associated documentation comment in .proto file."""
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details('Method not implemented!')
-        raise NotImplementedError('Method not implemented!')
-
-
-def add_ServerToClientServicer_to_server(servicer, server):
-    rpc_method_handlers = {
-            'showMessage': grpc.unary_unary_rpc_method_handler(
-                    servicer.showMessage,
-                    request_deserializer=buckets__with__graphics__pb2.TextMessage.FromString,
-                    response_serializer=buckets__with__graphics__pb2.Empty.SerializeToString,
-            ),
-            'focusEvent': grpc.unary_unary_rpc_method_handler(
-                    servicer.focusEvent,
-                    request_deserializer=buckets__with__graphics__pb2.ClickedIDs.FromString,
-                    response_serializer=buckets__with__graphics__pb2.Empty.SerializeToString,
-            ),
-            'unfocusEvent': grpc.unary_unary_rpc_method_handler(
-                    servicer.unfocusEvent,
-                    request_deserializer=buckets__with__graphics__pb2.Empty.FromString,
-                    response_serializer=buckets__with__graphics__pb2.Empty.SerializeToString,
-            ),
-            'selectEvent': grpc.unary_unary_rpc_method_handler(
-                    servicer.selectEvent,
-                    request_deserializer=buckets__with__graphics__pb2.ClickedIDs.FromString,
-                    response_serializer=buckets__with__graphics__pb2.Empty.SerializeToString,
-            ),
-            'unselectEvent': grpc.unary_unary_rpc_method_handler(
-                    servicer.unselectEvent,
-                    request_deserializer=buckets__with__graphics__pb2.ClickedIDs.FromString,
-                    response_serializer=buckets__with__graphics__pb2.Empty.SerializeToString,
-            ),
-    }
-    generic_handler = grpc.method_handlers_generic_handler(
-            'transfers_graphics_protocol.ServerToClient', rpc_method_handlers)
-    server.add_generic_rpc_handlers((generic_handler,))
-
-
- # This class is part of an EXPERIMENTAL API.
-class ServerToClient(object):
-    """Missing associated documentation comment in .proto file."""
-
-    @staticmethod
-    def showMessage(request,
-            target,
-            options=(),
-            channel_credentials=None,
-            call_credentials=None,
-            insecure=False,
-            compression=None,
-            wait_for_ready=None,
-            timeout=None,
-            metadata=None):
-        return grpc.experimental.unary_unary(request, target, '/transfers_graphics_protocol.ServerToClient/showMessage',
-            buckets__with__graphics__pb2.TextMessage.SerializeToString,
-            buckets__with__graphics__pb2.Empty.FromString,
-            options, channel_credentials,
-            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
-
-    @staticmethod
-    def focusEvent(request,
-            target,
-            options=(),
-            channel_credentials=None,
-            call_credentials=None,
-            insecure=False,
-            compression=None,
-            wait_for_ready=None,
-            timeout=None,
-            metadata=None):
-        return grpc.experimental.unary_unary(request, target, '/transfers_graphics_protocol.ServerToClient/focusEvent',
-            buckets__with__graphics__pb2.ClickedIDs.SerializeToString,
-            buckets__with__graphics__pb2.Empty.FromString,
-            options, channel_credentials,
-            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
-
-    @staticmethod
-    def unfocusEvent(request,
-            target,
-            options=(),
-            channel_credentials=None,
-            call_credentials=None,
-            insecure=False,
-            compression=None,
-            wait_for_ready=None,
-            timeout=None,
-            metadata=None):
-        return grpc.experimental.unary_unary(request, target, '/transfers_graphics_protocol.ServerToClient/unfocusEvent',
-            buckets__with__graphics__pb2.Empty.SerializeToString,
-            buckets__with__graphics__pb2.Empty.FromString,
-            options, channel_credentials,
-            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
-
-    @staticmethod
-    def selectEvent(request,
-            target,
-            options=(),
-            channel_credentials=None,
-            call_credentials=None,
-            insecure=False,
-            compression=None,
-            wait_for_ready=None,
-            timeout=None,
-            metadata=None):
-        return grpc.experimental.unary_unary(request, target, '/transfers_graphics_protocol.ServerToClient/selectEvent',
-            buckets__with__graphics__pb2.ClickedIDs.SerializeToString,
-            buckets__with__graphics__pb2.Empty.FromString,
-            options, channel_credentials,
-            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
-
-    @staticmethod
-    def unselectEvent(request,
-            target,
-            options=(),
-            channel_credentials=None,
-            call_credentials=None,
-            insecure=False,
-            compression=None,
-            wait_for_ready=None,
-            timeout=None,
-            metadata=None):
-        return grpc.experimental.unary_unary(request, target, '/transfers_graphics_protocol.ServerToClient/unselectEvent',
-            buckets__with__graphics__pb2.ClickedIDs.SerializeToString,
-            buckets__with__graphics__pb2.Empty.FromString,
-            options, channel_credentials,
-            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
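[Editor's note] The introduceClient and addSpheres docstrings removed above describe the intended handshake: the client introduces itself (optionally registering a callback URL), then streams buckets of graphics to the display server. A hedged client-side sketch of that flow; the server address and import paths are assumptions, and the modules no longer exist after this commit:

    # illustrative only: address and package path are assumptions
    import grpc
    from tracking.blender import buckets_with_graphics_pb2 as pb2
    from tracking.blender import buckets_with_graphics_pb2_grpc as pb2_grpc

    channel = grpc.insecure_channel("localhost:9083")
    stub = pb2_grpc.ClientToServerStub(channel)

    # introduce the client first; otherwise requests land in "unknown_source"
    stub.introduceClient(pb2.ClientHello(returnURL=""))

    # addSpheres is a client-streaming RPC, so it takes an iterator of buckets
    bucket = pb2.BucketOfSpheres(bucketID=1, label="cells_t0", time=0)
    stub.addSpheres(iter([bucket]))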
diff --git a/tracking/display_graph.ipynb b/tracking/display_graph.ipynb
deleted file mode 100644
index daf9d2b..0000000
--- a/tracking/display_graph.ipynb
+++ /dev/null
@@ -1,1773 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "id": "3b8d72eb-5aba-41d2-beaf-018a503bef48",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "\n",
-    "import random\n",
-    "import gurobipy as grb\n",
-    "import pandas as pd\n",
-    "\n",
-    "\n",
-    "# analyze tracking\n",
-    "import math\n",
-    "import os\n",
-    "import pandas as pd\n",
-    "\n",
-    "from matplotlib import pyplot as plt\n",
-    "import numpy as np\n",
-    "\n",
-    "from skimage import io, measure\n",
-    "from skimage.measure import label, regionprops\n",
-    "from skimage.morphology import reconstruction\n",
-    "from scipy.stats import norm, gaussian_kde\n",
-    "\n",
-    "\n",
-    "\n",
-    "from tqdm.notebook import tqdm\n",
-    "from my_utils.image import Dataset, get_move_dist, get_safe_range, get_normal_distribution, get_gamma_distribution\n",
-    "from my_utils.split import get_split_dist\n",
-    "\n",
-    "#from my_utils.vanilla_dataset import DataGenerator\n",
-    "import pickle\n",
-    "\n",
-    "from scipy.stats import expon\n",
-    "    \n",
-    "    "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "id": "be24a097-e53d-4041-84a3-1027e41859d8",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "%load_ext autoreload\n",
-    "%autoreload 2\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "id": "1e5979b3-cdde-4aa5-8ed0-c149ff30a8de",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from pathlib import Path\n",
-    "\n",
-    "class FlowGraph():\n",
-    "    \n",
-    "    def __init__(self, path, sequence):\n",
-    "        \n",
-    "\n",
-    "        # unique indexes of source and termination vertices\n",
-    "        self.source_idx = 0  # source\n",
-    "        self.sink_idx = 1    # target\n",
-    "\n",
-    "        # first cell index\n",
-    "        self.vertex_index = 2\n",
-    "        self.edge_index = 0\n",
-    "        \n",
-    "        self.vertices = {}   # vertices[f_idx][label] = v_idx\n",
-    "        \n",
-    "        \n",
-    "        # define res_path\n",
-    "        res_path = Path(path, f'{sequence}_DATA')\n",
-    "        assert res_path.exists()\n",
-    "        \n",
-    "        # get datset of frames \n",
-    "        self.dataset = Dataset(res_path, name_pattern='mask')\n",
-    "\n",
-    "\n",
-    "        '''\n",
-    "        #########\n",
-    "        OUTPUTS\n",
-    "        #########\n",
-    "\n",
-    "        each vertex and edge has its index\n",
-    "        v_idx, e_idx\n",
-    "        using this index you can get vertices that it represents\n",
-    "        '''\n",
-    "        self.edges = {} # edges[e_idx] = (v_idx, v_idx)\n",
-    "\n",
-    "        'vertex_map maps vetex id to location on a particular frame'\n",
-    "        self.vertex_map = {}  # maps v_idx to tuple (frame, x, y)\n",
-    "\n",
-    "        'veiws to sets of edges'\n",
-    "        self.edge_from_me = {self.source_idx: [], self.sink_idx: []}     # edge_from[v_idx] = [e_idx, e_idx, ...]\n",
-    "        self.edge_to_me =   {self.source_idx: [], self.sink_idx: []}     # edge_to[v_idx] = [e_idx, e_idx, ...]\n",
-    "\n",
-    "        '''\n",
-    "        'pd DataFrames to store events'\n",
-    "        EDGE_COLUMNS = ['edge_index', 'f0', 'f1', 'x0', 'y0', 'x1', 'y1']\n",
-    "        df_edges = pd.DataFrame(columns=MOVE_COLUMNS)\n",
-    "        df_vertices = pd.DataFrame(columns=MOVE_COLUMNS)\n",
-    "        '''\n",
-    "\n",
-    "        # edge_df_rows = []\n",
-    "\n",
-    "        self.vertex_prob = {0: 1., 1: 1.}\n",
-    "        self.edge_prob = {}\n",
-    "        \n",
-    "        'list of coos from the frames'\n",
-    "        self.coos = {}\n",
-    "        \n",
-    "    def _read_frame(self, frame_idx):\n",
-    "        \n",
-    "        frame = self.dataset.get_frame(frame_idx)\n",
-    "        \n",
-    "        res = {}\n",
-    "        for reg in measure.regionprops(frame):\n",
-    "            res[reg.label] = reg.centroid\n",
-    "            \n",
-    "        return res\n",
-    "        \n",
-    "        \n",
-    "    def read_coos(self, frame_idx, label):\n",
-    "        \n",
-    "        if frame_idx not in self.coos.keys():\n",
-    "            self.coos[frame_idx] = self._read_frame(frame_idx)\n",
-    "            \n",
-    "        coos = self.coos[frame_idx]\n",
-    "        \n",
-    "        assert label in coos.keys(), coos.keys()\n",
-    "        \n",
-    "        return coos[label]\n",
-    "        \n",
-    "    def add_vertex(self, frame_idx, label, prob):\n",
-    "        \n",
-    "        frame_idx, label = int(frame_idx), int(label)\n",
-    "        \n",
-    "        # get new index\n",
-    "        v_idx = self.vertex_index\n",
-    "        self.vertex_index += 1\n",
-    "        \n",
-    "        # create dict, if necessary\n",
-    "        if frame_idx not in self.vertices.keys():\n",
-    "            self.vertices[frame_idx] = {}\n",
-    "            \n",
-    "        # check\n",
-    "        assert label not in self.vertices[frame_idx].keys()\n",
-    "        \n",
-    "        self.vertices[frame_idx][label] = v_idx\n",
-    "        \n",
-    "        #if prob < .\n",
-    "        \n",
-    "        self.vertex_prob[v_idx] = prob\n",
-    "        \n",
-    "        # frame, label, x, y\n",
-    "        # TODO: add coordinates\n",
-    "        #            - contain dataset\n",
-    "        #            - load each frame only ones\n",
-    "        x, y = self.read_coos(frame_idx, label)\n",
-    "        \n",
-    "        self.vertex_map[v_idx] = (frame_idx, label, x, y)\n",
-    "        \n",
-    "        # prepare edge containers\n",
-    "        self.edge_from_me[v_idx] = []\n",
-    "        self.edge_to_me[v_idx] = []\n",
-    "        \n",
-    "        return v_idx\n",
-    "        \n",
-    "    def add_edge(self, frame1, label1, frame2, label2, prob):\n",
-    "        \n",
-    "        frame1, label1, frame2, label2 = int(frame1), int(label1), int(frame2), int(label2)\n",
-    "        \n",
-    "        v1_idx = self.get_vertex_index(frame1, label1)\n",
-    "        v2_idx = self.get_vertex_index(frame2, label2)\n",
-    "        \n",
-    "        e_idx = self.edge_index\n",
-    "        self.edge_index += 1\n",
-    "        \n",
-    "        self.__add_edge(v1_idx, v2_idx, e_idx, prob)\n",
-    "        \n",
-    "        return e_idx\n",
-    "    \n",
-    "    def __add_edge(self, v1_idx, v2_idx, e_idx, prob):\n",
-    "        \n",
-    "        self.edges[e_idx] = (v1_idx, v2_idx)\n",
-    "        self.edge_prob[e_idx] = prob\n",
-    "        \n",
-    "        self.edge_from_me[v1_idx].append(e_idx)\n",
-    "        self.edge_to_me[v2_idx].append(e_idx)\n",
-    "    \n",
-    "    def add_source_edge(self, frame2, label2):\n",
-    "        \n",
-    "        frame2, label2 = int(frame2), int(label2)\n",
-    "        \n",
-    "        v2_idx = self.get_vertex_index(frame2, label2)\n",
-    "        \n",
-    "        e_idx = self.edge_index\n",
-    "        self.edge_index += 1\n",
-    "        \n",
-    "        self.__add_edge(self.source_idx, v2_idx, e_idx, 1.)\n",
-    "        \n",
-    "        return e_idx\n",
-    "    \n",
-    "    def add_sink_edge(self, frame1, label1):\n",
-    "        frame1, label1 = int(frame1), int(label1)\n",
-    "        \n",
-    "        v1_idx = self.get_vertex_index(frame1, label1)\n",
-    "        \n",
-    "        e_idx = self.edge_index\n",
-    "        self.edge_index += 1\n",
-    "        \n",
-    "        self.__add_edge(v1_idx, self.sink_idx, e_idx, 1.)\n",
-    "        \n",
-    "        return e_idx\n",
-    "    \n",
-    "    def get_vertex_index(self, frame, label):\n",
-    "        \n",
-    "        # appearance\n",
-    "        if label == 0:\n",
-    "            return self.source_idx \n",
-    "        \n",
-    "        assert frame in self.vertices.keys(), f'{frame} {label}'\n",
-    "        assert label in self.vertices[frame].keys(), f'{frame} {label}'\n",
-    "        \n",
-    "        return self.vertices[frame][label]\n",
-    "    \n",
-    "    \n",
-    "    def knn(self, frame_idx, x, y, n=3, limit_dist=100):\n",
-    "        \n",
-    "        # get v_idxs in the frame\n",
-    "        # for each compute distance (limit by x, y square\n",
-    "        # sort by distance\n",
-    "        # take first n\n",
-    "        \n",
-    "        all_dets = self.vertices[frame_idx].values()\n",
-    "        \n",
-    "        cands = {}\n",
-    "        for v_idx in all_dets:\n",
-    "            _, _, x0, y0 = self.vertex_map[v_idx]\n",
-    "            \n",
-    "            if (abs(x0 - x) < limit_dist) and (abs(y0 - y) < limit_dist):\n",
-    "                cands[v_idx] = abs(x0 - x) + abs(y0 - y)\n",
-    "                \n",
-    "        res = sorted(cands.items(), key=lambda x : x[1])[:3]\n",
-    "        return [key for key, _ in res]\n",
-    "                \n",
-    "        \n",
-    "\n",
-    "\n",
-    "        \n",
-    "        "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "id": "feb099ea-511b-4c92-b421-189b2516286f",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "dict_keys([1, 5])\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "100%|██████████████████████████████████████| 1763/1763 [00:15<00:00, 114.89it/s]\n"
-     ]
-    },
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "sending buckets\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "100%|██████████████████████████████████████████| 2/2 [00:00<00:00, 10058.28it/s]\n",
-      "100%|██████████████████████████████████████████| 1/1 [00:00<00:00, 14074.85it/s]\n"
-     ]
-    },
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "dict_keys([1])\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "100%|██████████████████████████████████████| 1763/1763 [00:11<00:00, 151.16it/s]\n"
-     ]
-    },
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "sending buckets\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "100%|███████████████████████████████████████████| 2/2 [00:00<00:00, 7256.58it/s]\n",
-      "100%|██████████████████████████████████████████| 1/1 [00:00<00:00, 10180.35it/s]\n"
-     ]
-    },
-    {
-     "ename": "AssertionError",
-     "evalue": "",
-     "output_type": "error",
-     "traceback": [
-      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
-      "\u001b[0;31mAssertionError\u001b[0m                            Traceback (most recent call last)",
-      "Cell \u001b[0;32mIn[4], line 11\u001b[0m\n\u001b[1;32m      8\u001b[0m send_gt(gt_path, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124m02\u001b[39m\u001b[38;5;124m'\u001b[39m, \u001b[38;5;241m5\u001b[39m, first_frame\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m1\u001b[39m, fixed_id\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m)\n\u001b[1;32m      9\u001b[0m send_gt(gt_path, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124m01\u001b[39m\u001b[38;5;124m'\u001b[39m, \u001b[38;5;241m0\u001b[39m, first_frame\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m1\u001b[39m, fixed_id\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m)\n\u001b[0;32m---> 11\u001b[0m \u001b[38;5;28;01massert\u001b[39;00m \u001b[38;5;28;01mFalse\u001b[39;00m\n",
-      "\u001b[0;31mAssertionError\u001b[0m: "
-     ]
-    }
-   ],
-   "source": [
-    "# not a part of tracking\n",
-    "\n",
-    "from blender.blenderpy import send_gt\n",
-    "from pathlib import Path\n",
-    "\n",
-    "gt_path = Path('..', 'DATA', 'BF-C2DL-HSC')\n",
-    "\n",
-    "send_gt(gt_path, '02', 5, first_frame=1, fixed_id=True)\n",
-    "send_gt(gt_path, '01', 0, first_frame=1, fixed_id=True)\n",
-    "\n",
-    "assert False"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 28,
-   "id": "f67d49e4-12a4-4737-b138-cc82f41a0adf",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "The autoreload extension is already loaded. To reload it, use:\n",
-      "  %reload_ext autoreload\n"
-     ]
-    }
-   ],
-   "source": [
-    "%load_ext autoreload\n",
-    "%autoreload 2\n",
-    "\n",
-    "# do the same, but frame by frame\n",
-    "\n",
-    "\n",
-    "def get_graph_et(\n",
-    "        path,\n",
-    "        sequence,\n",
-    "        min_frame=0,\n",
-    "        max_frame=2000,\n",
-    "        virtual_edge_prob=2,\n",
-    "        limit_dist=100,\n",
-    "        n_neighbours=3\n",
-    "    ):\n",
-    "    '''\n",
-    "    creates instance of candidate graph\n",
-    "    \n",
-    "    '''\n",
-    "    \n",
-    "    res_path = path / f'{sequence}_DATA'\n",
-    "    \n",
-    "    # create consistent graph structure\n",
-    "    graph = FlowGraph(path, sequence)\n",
-    "\n",
-    "    #pd_edge_path = os.path.join(res_path, 'edge_prob.csv')\n",
-    "    pd_edge_path = os.path.join(res_path, 'edge_prob_divergence.csv')\n",
-    "    pd_vertex_path = os.path.join(res_path, 'vertex_prob.csv')\n",
-    "    assert os.path.isfile(pd_edge_path), pd_edge_path\n",
-    "    assert os.path.isfile(pd_vertex_path), pd_vertex_path\n",
-    "\n",
-    "    df_edges = pd.read_csv(pd_edge_path, index_col=False)\n",
-    "    df_vertices = pd.read_csv(pd_vertex_path, index_col=False)\n",
-    "    \n",
-    "    # compute cost\n",
-    "    df_edges['cost'] = - np.log(df_edges.divergence)\n",
-    "        \n",
-    "    # list of real frame indexes\n",
-    "    frame_indexes = df_vertices['time'].unique()\n",
-    "    \n",
-    "    # TODO: sort in reverse order\n",
-    "    frame_indexes.sort()\n",
-    "    min_frame = int(np.maximum(frame_indexes.min(), min_frame))\n",
-    "    max_frame = int(np.minimum(frame_indexes.max(), max_frame))\n",
-    "    \n",
-    "    ########## \n",
-    "    # ADD ALL VERTICES\n",
-    "    print('GET GRAPH: adding vertices')\n",
-    "    for i, row in df_vertices.iterrows():\n",
-    "        \n",
-    "        \n",
-    "        prob = row['prob']\n",
-    "        label = row['label']\n",
-    "        frame_idx = row['time']\n",
-    "        \n",
-    "        if not (min_frame <= frame_idx <= max_frame):\n",
-    "            continue\n",
-    "        \n",
-    "        assert label > 0\n",
-    "        \n",
-    "        graph.add_vertex(frame_idx, label, prob)\n",
-    "        \n",
-    "        ########## \n",
-    "        # ADD SOURCE AND SINK EDGES\n",
-    "        if int(frame_idx) == min_frame:\n",
-    "            graph.add_source_edge(frame_idx, label)\n",
-    "            \n",
-    "        elif int(frame_idx) == max_frame:\n",
-    "            graph.add_sink_edge(frame_idx, label)\n",
-    "        \n",
-    "    ###########\n",
-    "    # ADD ALL MIDDLE EDGES\n",
-    "    print('GET GRAPH: adding edges')\n",
-    "    for i, row in df_edges.iterrows():\n",
-    "        \n",
-    "        #prob = row['prob']\n",
-    "        cost = row['cost']\n",
-    "        label_curr = row['label_curr']\n",
-    "        frame_curr = row['time_curr']\n",
-    "        label_prev = row['label_prev']\n",
-    "        frame_prev = row['time_prev']\n",
-    "        \n",
-    "        if (frame_prev < min_frame) or (frame_curr > max_frame):\n",
-    "            continue\n",
-    "        \n",
-    "        # no appearance in the sequence\n",
-    "        graph.add_edge(frame_curr, label_curr, frame_prev, label_prev, cost)\n",
-    "        \n",
-    "        \n",
-    "    ###########\n",
-    "    # ADD VIRTUAL edges\n",
-    "    # for every edge, that has no connection to t+1:\n",
-    "    #     - add adges to three closest edges (up to 'limit_dist')\n",
-    "    #     - the edge prob is 'virtual_edge_prob' (=.25)\n",
-    "    \n",
-    "    print(f'GET GRAPH: entangling to {n_neighbours} neighbours')\n",
-    "    if n_neighbours > 0:\n",
-    "        \n",
-    "\n",
-    "        to_me, from_me = [], []\n",
-    "        for v_idx in graph.vertex_map.keys():\n",
-    "\n",
-    "\n",
-    "            if len(graph.edge_to_me[v_idx]) < n_neighbours:\n",
-    "\n",
-    "                to_me_ = [graph.edges[e_idx][0] for e_idx in graph.edge_to_me[v_idx]]\n",
-    "                #from_me_ = graph.edge_from_me[v_idx]\n",
-    "\n",
-    "                frame_idx, label, x, y = graph.vertex_map[v_idx]\n",
-    "                if  frame_idx == max_frame:\n",
-    "                    continue\n",
-    "\n",
-    "                to_me.append(v_idx)\n",
-    "\n",
-    "                neighbors = graph.knn(frame_idx+1, x, y, n=n_neighbours, limit_dist=limit_dist) # returns list of indexes\n",
-    "\n",
-    "                # add virtual edges\n",
-    "                for n_v_idx in neighbors:\n",
-    "                    \n",
-    "                    # skip already included neighbours\n",
-    "                    if n_v_idx in to_me_:\n",
-    "                        #print(v_idx, to_me_, n_v_idx, neighbors)\n",
-    "                        continue\n",
-    "\n",
-    "                    frame_next, label_next, _, _ = graph.vertex_map[n_v_idx]\n",
-    "                    graph.add_edge(frame_next, label_next, frame_idx, label, virtual_edge_prob)\n",
-    "\n",
-    "                #print(f'{v_idx} to me:{graph.edge_to_me[v_idx]}')\n",
-    "        \n",
-    "    ''' OUTPUT '''\n",
-    "    indexes = graph.vertex_index, graph.edge_index\n",
-    "    probs = graph.edge_prob, graph.vertex_prob\n",
-    "    move_edges = graph.edge_to_me, graph.edge_from_me\n",
-    "\n",
-    "\n",
-    "    return indexes, probs, graph.edges, move_edges, graph.vertex_map"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 29,
-   "id": "5e2f3965-3883-4b5e-8fc3-b872cf3997ce",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "GET GRAPH: adding vertices\n",
-      "GET GRAPH: adding edges\n",
-      "GET GRAPH: entangling to 0 neighbours\n"
-     ]
-    }
-   ],
-   "source": [
-    "subset = 'train'\n",
-    "sequence = '01'\n",
-    "dataset = 'BF-C2DL-HSC'\n",
-    "model = \"adam_norm_onecycle_allcrops_15\"\n",
-    "#model = \"model2\"\n",
-    "\n",
-    "path = Path('..', 'embedtrack_me', 'results10', dataset, model, subset)\n",
-    "assert path.exists(), path\n",
-    "\n",
-    "\n",
-    "res_path = path / f'{sequence}_DATA'\n",
-    "assert res_path.exists(), res_path   \n",
-    "\n",
-    "        \n",
-    "graph = get_graph_et(\n",
-    "        path,\n",
-    "        sequence,\n",
-    "        min_frame=0,\n",
-    "        limit_dist=150,\n",
-    "        n_neighbours=0\n",
-    "    )"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 30,
-   "id": "220a2933-2f22-4199-a9fa-9c78621bd9a4",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "### WHAT IS IT?\n",
-    "\n",
-    "I_KNOW_WHAT_IS_THIS_GOOD_FOR = False\n",
-    "\n",
-    "if I_KNOW_WHAT_IS_THIS_GOOD_FOR:\n",
-    "    res_path = path / f'{sequence}_DATA'\n",
-    "\n",
-    "\n",
-    "    #pd_edge_path = os.path.join(res_path, 'edge_prob.csv')\n",
-    "    pd_edge_path = os.path.join(res_path, 'edge_prob_divergence.csv')\n",
-    "    pd_vertex_path = os.path.join(res_path, 'vertex_prob.csv')\n",
-    "    assert os.path.isfile(pd_edge_path), pd_edge_path\n",
-    "    assert os.path.isfile(pd_vertex_path), pd_vertex_path\n",
-    "\n",
-    "    df_edges = pd.read_csv(pd_edge_path, index_col=False)\n",
-    "    df_vertices = pd.read_csv(pd_vertex_path, index_col=False)\n",
-    "\n",
-    "    edges_list = []\n",
-    "    vertices_list = []\n",
-    "    edges_list2 = []\n",
-    "    vertices_list2 = []\n",
-    "\n",
-    "\n",
-    "    for i in range(100, 200):\n",
-    "        edges = df_edges[df_edges.time_prev == i]\n",
-    "        vertices_curr = df_vertices[df_vertices.time == i].label.values\n",
-    "        vertices_prev = df_vertices[df_vertices.time == i].label.values\n",
-    "\n",
-    "        ev_prev = edges.label_prev.values\n",
-    "        ev_curr = edges.label_curr.values\n",
-    "\n",
-    "        # print(f'\\ntime {i}')\n",
-    "        # print(f'edges \\t\\t{ev_prev} \\t {ev_curr}')\n",
-    "        # print(f'vertices\\t{vertices_prev} \\t {vertices_curr}')\n",
-    "\n",
-    "        edges_list.append(ev_curr.max())\n",
-    "        vertices_list.append(vertices_curr.max())\n",
-    "\n",
-    "        edges_list2.append(ev_prev.max())\n",
-    "        vertices_list2.append(vertices_prev.max())\n",
-    "\n",
-    "\n",
-    "        for v_idx in ev_prev:\n",
-    "            assert v_idx in vertices_prev, f'ev_prev\\n {ev_prev},\\n vertices\\n{vertices_prev} {vertices_curr}\\n{v_idx}\\n{i}'\n",
-    "\n",
-    "        for v_idx in ev_curr:\n",
-    "            assert v_idx in vertices_curr, edges\n",
-    "\n",
-    "\n",
-    "\n",
-    "\n",
-    "        #print('edges\\n', edges)\n",
-    "        #print('vertices\\n', vertices)\n",
-    "\n",
-    "        #break\n",
-    "\n",
-    "    plt.figure()\n",
-    "    plt.plot( np.arange(len(edges_list)), edges_list)\n",
-    "    plt.plot( np.arange(len(vertices_list)), vertices_list)\n",
-    "    plt.legend([])\n",
-    "    plt.show()\n",
-    "\n",
-    "    plt.figure()\n",
-    "    plt.plot( np.arange(len(edges_list2)), edges_list2)\n",
-    "    plt.plot( np.arange(len(vertices_list2)), vertices_list2)\n",
-    "    plt.show()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 31,
-   "id": "f1f06c2b-3990-41ca-b013-589626a5a4f5",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "edges:  17715\n",
-      "vertices:  18384\n"
-     ]
-    }
-   ],
-   "source": [
-    "indexes, probs, edges, move_edges, vertex_map = graph\n",
-    "\n",
-    "vertex_index, edge_index = indexes\n",
-    "edge_prob, vertex_prob = probs\n",
-    "edge_to, edge_from = move_edges\n",
-    "\n",
-    "\n",
-    "print('edges: ', len(edges))\n",
-    "print('vertices: ', len(vertex_map))\n",
-    "assert edge_index == len(edge_prob) == len(edges), f'{edge_index}, {len(edge_prob)}, {len(edges)}'\n",
-    "#print(len(vertex_map) , len(conflicts) , vertex_index - 2, len(move_to) -1, len(move_from) -1, len(split_mother), len(split_daughter))\n",
-    "assert len(vertex_map) == vertex_index - 2 == len(edge_to) - 2 == len(edge_from) - 2, f'{len(vertex_map)} == {vertex_index - 2} == {len(edge_to) - 2} == {len(edge_from) - 2}'"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 32,
-   "id": "08b51f7d-359d-48ce-9259-15b4d44a07bc",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "def log_cost(prob):\n",
-    "    \n",
-    "    prob = max(prob, 0.05)\n",
-    "    return - math.log(prob)\n",
-    "\n",
-    "\n",
-    "def lin_cost(prob):\n",
-    "    return - prob\n",
-    "\n",
-    "\n",
-    "def vertex_cost(prob, mean_prob=.95):\n",
-    "    prob = max(prob, .05)**2\n",
-    "    return -math.log(prob/mean_prob)\n",
-    "\n",
-    "def vertex_cost(prob, mean_prob=.95):\n",
-    "    return -math.log(prob)*21\n",
-    "\n",
-    "def edge_cost(prob):\n",
-    "    return 1 - prob\n",
-    "\n",
-    "# KL\n",
-    "def vertex_cost(prob):\n",
-    "    '''\n",
-    "    probs of real vertices is higher than .95\n",
-    "    '''\n",
-    "    return -prob\n",
-    "\n",
-    "def edge_cost(cost):\n",
-    "    '''\n",
-    "    edge cost is log(divergence)\n",
-    "    \n",
-    "    '''\n",
-    "    return cost\n",
-    "\n",
-    "\n",
-    "\n",
-    "DEBUG = False\n",
-    "\n",
-    "def graph2tracking(graph, max_cost=10000):\n",
-    "    \n",
-    "    # decompose graph\n",
-    "    indexes, probs, edges, move_edges, vertex_map = graph\n",
-    "\n",
-    "    vertex_index, edge_index = indexes\n",
-    "    edge_prob, vertex_prob = probs\n",
-    "    edge_to, edge_from = move_edges\n",
-    "\n",
-    "    # variables\n",
-    "    tracking = []\n",
-    "    v_ids = set()\n",
-    "    \n",
-    "\n",
-    "    last_frame_index = np.max([f_idx for f_idx, _, _, _ in vertex_map.values()])\n",
-    "    first_frame_index = np.min([f_idx for f_idx, _, _, _ in vertex_map.values()])\n",
-    "    \n",
-    "    frame_shift = first_frame_index\n",
-    "    \n",
-    "    # vertex hypothesis\n",
-    "    # iterate over vertex_map\n",
-    "    \n",
-    "    for v_idx in tqdm(vertex_map.keys()):\n",
-    "        \n",
-    "        frame_idx, label, x, y = vertex_map[v_idx]\n",
-    "        prob = vertex_prob[v_idx]\n",
-    "        \n",
-    "        '''\n",
-    "        for i, line in tqdm(df_vertex.iterrows(), 'vertices', total=len(df_vertex)):\n",
-    "\n",
-    "            time = int(line[\"time\"])\n",
-    "            label = int(line[\"label\"])\n",
-    "            prob = float(line[\"prob\"])\n",
-    "        '''\n",
-    "        cost = vertex_cost(prob)\n",
-    "        \n",
-    "        \n",
-    "        \n",
-    "        #unique_id = f'V_{time}_{label}' \n",
-    "        unique_id = f'1{frame_idx:05d}{label:05d}' \n",
-    "        \n",
-    "        assert len(unique_id) == 11, unique_id\n",
-    "        \n",
-    "        tracking.append(f'H {frame_idx-frame_shift} {unique_id} {cost} {x} {y}')\n",
-    "        \n",
-    "        # no appearance and diappearance -> cost 10000\n",
-    "        app_cost, disapp_cost = max_cost, max_cost\n",
-    "        if (frame_idx == first_frame_index):\n",
-    "            app_cost = 0.\n",
-    "        if (frame_idx == last_frame_index):\n",
-    "            disapp_cost = 0.\n",
-    "\n",
-    "        # add appearance and disappearance\n",
-    "        unique_id_app = f'4{frame_idx:05d}{label:05d}' \n",
-    "        unique_id_dis = f'5{frame_idx:05d}{label:05d}' \n",
-    "        tracking.append(f'APP {unique_id_app} {unique_id} {app_cost}')\n",
-    "        tracking.append(f'DISAPP {unique_id_dis} {unique_id} {disapp_cost}')\n",
-    "        \n",
-    "        # DEBUG\n",
-    "        v_ids.add(unique_id)\n",
-    "        \n",
-    "\n",
-    "    edges_ = {}\n",
-    "    for e_idx in edges.keys():\n",
-    "        \n",
-    "        v_idx_curr, v_idx_prev  = edges[e_idx]\n",
-    "        \n",
-    "        # appearance\n",
-    "        if v_idx_prev in [0, 1]:\n",
-    "            continue\n",
-    "            \n",
-    "        if v_idx_curr == 0:\n",
-    "            continue\n",
-    "        \n",
-    "        frame_idx_prev, label_prev, _, _ = vertex_map[v_idx_prev]\n",
-    "        frame_idx_curr, label_curr, _, _ = vertex_map[v_idx_curr]\n",
-    "        prob = edge_prob[e_idx]\n",
-    "        cost = edge_cost(prob)\n",
-    "        \n",
-    "\n",
-    "\n",
-    "            \n",
-    "        #print('correct', prob)\n",
-    "\n",
-    "        seg_id_right = f'1{frame_idx_curr:05d}{label_curr:05d}' \n",
-    "        seg_id_left = f'1{frame_idx_prev:05d}{label_prev:05d}' \n",
-    "        unique_id = f'2{seg_id_left}{seg_id_right}' \n",
-    "        \n",
-    "        # update tracking\n",
-    "        tracking.append(f'MOVE {unique_id} {seg_id_left} {seg_id_right} {cost}')\n",
-    "\n",
-    "        # DEBUG\n",
-    "        if DEBUG:\n",
-    "            if seg_id_right not in v_ids:\n",
-    "                print('right', seg_id_right, 'from', seg_id_left)\n",
-    "                #return\n",
-    "                continue\n",
-    "            if seg_id_left not in v_ids:\n",
-    "                print('left', seg_id_left, 'to', seg_id_right)\n",
-    "                continue\n",
-    "\n",
-    "            assert len(unique_id) == 23, unique_id\n",
-    "\n",
-    "\n",
-    "\n",
-    "        edges_[seg_id_left] = edges_.get(seg_id_left, [])\n",
-    "        edges_[seg_id_left].append((seg_id_right, prob))\n",
-    "\n",
-    "            \n",
-    "\n",
-    "    \n",
-    "        \n",
-    "        \n",
-    "    # division\n",
-    "    di = 0\n",
-    "    for id_left in tqdm(edges_.keys(), 'divisions'):\n",
-    "        \n",
-    "        right_ids = edges_[id_left]\n",
-    "                \n",
-    "        for id_right1, prob1 in right_ids:\n",
-    "            \n",
-    "            #prob1 = edge_prob[id_right1]\n",
-    "            \n",
-    "            '''\n",
-    "            if prob1 < 0.05:\n",
-    "                print(f'low prob1 {prob1}')\n",
-    "                continue\n",
-    "            ''' \n",
-    "            \n",
-    "            for id_right2, prob2 in right_ids:\n",
-    "                \n",
-    "                if id_right1 >= id_right2:\n",
-    "                    continue\n",
-    "                    \n",
-    "                #prob2 = edge_prob[id_right2]\n",
-    "                    \n",
-    "                '''\n",
-    "                if prob2 < 0.05:\n",
-    "                    print(f'low prob2 {prob2}')\n",
-    "                    continue\n",
-    "                '''\n",
-    "                \n",
-    "                #div_prob = (prob1 + prob2) / 2\n",
-    "                cost = log_cost(prob1) + log_cost(prob2)\n",
-    "                \n",
-    "                unique_id = f'3{id_left}{id_right1}{id_right2}'\n",
-    "                \n",
-    "                assert len(unique_id) == 34, unique_id\n",
-    "                \n",
-    "                \n",
-    "                \n",
-    "                tracking.append(f'DIV {unique_id} {id_left} {id_right1} {id_right2} {cost}')\n",
-    "                                \n",
-    "                di += 1\n",
-    "                \n",
-    "    print(f'Added {di} division candidates.')\n",
-    "        \n",
-    "\n",
-    "    \n",
-    "    return tracking, edges_\n",
-    "\n",
-    "\n",
-    "from itertools import repeat, chain\n",
-    "\n",
-    "def save_tracking(tracking, data_path):\n",
-    "    \n",
-    "    tracking_path = os.path.join(data_path, 'tracking.txt')\n",
-    "    with open(tracking_path, 'w', encoding='utf8') as f:\n",
-    "        line_end = repeat(\"\\n\")\n",
-    "        lines = chain.from_iterable(zip(tracking, line_end))\n",
-    "        f.writelines(lines)\n",
-    "    "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 33,
-   "id": "b46c6655-c943-47dc-83b7-7752d437cdec",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "100%|█████████████████████████████████| 18384/18384 [00:00<00:00, 220905.26it/s]\n",
-      "divisions: 100%|█████████████████████| 17154/17154 [00:00<00:00, 1310834.62it/s]"
-     ]
-    },
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Added 560 division candidates.\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "\n"
-     ]
-    }
-   ],
-   "source": [
-    "from tqdm import tqdm\n",
-    "from pathlib import Path\n",
-    "\n",
-    "\n",
-    "tracking, edges_ = graph2tracking(graph)\n",
-    "\n",
-    "save_tracking(tracking, res_path)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "cb2942d6-952b-42c6-b760-4eeb2d267380",
-   "metadata": {},
-   "source": [
-    "## Display libct tracking.txt by blender "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 35,
-   "id": "5a8aba98-aa31-41c6-91d2-92ffbe54b195",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "../embedtrack_me/results10/BF-C2DL-HSC/adam_norm_onecycle_allcrops_15/train/01_DATA/tracking.txt\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "100%|█████████████████████████████████| 73404/73404 [00:00<00:00, 217798.06it/s]\n"
-     ]
-    },
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "sending buckets\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "100%|██████████████████████████████████████████| 1/1 [00:00<00:00, 15827.56it/s]\n",
-      "100%|██████████████████████████████████████████| 4/4 [00:00<00:00, 42366.71it/s]\n"
-     ]
-    }
-   ],
-   "source": [
-    "#TODO: add division candidates\n",
-    "\n",
-    "from blender.blenderpy import BlenderViewer\n",
-    "import math\n",
-    "\n",
-    "\n",
-    "def choose_color_and_size(prob):\n",
-    "    \n",
-    "    COL = {'red':0xF54731, 'green':0xB4FA41, 'blue':0x1122FF, 'black':0x000000, 'gray':0xEEEEEE, 'yellow':0xF5BC00, 'pink':0xD6559E}\n",
-    "    \n",
-    "    size = ((prob * 10) // 1) * .1\n",
-    "    size = max(size, .05)\n",
-    "\n",
-    "    \n",
-    "    # three types\n",
-    "    if prob > .95:\n",
-    "        return COL['green'], size\n",
-    "    elif prob > .6:\n",
-    "        return COL['blue'], size\n",
-    "    elif prob > .2:\n",
-    "        return COL['yellow'], size\n",
-    "    else:\n",
-    "        return COL['red'], size\n",
-    "\n",
-    "    \n",
-    "    \n",
-    "def transform_coo(x, y, t, mz=1000, dz=1):\n",
-    "    \n",
-    "    x = (x - 600) / 10\n",
-    "    y = (y - 100) / 10\n",
-    "    \n",
-    "    t = (t - mz) / dz\n",
-    "    \n",
-    "    return x, y, t\n",
-    "\n",
-    "\n",
-    "\n",
-    "def show_libct(file_path,\n",
-    "               sequence,\n",
-    "               suffix='',\n",
-    "               time=10,\n",
-    "               object_id=5,\n",
-    "               lower_limit=1000,\n",
-    "               upper_limit=2000):\n",
-    "    \n",
-    "    assert os.path.isfile(file_path)\n",
-    "    \n",
-    "    \n",
-    "    blender = BlenderViewer(f'{file_path}_{sequence}_{suffix}')\n",
-    "    \n",
-    "    e_id = 5\n",
-    "    e_count= 1\n",
-    "    \n",
-    "    vertices = {}\n",
-    "    \n",
-    "    with open(file_path, 'r') as f:\n",
-    "        \n",
-    "        # show vertices\n",
-    "        for line in tqdm(f.readlines()):\n",
-    "            key = line.split(' ')[0]\n",
-    "            if key == 'H':\n",
-    "                \n",
-    "                _, frame_idx, unique_id, cost, x, y = line.split(' ')\n",
-    "                # transform coordinates\n",
-    "                x, y, frame_idx = transform_coo(float(x), float(y), int(frame_idx))\n",
-    "                \n",
-    "                color, size = choose_color_and_size(-float(cost))\n",
-    "\n",
-    "                blender.put_sphere((x, y, frame_idx, time),\n",
-    "                   object_id=1,\n",
-    "                   color=color, size=size*.2)\n",
-    "                \n",
-    "                # save vertex\n",
-    "                vertices[unique_id] = (x, y, frame_idx)\n",
-    "            elif key == 'APP':\n",
-    "                \n",
-    "                _, event_id, vertex_id, cost = line.split(' ')\n",
-    "                x1, y1, frame_idx = vertices[vertex_id]\n",
-    "                x2, y2 = x1 + 100, y1 + 100\n",
-    "                \n",
-    "                prob = 0.05 if float(cost) > 1 else 0.5\n",
-    "                if prob == 0.05:\n",
-    "                    continue\n",
-    "                color, size = choose_color_and_size(prob)\n",
-    "                blender.put_vector(\n",
-    "                           (x1, y1, frame_idx, time),\n",
-    "                           (x2, y2, frame_idx, time),\n",
-    "                           object_id=2,\n",
-    "                           color=color,\n",
-    "                           radius=size)\n",
-    "                \n",
-    "                x, y, frame_idx = vertices[vertex_id]\n",
-    "                pass\n",
-    "            elif key == 'DISAPP':\n",
-    "                _, event_id, vertex_id, cost = line.split(' ')\n",
-    "                x1, y1, frame_idx = vertices[vertex_id]\n",
-    "                x2, y2 = x1 + 100, y1 - 100\n",
-    "                \n",
-    "                prob = 0.05 if float(cost) > 1 else 0.5\n",
-    "                \n",
-    "                if prob == 0.05:\n",
-    "                    continue\n",
-    "                \n",
-    "                color, size = choose_color_and_size(prob)\n",
-    "                blender.put_vector(\n",
-    "                           (x1, y1, frame_idx, time),\n",
-    "                           (x2, y2, frame_idx, time),\n",
-    "                           object_id=3,\n",
-    "                           color=color,\n",
-    "                           radius=size)\n",
-    "                \n",
-    "                x, y, frame_idx = vertices[vertex_id]\n",
-    "                pass\n",
-    "            elif key == 'MOVE':\n",
-    "                _, event_id, left_id, right_id, cost = line.split(' ')\n",
-    "                \n",
-    "                x1, y1, frame_idx1 = vertices[left_id]\n",
-    "                x2, y2, frame_idx2 = vertices[right_id]\n",
-    "                \n",
-    "                #prob = math.exp(-float(cost))\n",
-    "                prob = 1/(1 + np.exp(-float(cost)))\n",
-    "                \n",
-    "                color, size = choose_color_and_size(prob)\n",
-    "                blender.put_vector(\n",
-    "                           (x1, y1, frame_idx1, time),\n",
-    "                           (x2, y2, frame_idx2, time),\n",
-    "                           object_id=5,\n",
-    "                           color=color,\n",
-    "                           radius=size)\n",
-    "                \n",
-    "\n",
-    "\n",
-    "                \n",
-    "            elif key == 'DIV':\n",
-    "                _, event_id, mother_id, left_id, right_id, cost = line.split(' ')\n",
-    "                \n",
-    "                \n",
-    "                x1, y1, frame_idx1 = vertices[left_id]\n",
-    "                x2, y2, frame_idx2 = vertices[right_id]\n",
-    "                \n",
-    "                prob = math.exp(-float(cost))\n",
-    "                color, size = choose_color_and_size(prob)\n",
-    "                blender.put_vector(\n",
-    "                           (x1, y1, frame_idx1, time),\n",
-    "                           (x2, y2, frame_idx2, time),\n",
-    "                           object_id=4,\n",
-    "                           color=0x1122FF,\n",
-    "                           radius=size)\n",
-    "                \n",
-    "                \n",
-    "            else:\n",
-    "                assert False, line\n",
-    "                \n",
-    "                \n",
-    "            # limit number of vectors in a bucket\n",
-    "            e_count += 1\n",
-    "            if e_count % 50000 == 0:\n",
-    "                e_id += 1\n",
-    "                print('sending buckets')\n",
-    "                blender.send_buckets()\n",
-    "                blender = BlenderViewer(f'{file_path}_{sequence}_{suffix}_{e_id}')\n",
-    "                \n",
-    "                \n",
-    "    blender.send_buckets()\n",
-    "                \n",
-    "    return\n",
-    "    \n",
-    "    \n",
-    "\n",
-    "libct_path = res_path / 'tracking.txt'\n",
-    "\n",
-    "print(libct_path)\n",
-    "    \n",
-    "show_libct(libct_path, sequence, time=6, lower_limit=0, upper_limit=2000)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "e8165086-11cb-4393-aa37-37061ccaaa47",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "#TODO: call libct from here\n",
-    "\n",
-    "\n"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "54884287-c1cc-4b38-9a9d-ad6ee4996781",
-   "metadata": {},
-   "source": [
-    "## Display solution"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "4966af42-e145-4705-8a37-f4c3a8fb65ec",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "#TODO: add division candidates\n",
-    "\n",
-    "from blender.blenderpy import BlenderViewer\n",
-    "import math\n",
-    "\n",
-    "\n",
-    "def choose_color_and_size(prob):\n",
-    "    \n",
-    "    COL = {'red':0xF54731, 'green':0xB4FA41, 'blue':0x1122FF, 'black':0x000000, 'gray':0xEEEEEE, 'yellow':0xF5BC00, 'pink':0xD6559E}\n",
-    "    \n",
-    "    size = ((prob * 10) // 1) * .1\n",
-    "    size = max(size, 0.1)\n",
-    "\n",
-    "    \n",
-    "    # three types\n",
-    "    if prob > .95:\n",
-    "        return COL['green'], size\n",
-    "    elif prob > .5:\n",
-    "        return COL['yellow'], size\n",
-    "    else:\n",
-    "        return COL['red'], size\n",
-    "    \n",
-    "    \n",
-    "def transform_coo(x, y, t, mz=1000, dz=1):\n",
-    "    \n",
-    "    x = (x - 600) / 10\n",
-    "    y = (y - 100) / 10\n",
-    "    \n",
-    "    t = (t - mz) / dz\n",
-    "    \n",
-    "    return x, y, t\n",
-    "\n",
-    "\n",
-    "from pathlib import Path\n",
-    "\n",
-    "def show_libct_solution(sol_path,\n",
-    "               time=10,\n",
-    "               lower_limit=1000,\n",
-    "               upper_limit=2000):\n",
-    "    \n",
-    "    tra_path = Path(sol_path, 'tracking.txt')\n",
-    "    sol_path = Path(sol_path, 'libct_sol.txt')\n",
-    "    \n",
-    "    assert os.path.isfile(tra_path), tra_path\n",
-    "    assert os.path.isfile(sol_path), sol_path\n",
-    "    \n",
-    "    \n",
-    "    # COLORS\n",
-    "    COL = {'red':0xF54731, 'green':0xB4FA41, 'blue':0x1122FF, 'black':0x000000, 'gray':0xEEEEEE, 'yellow':0xF5BC00, 'pink':0xD6559E}\n",
-    "\n",
-    "    \n",
-    "    \n",
-    "    \n",
-    "    # get vertex map from 'tracking.txt'\n",
-    "    \n",
-    "    vertex_map = {} # vertex_id : (x, y, frame_idx)\n",
-    "    with open(tra_path, 'r') as f:\n",
-    "        \n",
-    "        # show vertices\n",
-    "        for line in tqdm(f.readlines()):\n",
-    "            key = line.split(' ')[0]\n",
-    "            if key == 'H':\n",
-    "                \n",
-    "                _, frame_idx, unique_id, cost, x, y = line.split(' ')\n",
-    "                x, y, frame_idx = transform_coo(float(x), float(y), int(frame_idx))\n",
-    "                vertex_map[unique_id] = (float(x), float(y), int(frame_idx))\n",
-    "\n",
-    "    \n",
-    "    \n",
-    "    blender = BlenderViewer(f'solution_{sol_path}')\n",
-    "    \n",
-    "    \n",
-    "    with open(sol_path, 'r') as f:\n",
-    "        \n",
-    "        # show vertices\n",
-    "        for line in tqdm(f.readlines()):\n",
-    "            \n",
-    "            line = line.rstrip()\n",
-    "            \n",
-    "            key = line.split(' ')[0]\n",
-    "            if key == 'H':\n",
-    "                \n",
-    "                _, unique_id = line.split(' ')\n",
-    "                x, y, frame_idx = vertex_map[unique_id]\n",
-    "    \n",
-    "                blender.put_sphere((x, y, frame_idx, time),\n",
-    "                   object_id=1,\n",
-    "                   color=COL['black'], size=.05)\n",
-    "                \n",
-    "            elif key == 'APP':\n",
-    "                \n",
-    "                _, unique_id = line.split(' ')\n",
-    "                vertex_id = '1' + str(unique_id[1:])\n",
-    "                x, y, frame_idx = vertex_map[vertex_id]\n",
-    "    \n",
-    "                blender.put_sphere((x, y, frame_idx, time),\n",
-    "                   object_id=1,\n",
-    "                   color=COL['yellow'], size=1)\n",
-    "                \n",
-    "\n",
-    "            elif key == 'DISAPP':\n",
-    "                \n",
-    "                _, unique_id = line.split(' ')\n",
-    "                vertex_id = '1' + str(unique_id[1:])\n",
-    "                x, y, frame_idx = vertex_map[vertex_id]\n",
-    "    \n",
-    "                blender.put_sphere((x, y, frame_idx, time),\n",
-    "                   object_id=1,\n",
-    "                   color=COL['red'], size=1)\n",
-    "        \n",
-    "            elif key == 'MOVE':\n",
-    "                \n",
-    "                _, unique_id = line.split(' ')\n",
-    "                vertex1_id = str(unique_id)[1:12]\n",
-    "                vertex2_id = str(unique_id)[12:]\n",
-    "                assert len(vertex1_id) == 11, vertex1_id\n",
-    "                assert len(vertex2_id) == 11, vertex2_id\n",
-    "                \n",
-    "                x1, y1, frame_idx1 = vertex_map[vertex1_id]\n",
-    "                x2, y2, frame_idx2 = vertex_map[vertex2_id]\n",
-    "                \n",
-    "                blender.put_vector(\n",
-    "                           (x1, y1, frame_idx1, time),\n",
-    "                           (x2, y2, frame_idx2, time),\n",
-    "                           object_id=4,\n",
-    "                           color=COL['black'],\n",
-    "                           radius=.3)\n",
-    "                \n",
-    "            elif key == 'DIV':\n",
-    "                \n",
-    "                \n",
-    "                _, unique_id = line.split(' ')\n",
-    "                mother_id = str(unique_id[1:12])\n",
-    "                x, y, frame_idx = vertex_map[mother_id]\n",
-    "    \n",
-    "                blender.put_sphere((x, y, frame_idx, time),\n",
-    "                   object_id=1,\n",
-    "                   color=COL['black'], size=1)\n",
-    "                \n",
-    "                \n",
-    "            else:\n",
-    "                assert False, line\n",
-    "                \n",
-    "                \n",
-    "    blender.send_buckets()\n",
-    "                \n",
-    "    return\n",
-    "    \n",
-    "    \n",
-    "    \n",
-    "show_libct_solution(res_path, time=12)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "ed97fa2a-7a1e-4e6d-a8f0-abde1501af19",
-   "metadata": {},
-   "source": [
-    "## Display FlowGraph by blender "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "552008a2-bee9-49c8-8eef-31d6b368edc0",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "def get_label(string):\n",
-    "    \n",
-    "    string = string.replace(']', '').replace('[', '')\n",
-    "    \n",
-    "    time, label = string.strip().split(' ')\n",
-    "    time = time.replace('T=', '')\n",
-    "    label = label.replace('Label=', '').replace('GT_label=', '')\n",
-    "    \n",
-    "    return int(time), int(label)\n",
-    "\n",
-    "def read_det_log(txt_path):\n",
-    "        \n",
-    "    \n",
-    "    with open(txt_path, 'r') as f:\n",
-    "\n",
-    "        lines = f.readlines()[:-2]\n",
-    "        sep = []\n",
-    "        \n",
-    "        for idx, line in enumerate(lines):\n",
-    "            if line[0] == '-':\n",
-    "                sep.append(idx)\n",
-    "        print(sep)\n",
-    "        \n",
-    "        secs = np.split(lines, sep)\n",
-    "        res = [sec[1:] for sec in secs[1:]]\n",
-    "        \n",
-    "        print(len(res))\n",
-    "        \n",
-    "        lines_split, lines_fn, lines_fp = res\n",
-    "        \n",
-    "        \n",
-    "        '----------Splitting Operations (Penalty=5)----------'\n",
-    "        splitting_op = {}\n",
-    "        for line in lines_split:\n",
-    "            \n",
-    "            time, label = get_label(line)\n",
-    "            \n",
-    "            splitting_op[time] = splitting_op.get(time, [])\n",
-    "            splitting_op[time].append(label)\n",
-    "\n",
-    "        \n",
-    "        'GT - list'\n",
-    "        '----------False Negative Vertices (Penalty=10)----------'\n",
-    "        fn_vertices = []\n",
-    "        for line in lines_fn:\n",
-    "            \n",
-    "            time, label = get_label(line)\n",
-    "            fn_vertices.append((time, label))\n",
-    "            \n",
-    "        \n",
-    "        '----------False Positive Vertices (Penalty=1)----------'\n",
-    "        fp_vertices = {}\n",
-    "        for line in lines_fp:\n",
-    "            \n",
-    "            time, label = get_label(line)\n",
-    "            \n",
-    "            fp_vertices[time] = fp_vertices.get(time, [])\n",
-    "            fp_vertices[time].append(label)\n",
-    "\n",
-    "            \n",
-    "        \n",
-    "        res = splitting_op, fn_vertices, fp_vertices\n",
-    "        \n",
-    "        return res\n",
-    "    \n",
-    "    \n",
-    "def read_tra_log(txt_path):\n",
-    "        \n",
-    "    # for each time frame list index of daughters (time t) and their mothers (time t-1)\n",
-    "    \n",
-    "    keys = set()\n",
-    "    t0_init = set()\n",
-    "    \n",
-    "    tags = []\n",
-    "    with open(txt_path, 'r') as f:\n",
-    "\n",
-    "        lines = f.readlines()\n",
-    "        sep = []\n",
-    "        \n",
-    "        for idx, line in enumerate(lines):\n",
-    "            if line[0] == '-':\n",
-    "                sep.append(idx)\n",
-    "        print(sep)\n",
-    "        \n",
-    "        secs = np.split(lines, sep)\n",
-    "        res = [sec[1:] for sec in secs[1:]]\n",
-    "        \n",
-    "        print(len(res))\n",
-    "        \n",
-    "        lines_split, lines_fn, lines_fp, lines_remove, lines_add, lines_sem = res\n",
-    "        \n",
-    "        \n",
-    "        '----------Splitting Operations (Penalty=5)----------'\n",
-    "        splitting_op = {}\n",
-    "        for line in lines_split:\n",
-    "            \n",
-    "            time, label = get_label(line)\n",
-    "            \n",
-    "            splitting_op[time] = splitting_op.get(time, [])\n",
-    "            splitting_op[time].append(label)\n",
-    "            \n",
-    "            keys.add(time)\n",
-    "        \n",
-    "        'GT - list'\n",
-    "        '----------False Negative Vertices (Penalty=10)----------'\n",
-    "        fn_vertices = []\n",
-    "        for line in lines_fn:\n",
-    "            \n",
-    "            time, label = get_label(line)\n",
-    "            fn_vertices.append((time, label))\n",
-    "            \n",
-    "        \n",
-    "        '----------False Positive Vertices (Penalty=1)----------'\n",
-    "        fp_vertices = {}\n",
-    "        for line in lines_fp:\n",
-    "            \n",
-    "            time, label = get_label(line)\n",
-    "            \n",
-    "            fp_vertices[time] = fp_vertices.get(time, [])\n",
-    "            fp_vertices[time].append(label)\n",
-    "            \n",
-    "            keys.add(time)\n",
-    "            \n",
-    "        \n",
-    "        '----------Redundant Edges To Be Deleted (Penalty=1)----------'\n",
-    "        remove_edges = {}\n",
-    "        for line in lines_remove:\n",
-    "            \n",
-    "            start, end = line.strip().split(' -> ')\n",
-    "            time0, label0 = get_label(start)\n",
-    "            time1, label1 = get_label(end)\n",
-    "            \n",
-    "            if not time0 + 1 == time1:\n",
-    "                print(f'Redundant edge: {line}')\n",
-    "                continue\n",
-    "            \n",
-    "            remove_edges[time1] = remove_edges.get(time1, [])\n",
-    "            remove_edges[time1].append((label0, label1))\n",
-    "            \n",
-    "            keys.add(time0)\n",
-    "            keys.add(time1)\n",
-    "            t0_init.add(time0)\n",
-    "            \n",
-    "        'GT - list'\n",
-    "        '----------Edges To Be Added (Penalty=1.5)----------'\n",
-    "        add_edges = []\n",
-    "        for line in lines_add:\n",
-    "            \n",
-    "            start, end = line.strip().split(' -> ')\n",
-    "            time0, label0 = get_label(start)\n",
-    "            time1, label1 = get_label(end)\n",
-    "            \n",
-    "            #assert time0 + 1 == time1, f'{time0} {time1}, {line}'\n",
-    "            if not time0 + 1 == time1:\n",
-    "                print(f'To be added: {line}')\n",
-    "                continue\n",
-    "            \n",
-    "            add_edges.append((time0, label0, time1, label1))\n",
-    "            \n",
-    "            t0_init.add(time0)\n",
-    "            \n",
-    "            \n",
-    "        '----------Edges with Wrong Semantics (Penalty=1)----------'\n",
-    "        semantic_edges = {}\n",
-    "        for line in lines_sem:\n",
-    "            \n",
-    "            if line[0] == '=':\n",
-    "                break\n",
-    "            \n",
-    "            start, end = line.strip().split(' -> ')\n",
-    "            time0, label0 = get_label(start)\n",
-    "            time1, label1 = get_label(end)\n",
-    "            \n",
-    "            if not time0 + 1 == time1:\n",
-    "                print(f'Wrong semantics: {line}')\n",
-    "                continue\n",
-    "                \n",
-    "            semantic_edges[time1] = semantic_edges.get(time1, [])\n",
-    "            semantic_edges[time1].append((label0, label1))\n",
-    "            \n",
-    "            keys.add(time0)\n",
-    "            keys.add(time1)\n",
-    "            t0_init.add(time0)\n",
-    "            \n",
-    "        \n",
-    "        print(lines_sem[-1])\n",
-    "        \n",
-    "        res = (splitting_op, fn_vertices, fp_vertices, remove_edges, add_edges, semantic_edges), (keys, t0_init)\n",
-    "        \n",
-    "        return res\n",
-    "    "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "1352a29d-00cf-4a73-a36c-f481e82f1314",
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "80e07a87-2c75-4653-9781-571d9d06e99d",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from blender.blenderpy import BlenderViewer\n",
-    "\n",
-    "\n",
-    "def choose_color_and_size(prob):\n",
-    "    \n",
-    "    COL = {'red':0xF54731, 'green':0xB4FA41, 'blue':0x1122FF, 'gray':0xEEEEEE, 'yellow':0xF5BC00, 'pink':0xD6559E}\n",
-    "    \n",
-    "    size = ((prob * 10) // 1) * .1\n",
-    "    size = max(size, 0.1)\n",
-    "\n",
-    "    \n",
-    "    # three types\n",
-    "    if prob > .95:\n",
-    "        return COL['green'], size\n",
-    "    elif prob > .5:\n",
-    "        return COL['yellow'], size\n",
-    "    else:\n",
-    "        return COL['red'], size\n",
-    "    \n",
-    "    \n",
-    "def transform_coo(x, y, t, mz=1000, dz=1):\n",
-    "    \n",
-    "    x = (x - 600) / 10\n",
-    "    y = (y - 100) / 10\n",
-    "    \n",
-    "    t = (t - mz) / dz\n",
-    "    \n",
-    "    return x, y, t\n",
-    "\n",
-    "    \n",
-    "\n",
-    "\n",
-    "\n",
-    "\n",
-    "def show_graph(graph, subset, seq, suffix='', time=10, object_id=5, lower_limit=1000, upper_limit=2000):\n",
-    "    \n",
-    "    # unpack graph\n",
-    "    indexes, probs, edges, move_edges, vertex_map = graph\n",
-    "\n",
-    "    vertex_index, edge_index = indexes\n",
-    "    edge_prob, vertex_prob = probs\n",
-    "    edge_to, edge_from = move_edges\n",
-    "    \n",
-    "    \n",
-    "    blender = BlenderViewer(f'{subset}_{seq}_{suffix}')\n",
-    "    \n",
-    "    # display vertices\n",
-    "    for v_idx in tqdm(vertex_map.keys()):\n",
-    "        \n",
-    "        frame_idx, label, x, y = vertex_map[v_idx]\n",
-    "        \n",
-    "        if (lower_limit > frame_idx) or (upper_limit < frame_idx):\n",
-    "            continue\n",
-    "        \n",
-    "        assert label > 0, vertex_map[v_idx]\n",
-    "        \n",
-    "        prob = vertex_prob[v_idx]\n",
-    "        \n",
-    "        color, size = choose_color_and_size(prob)\n",
-    "        \n",
-    "                # transform coordinates\n",
-    "        x, y, frame_idx = transform_coo(x, y, frame_idx)\n",
-    "    \n",
-    "        blender.put_sphere((x, y, frame_idx, time),\n",
-    "           object_id=object_id,\n",
-    "           color=color, size=size*.2)\n",
-    "        \n",
-    "        \n",
-    "    # display edges\n",
-    "    for e_idx in tqdm(edges.keys()):\n",
-    "        v1_idx, v2_idx = edges[e_idx]\n",
-    "        \n",
-    "        # TODO: validate\n",
-    "        if v1_idx in [0, 1]:\n",
-    "            frame_idx2, _, x2, y2 = vertex_map[v2_idx]\n",
-    "            frame_idx1, x1, y1 = frame_idx1, x2 + 1000, y2 + 1000\n",
-    "            print(frame_idx1, x1, y1)\n",
-    "            \n",
-    "            \n",
-    "        elif v2_idx in [0, 1]:\n",
-    "            frame_idx1, _, x1, y1 = vertex_map[v1_idx]\n",
-    "            frame_idx2, x2, y2 = frame_idx1, x1 + 100, y1 + 100\n",
-    "\n",
-    "        else:\n",
-    "            frame_idx1, _, x1, y1 = vertex_map[v1_idx]\n",
-    "            frame_idx2, _, x2, y2 = vertex_map[v2_idx]\n",
-    "        \n",
-    "        # filter out coordinates\n",
-    "        if (lower_limit > frame_idx1) or (upper_limit < frame_idx1):\n",
-    "            continue\n",
-    "\n",
-    "        # transform coordinates\n",
-    "        x1, y1, frame_idx1 = transform_coo(x1, y1, frame_idx1)\n",
-    "        x2, y2, frame_idx2 = transform_coo(x2, y2, frame_idx2)\n",
-    "        \n",
-    "        # send vectors\n",
-    "    \n",
-    "        prob = edge_prob[e_idx]\n",
-    "        color, size = choose_color_and_size(prob)\n",
-    "        \n",
-    "        blender.put_vector(\n",
-    "                   (x1, y1, frame_idx1, time),\n",
-    "                   (x2, y2, frame_idx2, time),\n",
-    "        \n",
-    "                   object_id=object_id,\n",
-    "                   color=color,\n",
-    "                   radius=size)\n",
-    "        \n",
-    "    \n",
-    "        \n",
-    "    blender.send_buckets()\n",
-    "    \n",
-    "    \n",
-    "show_graph(graph, subset, sequence, time=int(sequence), lower_limit=1400, upper_limit=2000)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "21d29630-9ee7-494b-ac7a-66923e2dd69d",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "V_IDX = 392\n",
-    "\n",
-    "print('index', V_IDX, vertex_map[V_IDX])\n",
-    "print('EDGES from: ', [(edges[e_idx], edge_prob[e_idx]) for e_idx in edge_to[V_IDX]])\n",
-    "print('EDGES to: ', [(edges[e_idx], edge_prob[e_idx]) for e_idx in edge_from[V_IDX]])"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "0e6ce41e-dff5-4c4b-a0f0-77abe4ede091",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "edge_from[590]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "315ad478-3bde-452e-969f-4934ce240e58",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "assert False"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "4eab414f-6b5f-46cd-9a24-8443e9d2e7a1",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from blender.blenderpy import send_res\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "79df24fe-9689-4520-9fbe-2a6655205914",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from pathlib import Path\n",
-    "\n",
-    "seq = '02'\n",
-    "res_path = Path('datasets_local', 'BF-C2DL-HSC')\n",
-    "send_res(res_path, seq, 5, suffix='')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "64e0faa3-cf05-4daf-9ce0-8fb5fb365abf",
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.8.10"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
diff --git a/tracking/divergence_tools.py b/tracking/divergence_tools.py
index 9ee23a0..523c98a 100644
--- a/tracking/divergence_tools.py
+++ b/tracking/divergence_tools.py
@@ -8,13 +8,15 @@ import numpy as np
 import pandas as pd
 from tqdm import tqdm
 
-def compute_divergence(path, plot=False):
+import tifffile as tiff
+
+from skimage import io
+import os
     
-    '''
-    offset_path = path / 'wavefunc_offset.csv'
-    seg_path = path / 'wavefunc_seg.csv'
-    vertex_prob_path =  path / 'vertex_prob.csv'
-    '''
+def compute_divergence(path,
+                       plot=False,          # TO REMOVE
+                       divergence_thr=50,
+                      ):
     
     offset_path = path / 'tra_offsets.csv'
     seg_path = path / 'seg_offsets.csv'
@@ -25,6 +27,12 @@ def compute_divergence(path, plot=False):
     assert seg_path.exists()
     assert vertex_prob_path.exists(), vertex_prob_path
     
+    divergence_path = path / "edge_prob_divergence.csv"
+    
+    if divergence_path.exists():
+        print(divergence_path, 'already exists')
+        return 
+    
 
     df_off = pd.read_csv(offset_path)
     df_seg = pd.read_csv(seg_path)
@@ -35,58 +43,60 @@ def compute_divergence(path, plot=False):
     times = np.unique(df_off.time)
     times[::-1].sort()
 
-    for time_offset in tqdm(times):
+    for time_curr in tqdm(times):
 
         # correction of the offset time
-        time = time_offset - 1
+        time_prev = time_curr - 1
 
         # get slices of time 'time'
-        seg_prev = df_seg[df_seg.time == time - 1]
-        off_curr = df_off[df_off.time == time_offset]
-
-        seg_curr = df_seg[df_seg.time == time]
-
+        seg_prev = df_seg[df_seg.time == time_prev]
+        off_curr = df_off[df_off.time == time_curr]
+        
+        # validation of the input file
+        # TODO: remove
+        seg_curr = df_seg[df_seg.time == time_curr]
         assert len(off_curr) == len(seg_curr)
 
-        if plot:
-            plot_offsets(seg, off)
+        samples += compute_kl_multivariate(seg_prev,
+                                           off_curr,
+                                           time_curr,
+                                           divergence_thr=divergence_thr)
 
-        samples += compute_kl_multivariate(seg_prev, off_curr, time)
-
-
-    edge_prob_df = pd.DataFrame(samples, columns=['time_curr', 'time_prev', 'label_curr', 'label_prev', 'divergence'])
+    edge_prob_df = pd.DataFrame(samples, columns=['time_curr',
+                                                  'time_prev',
+                                                  'label_curr',
+                                                  'label_prev',
+                                                  'divergence'])
     edge_prob_df['cost'] = np.log(edge_prob_df.divergence)
 
-    # store edge and vertex probs as .csv files
-    divergence_path = path / "edge_prob_divergence.csv"
-    
+    # store the edge divergences as a .csv file
     print(f'storing {divergence_path}')
     edge_prob_df.to_csv(
         divergence_path,
         index=False,
     )
     
-def compute_divergence_v2(path, plot=False):
+    
+def compute_distance(path, distance_thr=100):
     
     offset_path = path / 'tra_offsets.csv'
     seg_path = path / 'seg_offsets.csv'
-    vertex_prob_path =  path / 'vertex_prob.csv'
-
+    
+    print('offset_path', offset_path)
+    print('seg_path', seg_path)
 
     assert offset_path.exists(), offset_path
-    assert seg_path.exists()
-    assert vertex_prob_path.exists(), vertex_prob_path
+    assert seg_path.exists(), seg_path
     
-    divergence_path = path / "edge_prob_divergence.csv"
+    distance_path = path / "edge_prob_distance.csv"
     
-    if divergence_path.exists():
-        print(divergence_path, 'already exists')
+    if distance_path.exists():
+        print(distance_path, 'already exists')
         return 
     
 
     df_off = pd.read_csv(offset_path)
     df_seg = pd.read_csv(seg_path)
-    df_vprob = pd.read_csv(vertex_prob_path)
 
     samples = []
 
@@ -101,28 +111,279 @@ def compute_divergence_v2(path, plot=False):
         # get slices of time 'time'
         seg_prev = df_seg[df_seg.time == time_prev]
         off_curr = df_off[df_off.time == time_curr]
-
-        seg_curr = df_seg[df_seg.time == time_curr]
         
+        # validation of the input file
+        # TODO: remove
+        seg_curr = df_seg[df_seg.time == time_curr]
         assert len(off_curr) == len(seg_curr)
 
-        if plot:
-            plot_offsets(seg, off)
+        samples += compute_distance_samples(seg_prev,
+                                           off_curr,
+                                           time_curr,
+                                           distance_thr=distance_thr)
 
-        samples += compute_kl_multivariate(seg_prev, off_curr, time_curr)
+    edge_prob_df = pd.DataFrame(samples, columns=['time_curr',
+                                                  'time_prev',
+                                                  'label_curr',
+                                                  'label_prev',
+                                                  'distance'])
 
+    # store the edge distances as a .csv file
+    print(f'storing {distance_path}')
+    edge_prob_df.to_csv(
+        distance_path,
+        index=False,
+    )
+    
+    
+def compute_distance2(path, distance_thr=100):
+    
+    offset_path = path / 'tra_offsets.csv'
+    seg_path = path / 'seg_offsets.csv'
 
-    edge_prob_df = pd.DataFrame(samples, columns=['time_curr', 'time_prev', 'label_curr', 'label_prev', 'divergence'])
-    edge_prob_df['cost'] = np.log(edge_prob_df.divergence)
+    assert offset_path.exists(), offset_path
+    assert seg_path.exists(), seg_path
+    
+    distance_path = path / "edge_prob_distance2.csv"
+    
+    if distance_path.exists():
+        print(distance_path, 'already exists')
+        return 
+    
+
+    df_off = pd.read_csv(offset_path)
+    df_seg = pd.read_csv(seg_path)
+
+    samples = []
+
+    times = np.unique(df_off.time)
+    times[::-1].sort()
+
+    for time_curr in tqdm(times):
+
+        # correction of the offset time
+        time_prev = time_curr - 1
+
+        # get slices of time 'time'
+        seg_prev = df_seg[df_seg.time == time_prev]
+        off_curr = df_off[df_off.time == time_curr]
+        
+        # read labeled images
+        li_curr = io.imread(path / f'mask{time_curr:04d}.tif', dtype=np.uint16)
+        li_prev = io.imread(path / f'mask{time_prev:04d}.tif', dtype=np.uint16)
+
+        samples += compute_distance2_samples(seg_prev,
+                                           off_curr,
+                                           time_curr,
+                                           distance_thr=distance_thr,
+                                           li_curr=li_curr,
+                                           li_prev=li_prev)
+
+    edge_prob_df = pd.DataFrame(samples, columns=['time_curr',
+                                                  'time_prev',
+                                                  'label_curr',
+                                                  'label_prev',
+                                                  'distance',
+                                                  'distance_markers'])
 
     # store edge and vertex probs as .csv files   
-    print(f'storing {divergence_path}')
+    print(f'storing {distance_path}')
     edge_prob_df.to_csv(
-        divergence_path,
+        distance_path,
         index=False,
     )
     
     
+def analyze_csv(data_path, csv_name, gt_path):
+    
+    '''
+    The input csv contains the columns: label_prev, label_curr, time_prev, time_curr.
+
+    The function joins these candidate edges with the ground-truth edges and adds a column:
+        tag : {MOVE, DIVISION, NONVALID}
+    Candidate edges with a missing detection are reported in false_negatives.csv.
+
+    params
+    data_path : string
+        path to the directory with the results (seg_offsets.csv, vertex_prob.csv, mask*.tif)
+    csv_name : string
+        name of the csv file with the candidate edges
+    gt_path : string
+        path to the CTC ground truth directory (contains the TRA/ subfolder)
+    '''
+    
+    
+
+    assert os.path.isdir(gt_path), gt_path
+    assert os.path.isdir(data_path), data_path
+
+    
+    # get additional information about the vertices
+    csv_path = os.path.join(data_path, csv_name)
+    seg_path = os.path.join(data_path, 'seg_offsets.csv')
+    prob_path = os.path.join(data_path, 'vertex_prob.csv')
+    
+    assert os.path.isfile(csv_path), csv_path
+    assert os.path.isfile(seg_path), seg_path
+    assert os.path.isfile(prob_path), prob_path
+    
+    # get resources
+    seg_df = pd.read_csv(seg_path, index_col=['time', 'label'])
+    prob_df = pd.read_csv(prob_path, index_col=['time', 'label'])
+    df = pd.read_csv(csv_path)
+    mothers = read_mothers(os.path.join(gt_path, 'TRA', 'man_track.txt'))
+    print('mothers', mothers)
+    
+    # false negatives - at least one detection is missing
+    # columns = (time_curr, time_prev, label_gt_curr, label_gt_prev)
+    false_negatives = []
+    
+    # gt_edges - all gt edges, where both detections exist in res
+    # columns = (time_curr, time_prev, label_gt_curr, label_gt_prev, tag)
+    gt_edges = []
+    
+    print('np.unique(df.time_curr)', np.unique(df.time_curr))
+    
+    # for every time_curr
+    for time_curr in tqdm(np.unique(df.time_curr)):
+        
+        view = df[df.time_curr == time_curr]
+        time_prev = time_curr - 1
+        
+        # read gt
+        gt_curr_path = os.path.join(gt_path, 'TRA', f'man_track{time_curr:04d}.tif')
+        gt_prev_path = os.path.join(gt_path, 'TRA', f'man_track{time_prev:04d}.tif')
+        
+        
+        
+        gt_curr = tiff.imread(gt_curr_path)
+        gt_prev = tiff.imread(gt_prev_path)
+        
+        # read res
+        res_curr_path = os.path.join(data_path, f'mask{time_curr:04d}.tif')
+        res_prev_path = os.path.join(data_path, f'mask{time_prev:04d}.tif')
+        
+        res_curr = tiff.imread(res_curr_path)
+        res_prev = tiff.imread(res_prev_path)
+        
+        labels_gt_prev = np.unique(gt_prev)
+        
+        '''
+        
+        # mapping = { gt_label : res_label }
+        mapping_curr = {label_gt : get_result_label(res_curr, gt_curr == label_gt )
+                        for label_gt in np.unique(gt_curr) if label_gt != 0}
+        mapping_prev = {label_gt : get_result_label(res_prev, gt_prev == label_gt )
+                        for label_gt in np.unique(gt_prev) if label_gt != 0}
+        
+        
+        
+        '''
+        if (time_curr % 100) == 0:
+            print(time_curr, 'np.unique(gt_curr)', np.unique(gt_curr), gt_curr.dtype)
+        
+        #iterate over all indexes in a gt_curr
+        for label_gt_curr in np.unique(gt_curr):
+            
+            if label_gt_curr == 0:
+                continue
+                
+            # the label persists -> MOVE; otherwise look up the mother -> DIVISION
+            if label_gt_curr in labels_gt_prev:
+                label_gt_prev = label_gt_curr
+                tag = 'MOVE'
+            else:
+                label_gt_prev = mothers[label_gt_curr]
+                tag = 'DIVISION'
+                
+            mask_gt_curr = gt_curr == label_gt_curr
+            mask_gt_prev = gt_prev == label_gt_prev
+
+            label_curr = get_result_label(res_curr, mask_gt_curr) 
+            label_prev = get_result_label(res_prev, mask_gt_prev) 
+            
+            
+            if (time_curr % 100) == 0:
+                print(time_curr, 'label_gt_curr', label_gt_curr, label_curr, label_prev)
+            
+            # missing detections
+            if (label_curr == 0) or (label_prev == 0):
+                
+                # report in false negatives
+                # TODO: debug
+                # reports weird values
+                false_negatives.append((time_curr, time_prev, label_gt_curr, label_gt_prev))
+                
+            else:
+                
+                # gt_edges
+                gt_edges.append((time_curr, time_prev, label_curr, label_prev, tag))
+            
+            
+    # merge gt_edges with df and save
+    # TODO: add tags
+    df_gt_edges = pd.DataFrame(gt_edges, columns = ('time_curr', 'time_prev', 'label_curr', 'label_prev', 'tag'))
+    
+    df_gt_edges.set_index(['time_curr', 'time_prev', 'label_curr', 'label_prev'], inplace=True)
+    df.set_index(['time_curr', 'time_prev', 'label_curr', 'label_prev'], inplace=True)
+    
+    result = df.join(df_gt_edges, how='outer')
+    result['tag'] = result['tag'].fillna('NONVALID')
+    
+    analysis_path = csv_path[:-4] + '_analysis.csv'
+    print(f'storing {analysis_path}')
+    result.to_csv(
+        analysis_path,
+    )
+    
+    # save false_negatives
+    df_fn = pd.DataFrame(false_negatives, columns = ('time_curr', 'time_prev', 'label_gt_curr', 'label_gt_prev'))
+    
+    fn_path = os.path.join(data_path, 'false_negatives.csv')
+    print(f'storing {fn_path}')
+    df_fn.to_csv(
+        fn_path,
+    )
+            
+
+            
+            
+def get_result_label(res, gt_mask):
+    '''
+    returns the result label that covers at least 50 % of the ground-truth mask (0 if there is none)
+    '''
+    size = gt_mask.sum()
+    labels, counts = np.unique(res * gt_mask, return_counts=True)
+    
+    #print(counts, labels, size)
+
+    
+    for count, label in zip(counts, labels):
+        if (label != 0) and (count >= (size//2)):
+            return label
+        
+    return 0
+            
+        
+        
+        
+def read_mothers(man_track_path):
+    '''
+    returns mothers = {daughter_label : mother_label} parsed from the CTC man_track.txt file
+    '''
+    
+    assert os.path.isfile(man_track_path), man_track_path
+    
+    mothers = {}
+    
+    with open(man_track_path, 'r') as f:
+        
+        for line in f.readlines():
+            daughter_idx, _, _, mother_idx = line.strip().split(' ')
+            mothers[int(daughter_idx)] = int(mother_idx)
+            
+    return mothers
+    
+    
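For reference, a minimal sketch of the lineage file read_mothers() above expects: the CTC man_track.txt stores one `label begin_frame end_frame parent_label` record per line, with parent 0 meaning no mother. The labels below are invented.

example_lines = [          # hypothetical man_track.txt content
    '1 0 120 0',           # track 1: frames 0-120, no mother
    '2 121 200 1',         # track 2: first daughter of track 1
    '3 121 200 1',         # track 3: second daughter of track 1
]

mothers = {}
for line in example_lines:
    daughter_idx, _, _, mother_idx = line.strip().split(' ')
    mothers[int(daughter_idx)] = int(mother_idx)

print(mothers)             # {1: 0, 2: 1, 3: 1}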
 def kl_divergence(mu1, mu2, sigma1, sigma2):
     return np.log1p(sigma2/sigma1) + (sigma1**2 + (mu1 - mu2) ** 2) / (2 * sigma2 ** 2) - .5
 
@@ -145,7 +406,7 @@ def print_kl(seg, off):
     return pd.DataFrame(samples, columns=['label from', 'label to', 'divergence'])
 
 
-def compute_kl_multivariate(seg, off, time, divergence_thr=50, distance_thr=100):
+def compute_kl_multivariate(seg, off, time, divergence_thr=50):
     
     samples = []
     for i, line in enumerate(seg.values):
@@ -164,16 +425,126 @@ def compute_kl_multivariate(seg, off, time, divergence_thr=50, distance_thr=100)
             S1 = np.array([[c00, c01], [c10, c11]])
             m1 = np.array([mx, my])
             
-            if euklidian_distance(m0, m1) > distance_thr:
-                continue
+            div = kl_mvn(m0, S0, m1, S1)
+            
+            if div < divergence_thr:
+                samples.append([time, time-1, int(label_off_curr), int(label_seg_prev), div])
+            
+    return samples
+
+
+def compute_distance_samples(seg, off, time, distance_thr=100):
+    
+    samples = []
+    for i, line in enumerate(seg.values):
+        
+        # time prev
+        _, label_seg_prev, mx, my, sx, sy, c00, c01, c10, c11 = line
+        
+        S0 = np.array([[c00, c01], [c10, c11]])
+        m0 = np.array([mx, my])
+        
+        for j, line in enumerate(off.values):
+            
+            # time curr
+            _, label_off_curr, mx, my, sx, sy, c00, c01, c10, c11 = line
+            
+            S1 = np.array([[c00, c01], [c10, c11]])
+            m1 = np.array([mx, my])
+            
+            dist = euklidian_distance(m0, m1)
+            
+            if dist < distance_thr:
+                samples.append([time, time-1, int(label_off_curr), int(label_seg_prev), dist])
+            
+    return samples
+
+
+def compute_distance2_samples(seg, off, time, distance_thr=100, li_curr=None, li_prev=None):
+    
+    assert li_curr is not None
+    assert li_prev is not None
+    
+    off_coo_dict = {}
+    for j, line in enumerate(off.values):
+
+        # time curr
+        _, label_off_curr, mx, my, sx, sy, c00, c01, c10, c11 = line
+        mask_curr = (li_curr == label_off_curr)
+        coo_curr = np.array([coo.mean() for coo in mask_curr.nonzero()])
+        
+        S1 = np.array([[c00, c01], [c10, c11]])
+        m1 = np.array([mx, my])
+        
+        off_coo_dict[label_off_curr] = coo_curr, S1, m1
+    
+    samples = []
+    for i, line in enumerate(seg.values):
+        
+        # time prev
+        _, label_seg_prev, mx, my, sx, sy, c00, c01, c10, c11 = line
+        
+        mask_prev = (li_prev == label_seg_prev)
+        coo_prev = np.array([coo.mean() for coo in mask_prev.nonzero()])
+
+        
+        S0 = np.array([[c00, c01], [c10, c11]])
+        m0 = np.array([mx, my])
+        
+        
+        for label_off_curr in off_coo_dict.keys():
+            coo_curr, S1, m1 = off_coo_dict[label_off_curr]
+
+            dist = euklidian_distance(m0, m1)
+            dist_masks = euklidian_distance(coo_prev, coo_curr)
+            
+            if dist < distance_thr:
+                samples.append([time, time-1, int(label_off_curr), int(label_seg_prev), dist, dist_masks])
+            
+    return samples
+
+
+
+def compute_distance2_samples_fast(seg, off, time, distance_thr=100, li_curr=None, li_prev=None):
+    
+    assert li_curr is not None
+    assert li_prev is not None
+    
+    
+    labels_curr = np.unique(li_curr)
+    labels_prev = np.unique(li_prev)
+    
+    samples = []
+    
+    
+    dict_coo_prev = {}
+    for l_prev in labels_prev:
+        if l_prev == 0:
+            continue
+
+        mask_prev = (li_prev == l_prev)
+        coo_prev = np.array([coo.mean() for coo in mask_prev.nonzero()])
+        
+        dict_coo_prev[l_prev] = coo_prev
+        
+        
+
+    for l_curr in labels_curr:
+        
+        if l_curr == 0:
+            continue
+            
+        mask_curr = (li_curr == l_curr)
+        coo_curr = np.array([coo.mean() for coo in mask_curr.nonzero()])
+        
+        for l_prev in dict_coo_prev.keys():
+            coo_prev = dict_coo_prev[l_prev]
             
-            kl_divergence = kl_mvn(m0, S0, m1, S1)
+            dist_masks = euklidian_distance(coo_prev, coo_curr)
             
-            if kl_divergence < divergence_thr:
-                # columns=['time_curr', 'time_prev', 'label_curr', 'label_prev', 'divergence']
-                samples.append([time, time-1, int(label_off_curr), int(label_seg_prev), kl_divergence])
+            if dist_masks < distance_thr:
+                samples.append([time, time-1, int(l_curr), int(l_prev), 0, dist_masks])
             
-    #return pd.DataFrame(samples, columns=['time_curr', 'time_prev', 'label_curr', 'label_prev', 'divergence'])
     return samples
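compute_kl_multivariate above calls a kl_mvn helper that is not part of this diff. As a reference, a minimal sketch of the textbook closed-form KL divergence between two multivariate Gaussians that such a helper would compute; the means and 2x2 covariances mirror the mx/my and c00..c11 columns of the offset CSVs, and the numbers are invented.

import numpy as np

def kl_mvn_sketch(m0, S0, m1, S1):
    # KL( N(m0, S0) || N(m1, S1) ) for k-dimensional Gaussians
    k = m0.shape[0]
    S1_inv = np.linalg.inv(S1)
    diff = m1 - m0
    return 0.5 * (np.trace(S1_inv @ S0)
                  + diff @ S1_inv @ diff
                  - k
                  + np.log(np.linalg.det(S1) / np.linalg.det(S0)))

m0, S0 = np.array([10.0, 12.0]), np.array([[4.0, 0.5], [0.5, 3.0]])
m1, S1 = np.array([11.0, 13.0]), np.array([[4.2, 0.4], [0.4, 3.1]])
print(kl_mvn_sketch(m0, S0, m1, S1))   # small value -> likely the same cell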
 
 
diff --git a/tracking/embedtrack.py b/tracking/embedtrack.py
index 76dc8ea..fbfe560 100644
--- a/tracking/embedtrack.py
+++ b/tracking/embedtrack.py
@@ -2,8 +2,8 @@ from .sys_tools import run_procedure
 
 
 # TODO: communicate with an embedtrack  a config file
-def run_embedtrack(data_path, seq, model_path):
+def run_embedtrack(data_path, seq, model_path, batch_size=1):
 
-    prompt = f'python3 EmbedTrack/embedtrack.py --sequence {seq} --data_path {data_path} --model_path {model_path}'
+    prompt = f'python3 EmbedTrack/embedtrack.py --sequence {seq} --data_path {data_path} --model_path {model_path} --batch_size {batch_size}'
     run_procedure(prompt)
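A hypothetical call of the wrapper above from the repository root; the data, sequence and model paths are placeholders, and batch_size is simply forwarded as the new --batch_size flag.

from tracking.embedtrack import run_embedtrack

# all paths below are illustrative placeholders
run_embedtrack(data_path='DATA/BF-C2DL-HSC',
               seq='01',
               model_path='models/BF-C2DL-HSC',
               batch_size=4)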
 
diff --git a/tracking/global_tracker.py b/tracking/global_tracker.py
index 292fec1..0f2a80a 100644
--- a/tracking/global_tracker.py
+++ b/tracking/global_tracker.py
@@ -9,18 +9,22 @@ import math
 import pandas as pd
 import numpy as np
 from tqdm import tqdm
+import math
 
 
 from pathlib import Path
 from .sys_tools import run_procedure
 from .libct import run_libct, libct2ctc
 from .graph import FlowGraph
-from .divergence_tools import compute_divergence_v2
+from .divergence_tools import compute_divergence, compute_distance, compute_distance2
 
 
 from itertools import repeat, chain
 from matplotlib import pyplot as plt
 
+
+
+
 class GlobalTracker():
     
     '''
@@ -31,12 +35,16 @@ class GlobalTracker():
     def __init__(self,
                  data_path,
                  res_path,
-                 lbd=.5,
                  mean_dist=10,
                  max_dist=20,
                  vertex_thr=0.95,
                  app_cost=10000,
-                 edge_min_cost=-1):
+                 edge_min_cost=-1,
+                 normalize=False,
+                 n_edges=3,
+                 min_frame=0,
+                 max_frame=2000
+                 ):
         '''
         Parameters:
         -----------
@@ -66,61 +74,81 @@ class GlobalTracker():
                              /...
         '''
         
-        sequence = os.path.basename(res_path)[:2]
-        assert sequence == os.path.basename(data_path)[:2]
+        sequence = os.path.basename(data_path)[:2]
+        #assert sequence == os.path.basename(res_path)[:2]
             
         self.res_path = res_path
         self.data_path = data_path
         assert os.path.isdir(data_path), data_path
-        assert os.path.isdir(res_path), res_path
+        #assert os.path.isdir(res_path), res_path
         
         self.graph = None
         self.sequence = sequence
-        self.lbd=lbd
         self.mean_dist = mean_dist
         self.max_dist = max_dist
         self.vertex_thr = vertex_thr
         self.app_cost = app_cost
         self.edge_min_cost = edge_min_cost
 
+        
+        self.n_edges = n_edges
+        self.normalize = normalize
+        
+        
+        self.min_frame = min_frame
+        self.max_frame = max_frame
+        
+        self.graph = self.create_graph()
+        
+        
+        
+        
+    def create_graph(self):
+        # create graph
+        # TODO: limit to min_frame/max_frame
+        print('creating graph')
+        return get_graph_bk(
+                        self.data_path,
+                        min_frame=self.min_frame,
+                        max_frame=self.max_frame,
+                        n_edges=self.n_edges)
+        
+
     def run_tracking(self,
                      idx_min=0,
                      idx_max=10000,
-                     limit_dist=100,
-                     n_neighbours=2):
-
-        # compute divergence
-        div_path = os.path.join(self.data_path, 'edge_prob_divergence.csv')
-        if not os.path.isfile(div_path):
-            print('computing divergence')
-            compute_divergence_v2(self.data_path)
+                     limit_dist=30,
+                     ME=-0.5,
+                     MV=-0.5,
+                     MULTV=2,
+                     zero_exp=300
+                     ):
+        
+        # the distance file has to be precomputed (see divergence_tools)
+        div_path = os.path.join(self.data_path, 'edge_prob_distance2.csv')
             
         assert os.path.isfile(div_path)
-
-        # create graph
-        print('creating graph')
-        self.graph = get_graph_embedtrack(
-                        self.data_path,
-                        min_frame=idx_min,
-                        max_frame=idx_max,
-                        limit_dist=limit_dist,
-                        n_neighbours=n_neighbours,
-                        mean_dist=self.mean_dist,
-                        max_dist=self.max_dist)
+        
 
         # create and save tracking file
         print('creating tracking file')
-        tracking = graph2tracking(self.graph,
-                                  lbd=self.lbd,
-                                  vertex_thr=self.vertex_thr,
-                                  max_cost=self.app_cost,
-                                  mean_dist=self.mean_dist,
-                                  max_dist=self.max_dist,
-                                  edge_min_cost=self.edge_min_cost)
+        tracking = graph2tracking_bk(self.graph,
+                                     lbd1=1,
+                                     lbd2=1,    
+                                     lbd3=15,     # max move distance
+                                     thT=1,       # appearance / disappearance in the first / last thT frames
+                                     thS=0,       # not used
+                                     limit_dist=limit_dist,
+                                     ME=ME,
+                                     MV=MV,
+                                     MULTV=MULTV,
+                                     zero_exp=zero_exp
+                                     )
+        
         save_tracking(tracking, self.res_path)
 
         # compute solution
-        print('computiong solution')
+        print('computing solution')
         run_libct(self.res_path) 
         
     
@@ -145,7 +173,8 @@ class GlobalTracker():
         sol_stats(sol_file_path, tracking_file_path, plot=plot)
         
         
-    def evaluate_ctc(self):
+    def evaluate_ctc(self,
+                     measures=('TRAMeasure', 'DETMeasure', 'SEGMeasure')):
         '''
         computes CTC metrics to evaluate the results
         '''
@@ -175,7 +204,7 @@ class GlobalTracker():
         run_procedure(f'ln -s {src_path} {temp_res_path}')
         
         # run evaluation procedures
-        measures = ['TRAMeasure', 'DETMeasure', 'SEGMeasure']
+        #measures = ['TRAMeasure', 'DETMeasure', 'SEGMeasure']
         #measures = ['TRAMeasure', 'SEGMeasure']
         for m in measures:
             command = f'./ctc_metrics/{m} {dirname} {self.sequence} 4'
@@ -184,7 +213,27 @@ class GlobalTracker():
             
         # remove symlink
         run_procedure(f'rm {temp_res_path}')
+        
+    def report_ctc_measures(self, parameters):
+        """
+        for the given parameters, report the CTC measure results found in the *log.txt files in res_path
+        """
+        
+        print(f'parameters: {parameters}')
+        for file in os.listdir(self.res_path):
+            
+            if file[-7:] == 'log.txt':
+                print(f'{file}: {read_last_line(os.path.join(self.res_path, file))}')
+                
+def read_last_line(file_path):
 
+    with open(file_path) as f:
+        for line in f:
+            pass
+        return line
+        
+        
+    
 
 def compute_edge_cost(log_kl_divergence, distance, avg_dist=10):
 
@@ -200,28 +249,25 @@ def compute_edge_cost(log_kl_divergence, distance, avg_dist=10):
     return - cost
 
 
-def get_graph_embedtrack(
-        res_path,
+def get_graph_bk(
+        data_path,
+        n_edges=5,
         min_frame=0,
         max_frame=2000,
-        virtual_edge_prob=50,  # equal to divergence_thr=50 in divergence_tools.py
-        limit_dist=50,
-        n_neighbours=3,
-        mean_dist=10,
-        max_dist=20,
-        edge_min_cost=-1
     ):
     '''
     creates instance of candidate graph
     
     Parameters
     ----------
-    res_path: str
+    data_path: str
         path to the dataset
+    n_edges: int
+        each vertex is connected only to its <n_edges> closest neighbours in the previous frame
     min_frame: int
-        first frame to analyze 
+        first frame index to analyze (inclusive)
     max_frame: int 
-        last frame to analyze
+        last frame index to analyze (inclusive)
         
     Returns
     -------
@@ -231,38 +277,41 @@ def get_graph_embedtrack(
     
     '''
     
-    # create consistent graph structure
-    graph = FlowGraph(res_path)
-
-    #pd_edge_path = os.path.join(res_path, 'edge_prob.csv')
-    pd_edge_path = os.path.join(res_path, 'edge_prob_divergence.csv')
-    pd_vertex_path = os.path.join(res_path, 'vertex_prob.csv')
+    # create an empty graph structure
+    graph = FlowGraph(data_path)
+    
+    # compute distances
+    pd_edge_path = os.path.join(data_path, 'edge_prob_distance.csv')
+    if not os.path.isfile(pd_edge_path):
+        print('computing distance')
+        compute_distance(data_path)
+
+    # read info about the detections and high probability edges
+    pd_vertex_path = os.path.join(data_path, 'vertex_prob.csv')
     assert os.path.isfile(pd_edge_path), pd_edge_path
     assert os.path.isfile(pd_vertex_path), pd_vertex_path
 
     df_edges = pd.read_csv(pd_edge_path, index_col=False)
     df_vertices = pd.read_csv(pd_vertex_path, index_col=False)
     
-    # compute cost
-    df_edges['cost'] = np.log(df_edges.divergence)
-        
     # list of real frame indexes
     frame_indexes = df_vertices['time'].unique()
     
-    # TODO: sort in reverse order
-    frame_indexes.sort()
+    # limit problem in temporal domain
+    frame_indexes.sort()   # TODO: to remove
     min_frame = int(np.maximum(frame_indexes.min(), min_frame))
     max_frame = int(np.minimum(frame_indexes.max(), max_frame))
     
     ########## 
     # ADD ALL VERTICES
     print('GET GRAPH: adding vertices')
-    for i, row in df_vertices.iterrows():
+    for i, vertex in df_vertices.iterrows():
 
-        prob = row['prob']
-        label = row['label']
-        frame_idx = row['time']
+        prob = vertex['prob']
+        label = vertex['label']
+        frame_idx = vertex['time']
         
+        # limit frames that are processed
         if not (min_frame <= frame_idx <= max_frame):
             continue
         
@@ -281,70 +330,31 @@ def get_graph_embedtrack(
     ###########
     # ADD ALL MIDDLE EDGES
     print('GET GRAPH: adding edges')
-    for i, row in df_edges.iterrows():
         
-        #prob = row['prob']
-        cost = row['cost']
-        label_curr = row['label_curr']
-        frame_curr = row['time_curr']
-        label_prev = row['label_prev']
-        frame_prev = row['time_prev']
+    # iterate over every timeframe
+    for time_curr in df_edges.time_curr.unique():
         
-        if (frame_prev < min_frame) or (frame_curr > max_frame):
+        # limit frames that are processed
+        if (time_curr <= min_frame) or (time_curr > max_frame):
             continue
         
-        # no appearance in the sequence
-        graph.add_edge(frame_curr, label_curr, frame_prev, label_prev, cost)
-        
-        
-    ###########
-    # ADD VIRTUAL edges
-    # for every edge, that has no connection to t+1:
-    #     - add adges to three closest edges (up to 'limit_dist')
-    #     - the edge prob is 'virtual_edge_prob' (=.25)
-    # TODO: replace with creating full graph only based on a distance
-    #       virtual edges then only supports predicted 
-    
-    print(f'GET GRAPH: entangling to {n_neighbours} neighbours')
-    if n_neighbours > 0:
-        
-        # to_me, from_me = [], []   # REMOVE
-        for v_idx in graph.vertex_map.keys():
-
-            if len(graph.edge_to_me[v_idx]) < n_neighbours:
-                from_me_ = [graph.edges[e_idx][1] for e_idx in graph.edge_from_me[v_idx]]
-
-                frame_idx, label, x2, y2 = graph.vertex_map[v_idx]
-                if frame_idx == min_frame:
-                    continue
+        # iterate over all edges
+        time_curr_df_mask = df_edges.time_curr == time_curr
+        for label_curr in df_edges[df_edges.time_curr == time_curr].label_curr.unique():
+            
+            view = df_edges[(time_curr_df_mask) & (df_edges.label_curr == label_curr)].nsmallest(n_edges, 'distance')
 
-                # to_me.append(v_idx)  REMOVE
-                neighbors = graph.knn(frame_idx-1, x2, y2,
-                                      n=n_neighbours,
-                                      limit_dist=limit_dist)  # returns list of v_idxs
+            for _, edge in view.iterrows():
+                
+                dist = edge['distance']
+                label_curr = edge['label_curr']
+                frame_curr = edge['time_curr']
+                label_prev = edge['label_prev']
+                frame_prev = edge['time_prev']               
 
-                # add virtual edges
-                for n_v_idx in neighbors:
-                    
-                    # skip already included neighbours
-                    if n_v_idx in from_me_:
-                        continue
-
-                    frame_prev, label_prev, x1, y1 = graph.vertex_map[n_v_idx]
-
-                    graph.add_edge(frame_idx,
-                                   label,
-                                   frame_prev,
-                                   label_prev,
-                                   virtual_edge_prob)
-
-                    ''' REMOVE
-                    cost = get_distance_cost(x1, y1, x2, y2,
-                                             mean_dist=mean_dist,
-                                             max_dist=max_dist,
-                                             min_cost=edge_min_cost)
-                    graph.add_edge(frame_idx, label, frame_prev, label_prev, cost)
-                    '''
+                # add the candidate edge, its distance serves as the cost
+                graph.add_edge(frame_curr, label_curr, frame_prev, label_prev, dist)
+        
 
     ''' OUTPUT '''
     indexes = graph.vertex_index, graph.edge_index
@@ -354,6 +364,13 @@ def get_graph_embedtrack(
     return indexes, probs, graph.edges, move_edges, graph.vertex_map
 
 
+def softmax(arr):
+    return np.exp(arr) / np.sum(np.exp(arr))
+
+def sigmoid(x):
+    return 1 / (1 + math.exp(-x))
+
+
 def get_distance_cost(dist,
                       mean_dist=10,
                       max_dist=20,
@@ -392,21 +409,32 @@ def edge_cost(prob, x1, y1, x2, y2, lbd=.5, mean_dist=10, max_dist=20, min_cost=
     return lbd * dst_cost + (1 - lbd) * dvg_cost
 
 
-def graph2tracking(graph,
-                   max_cost=10000,
-                   lbd=.5,
-                   vertex_thr=0.95,
-                   mean_dist=10,
-                   max_dist=20,
-                   edge_min_cost=-1):
+def graph2tracking_bk(graph,
+                      lbd1=1,
+                      lbd2=1,    
+                      lbd3=15,     # max move distance
+                      thT=1,       # appearance / disappearance in the first / last thT frames
+                      thS=0, 
+                      limit_dist=50,
+                      ME = -0.69,
+                      MV = -0.69,
+                      MULTV = 2,
+                      zero_exp=300
+):
+    """
+    translates the candidate graph into libct tracking.txt records
+    and maps detection probabilities and distances to costs
+    """
     
+    CLOSE_TO_ZERO = 10**-zero_exp
+        
     # decompose graph
     indexes, probs, edges, move_edges, vertex_map = graph
 
     vertex_index, edge_index = indexes
     edge_prob, vertex_prob = probs
     edge_to, edge_from = move_edges
-
+        
     # variables
     tracking = [] 
 
@@ -421,60 +449,85 @@ def graph2tracking(graph,
     for v_idx in tqdm(vertex_map.keys()):
         
         frame_idx, label, x, y = vertex_map[v_idx]
-        prob = vertex_prob[v_idx]
+        
+        # TODO: expose the 0.95 threshold as a parameter
+        
+        prob0 = vertex_prob[v_idx]
+        
+        # map vertex prob
+        THR_VERTEX_PROB = 0.95
+        prob = sigmoid((prob0 - THR_VERTEX_PROB)*40)
         
         # compute cost
-        cost = vertex_cost(prob, vertex_thr=vertex_thr)
+        cost = MULTV * (-np.log(prob) + MV)
+        cost = MULTV * (-np.log(2*prob))
         
-        #unique_id = f'V_{time}_{label}' 
         unique_id = f'1{frame_idx:05d}{label:05d}' 
-        
         assert len(unique_id) == 11, unique_id
         
+        '''
+        # FIXED VERTEX PROB experiment
+        if prob0 > THR_VERTEX_PROB:
+            cost = -35
+        else:
+            cost = 350000
+        '''
+        #cost = 0
+        
         tracking.append(f'H {frame_idx-frame_shift} {unique_id} {cost} {x} {y}')
         
-        # no appearance and diappearance -> cost 10000
-        app_cost, disapp_cost = max_cost, max_cost
-        if (frame_idx == first_frame_index):
-            app_cost = 0.
-        if (frame_idx == last_frame_index):
-            disapp_cost = 0.
+        app_prob = CLOSE_TO_ZERO
+        disapp_prob = CLOSE_TO_ZERO
+        
+        # app/disapp on the first/last frame is for free
+        dt0 = frame_idx - frame_shift
+        if ( dt0 < thT):
+            app_prob = np.exp(-dt0 / lbd1) 
+        dtT = last_frame_index - frame_idx
+        if (dtT < thT):
+            disapp_prob = np.exp(-dtT / lbd2) 
+            
+        app_cost = - np.log(app_prob)
+        disapp_cost = - np.log(disapp_prob)
 
         # add appearance and disappearance
         unique_id_app = f'4{frame_idx:05d}{label:05d}' 
         unique_id_dis = f'5{frame_idx:05d}{label:05d}' 
         tracking.append(f'APP {unique_id_app} {unique_id} {app_cost}')
         tracking.append(f'DISAPP {unique_id_dis} {unique_id} {disapp_cost}')
+        
+        # additional conflict set
+        #if prob0 >= THR_VERTEX_PROB:
+        #    tracking.append(f'CONFSET {unique_id} <= 1')
 
     edges_ = {}
+    
     for e_idx in edges.keys():
         
         v_idx_curr, v_idx_prev  = edges[e_idx]
         
-        # appearance
+        # sink and source vertices
         if v_idx_prev in [0, 1]:
             continue
             
         if v_idx_curr == 0:
             continue
+    
         
         frame_idx_prev, label_prev, x1, y1 = vertex_map[v_idx_prev]
-        frame_idx_curr, label_curr, x2, y2 = vertex_map[v_idx_curr]
-
-        dist = np.linalg.norm(np.array([x2 - x1, y2 - y1]))
-
-        # embed cost - bonus term, only negative
-        embed_cost = min(edge_prob[e_idx], 0)
-        embed_cost = edge_prob[e_idx]
-
-        # dist cost - standard term, zero for ~.95 percentile
-        dist_cost = get_distance_cost(dist,
-                                      mean_dist=mean_dist,
-                                      max_dist=max_dist,
-                                      min_cost=edge_min_cost)
+        frame_idx_curr, label_curr, x2, y2 = vertex_map[v_idx_curr]           
+        
+        dist = edge_prob[e_idx]
+        
 
-        # final cost, weighted by lambda
-        cost = lbd * dist_cost + (1-lbd) * embed_cost
+        
+        prob = np.exp(-dist / lbd3)
+        edge_cost = -np.log(prob) + ME
+        edge_cost = -np.log(2*prob) 
+        
+        # limit long edges
+        if edge_cost > limit_dist:
+            continue
 
         # create the edge descriptor
         seg_id_right = f'1{frame_idx_curr:05d}{label_curr:05d}' 
@@ -483,20 +536,25 @@ def graph2tracking(graph,
         assert len(unique_id) == 23, unique_id
         
         # update tracking
-        tracking.append(f'MOVE {unique_id} {seg_id_left} {seg_id_right} {cost}')
+        tracking.append(f'MOVE {unique_id} {seg_id_left} {seg_id_right} {edge_cost}')
 
         # propagate info to compute division events later on
         edges_[seg_id_left] = edges_.get(seg_id_left, [])
-        edges_[seg_id_left].append((seg_id_right, cost, dist_cost))
+        edges_[seg_id_left].append((seg_id_right, edge_cost))
 
     # division
     div_idx = 0
     for id_left in tqdm(edges_.keys(), 'divisions'):
         
         right_ids = edges_[id_left]
+        
+        # keep only the three cheapest outgoing edges
+        right_ids.sort(key=lambda a: a[1])
+        right_ids = right_ids[:3]
                 
-        for id_right1, cost1, dist_cost1 in right_ids:
-            for id_right2, cost2, dist_cost2 in right_ids:
+        for id_right1, cost1 in right_ids:
+            for id_right2, cost2 in right_ids:
+    
                 if id_right1 >= id_right2:
                     continue
                     
@@ -504,20 +562,13 @@ def graph2tracking(graph,
                 unique_id = f'3{id_left}{id_right1}{id_right2}'
                 assert len(unique_id) == 34, unique_id
                 
-                # experimental cost
-                #cost = - max(cost1, cost2)
-                
-                # experimental cost
-                # penalize if costs are different
-                #diff_penalty = (dist_cost1 - dist_cost2)**2
-                #cost = (cost1 + cost2) // 2 + diff_penalty / 10
-
-                cost = (cost1 + cost2) // 2
+                cost_div = (cost1 + cost2) / 2
                 
-                if cost > 1:
+                # to reduce a candidate tree size
+                if cost_div  > 1:
                     continue
                 
-                tracking.append(f'DIV {unique_id} {id_left} {id_right1} {id_right2} {cost}')
+                tracking.append(f'DIV {unique_id} {id_left} {id_right1} {id_right2} {cost_div}')
                                 
                 div_idx += 1
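A numeric sketch of the effective cost mapping used above. Note that in the code as written the second assignment of cost and edge_cost overwrites the first, so the MV and ME offsets currently have no effect; the sketch reproduces only the formulas that take effect, with invented probabilities and distances.

import numpy as np

def sigmoid(x):
    return 1 / (1 + np.exp(-x))

THR_VERTEX_PROB, MULTV, lbd3 = 0.95, 2, 15

# vertex cost: sharpen the detection probability around the 0.95 threshold
for prob0 in (0.99, 0.95, 0.80):
    prob = sigmoid((prob0 - THR_VERTEX_PROB) * 40)
    cost = MULTV * (-np.log(2 * prob))
    print(f'vertex prob0={prob0:.2f} -> cost={cost:+.2f}')

# edge cost: exponential decay of the centroid distance
for dist in (2.0, 15.0, 40.0):
    prob = np.exp(-dist / lbd3)
    edge_cost = -np.log(2 * prob)
    print(f'edge dist={dist:4.1f} -> cost={edge_cost:+.2f}')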
                 
@@ -529,10 +580,50 @@ def save_tracking(tracking, data_path):
     
     tracking_path = os.path.join(data_path, 'tracking.txt')
     with open(tracking_path, 'w', encoding='utf8') as f:
+        
         line_end = repeat("\n")
         lines = chain.from_iterable(zip(tracking, line_end))
         f.writelines(lines)
         
+        # consistency check of the generated tracking records
+        lines = chain.from_iterable(zip(tracking, line_end))
+        
+        div, move = 0, 0
+        hyp = set()
+        
+        for line in lines:
+            tokens = line.split(' ')
+            
+            if tokens[0] == 'H':
+                hyp.add(tokens[2])
+                
+            
+            if tokens[0] == 'MOVE':
+                time1 = tokens[2][1:6]
+                time2 = tokens[3][1:6]
+                
+                assert tokens[2] in hyp
+                assert tokens[3] in hyp
+
+                assert int(time1) + 1 == int(time2), f'not correct {line}'
+                
+                move += 1
+                
+            if tokens[0] == 'DIV':
+                time1 = tokens[2][1:6]
+                time2 = tokens[3][1:6]
+                time3 = tokens[4][1:6]
+                
+                assert tokens[2] in hyp
+                assert tokens[3] in hyp
+                assert tokens[4] in hyp
+
+                assert int(time1) + 1 == int(time2), f'not correct {line}'
+                assert int(time1) + 1 == int(time3), f'not correct {line}'
+                
+                div += 1
+                
+        print(f'consistency check passed: {move} MOVE and {div} DIV records')
         
         
 def sol_stats(sol_path,
@@ -594,7 +685,7 @@ def sol_stats(sol_path,
             elif key == 'DIV':
                 unique_id, cost = tokens[1], tokens[5]
             else:
-                assert False, key
+                continue
                 
             tra[key][unique_id] = cost
             all_tra[key].append(cost)
@@ -649,7 +740,7 @@ def sol_stats(sol_path,
                      histtype='step',
                      density=True,
                      bins=100)
-        plt.legend(sol.keys())
+        #plt.legend(sol.keys())
         plt.title('tracking.sol')
         plt.xlim((-5, 5))
         plt.ylim((0, 5))
@@ -666,7 +757,7 @@ def sol_stats(sol_path,
                      histtype='step',
                      density=True,
                      bins=100)
-        plt.legend(sol.keys())
+        #plt.legend(sol.keys())
         plt.title('tracking.txt')
         plt.xlim((-5, 5))
         plt.ylim((0, 5))
@@ -677,4 +768,4 @@ def sol_stats(sol_path,
 
         
             
-        
\ No newline at end of file
+        
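For reference, a sketch of the plain-text records that graph2tracking_bk and save_tracking above write into tracking.txt for libct. Detection ids concatenate a one-digit record type, a five-digit frame index and a five-digit label; MOVE ids prepend '2' to the two detection ids (23 characters) and DIV ids prepend '3' to three of them (34 characters). The frames, labels, coordinates and costs below are invented.

frame_prev, label_prev = 3, 12
frame_curr, label_curr = 4, 7

h_prev = f'1{frame_prev:05d}{label_prev:05d}'    # 11-char detection id
h_curr = f'1{frame_curr:05d}{label_curr:05d}'
move_id = f'2{h_prev}{h_curr}'                   # 23-char move id
assert len(h_prev) == 11 and len(move_id) == 23

print('\n'.join([
    f'H {frame_prev} {h_prev} -1.02 100.5 80.2',
    f'H {frame_curr} {h_curr} -0.84 103.0 82.1',
    f'APP 4{frame_curr:05d}{label_curr:05d} {h_curr} 690.78',      # ~ -log(1e-300)
    f'DISAPP 5{frame_prev:05d}{label_prev:05d} {h_prev} 690.78',
    f'MOVE {move_id} {h_prev} {h_curr} 0.31',
]))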
diff --git a/tracking/sys_tools.py b/tracking/sys_tools.py
index 3975326..5f76ba0 100644
--- a/tracking/sys_tools.py
+++ b/tracking/sys_tools.py
@@ -3,8 +3,9 @@ import sys
 
 def run_procedure(args):
 
-    print(f'running procedure: ${args}')
+    
     status_output = subprocess.call(args.split(' '))
-    res = 'OK:' if status_output == 0 else 'ERROR:'
+    if status_output != 0:
+        print(f'ERROR: procedure "{args}" failed with exit status {status_output}')
 
     return status_output
-- 
GitLab