[
  {
    "path": "Finetune/AbdomenAtlas/Atlas_test.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\r\n# Licensed under the Apache License, Version 2.0 (the \"License\");\r\n# you may not use this file except in compliance with the License.\r\n# You may obtain a copy of the License at\r\n#     http://www.apache.org/licenses/LICENSE-2.0\r\n# Unless required by applicable law or agreed to in writing, software\r\n# distributed under the License is distributed on an \"AS IS\" BASIS,\r\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n# See the License for the specific language governing permissions and\r\n# limitations under the License.\r\n\r\nimport argparse\r\nimport os\r\nfrom functools import partial\r\nimport nibabel as nib\r\nimport numpy as np\r\nimport torch\r\nimport torch.nn.functional as F\r\nfrom torch.cuda.amp import GradScaler, autocast\r\nfrom dataset.dataloader_test import get_test_loader_Atlas\r\nimport SimpleITK as sitk\r\nfrom monai.inferers import sliding_window_inference\r\n# from monai.data import decollate_batch\r\nfrom monai.losses import DiceCELoss\r\nfrom monai.metrics import DiceMetric\r\nfrom monai.networks.nets import SwinUNETR\r\nfrom monai.transforms import *\r\nfrom monai.utils.enums import MetricReduction\r\nfrom monai.handlers import StatsHandler, from_engine\r\nimport matplotlib.pyplot as plt\r\nfrom utils.utils import *\r\nfrom PIL import Image\r\nfrom monai import data, transforms\r\nfrom monai.data import *\r\n\r\nimport resource\r\n\r\nrlimit = resource.getrlimit(resource.RLIMIT_NOFILE)\r\nresource.setrlimit(resource.RLIMIT_NOFILE, (8192, rlimit[1]))\r\nprint('Setting resource limit:', str(resource.getrlimit(resource.RLIMIT_NOFILE)))\r\n\r\nos.environ['MASTER_ADDR'] = 'localhost'\r\nos.environ['MASTER_PORT'] = '28890'\r\n\r\nparser = argparse.ArgumentParser(description=\"Swin UNETR segmentation pipeline\")\r\nparser.add_argument(\r\n    \"--test_data_path\", default=\"./test_examples/AbdomenAtlasTest/\", type=str, 
help=\"test_data_path\")\r\nparser.add_argument(\r\n    \"--save_prediction_path\", default=\"./test_examples/AbdomenAtlasPredict/\", type=str, help=\"test_prediction_path\")\r\nparser.add_argument(\r\n    \"--trained_pth\", default=\"./runs/logs/model_val50_91.88.pt\", type=str, help=\"trained checkpoint directory\")\r\n\r\nroi = 96\r\nparser.add_argument(\"--use_normal_dataset\", default=True, help=\"use monai Dataset class\")\r\nparser.add_argument(\"--feature_size\", default=48, type=int, help=\"feature size\")\r\nparser.add_argument(\"--batch_size\", default=1, type=int, help=\"number of batch size\")\r\nparser.add_argument(\"--sw_batch_size\", default=4, type=int, help=\"number of sliding window batch size\")\r\nparser.add_argument(\"--infer_overlap\", default=0.75, type=float, help=\"sliding window inference overlap\")\r\nparser.add_argument(\"--in_channels\", default=1, type=int, help=\"number of input channels\")\r\nparser.add_argument(\"--out_channels\", default=10, type=int, help=\"number of output channels\")\r\nparser.add_argument(\"--a_min\", default=-175.0, type=float, help=\"a_min in ScaleIntensityRanged\")\r\nparser.add_argument(\"--a_max\", default=250.0, type=float, help=\"a_max in ScaleIntensityRanged\")\r\nparser.add_argument(\"--b_min\", default=0.0, type=float, help=\"b_min in ScaleIntensityRanged\")\r\nparser.add_argument(\"--b_max\", default=1.0, type=float, help=\"b_max in ScaleIntensityRanged\")\r\nparser.add_argument(\"--space_x\", default=1.5, type=float, help=\"spacing in x direction\")\r\nparser.add_argument(\"--space_y\", default=1.5, type=float, help=\"spacing in y direction\")\r\nparser.add_argument(\"--space_z\", default=2.0, type=float, help=\"spacing in z direction\")\r\nparser.add_argument(\"--roi_x\", default=roi, type=int, help=\"roi size in x direction\")\r\nparser.add_argument(\"--roi_y\", default=roi, type=int, help=\"roi size in y direction\")\r\nparser.add_argument(\"--roi_z\", default=roi, type=int, help=\"roi size in z 
direction\")\r\nparser.add_argument(\"--dropout_rate\", default=0.0, type=float, help=\"dropout rate\")\r\nparser.add_argument(\"--distributed\", action=\"store_true\", help=\"start distributed training\")\r\nparser.add_argument(\"--workers\", default=16, type=int, help=\"number of workers\")\r\nparser.add_argument(\"--spatial_dims\", default=3, type=int, help=\"spatial dimension of input data\")\r\nparser.add_argument(\"--use_checkpoint\", default=True, help=\"use gradient checkpointing to save memory\")\r\n\r\n\r\ndef main():\r\n    args = parser.parse_args()\r\n\r\n    test_loader, test_transforms = get_test_loader_Atlas(args)\r\n\r\n    model = SwinUNETR(\r\n        img_size=(args.roi_x, args.roi_y, args.roi_z),\r\n        in_channels=args.in_channels,\r\n        out_channels=args.out_channels,\r\n        feature_size=args.feature_size,\r\n        drop_rate=0.0,\r\n        attn_drop_rate=0.0,\r\n        dropout_path_rate=0.0,\r\n        use_checkpoint=args.use_checkpoint,\r\n        use_v2=True\r\n    )\r\n    inf_size = [args.roi_x, args.roi_y, args.roi_z]\r\n    model_inferer = partial(\r\n        sliding_window_inference,\r\n        roi_size=inf_size,\r\n        sw_batch_size=args.sw_batch_size,\r\n        predictor=model,\r\n        overlap=args.infer_overlap,\r\n    )\r\n\r\n    device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\r\n    model_dict = torch.load(args.trained_pth)[\"state_dict\"]\r\n    model.load_state_dict(model_dict, strict=True)\r\n    model.eval()\r\n    model.to(device)\r\n\r\n    # enable cuDNN benchmark\r\n    torch.backends.cudnn.benchmark = True\r\n\r\n    post_transforms = Compose([EnsureTyped(keys=[\"pred\"]),\r\n                               Invertd(keys=[\"pred\"],\r\n                                       transform=test_transforms,\r\n                                       orig_keys=\"image\",\r\n                                       meta_keys=\"pred_meta_dict\",\r\n                                       
orig_meta_keys=\"image_meta_dict\",\r\n                                       meta_key_postfix=\"meta_dict\",\r\n                                       nearest_interp=True,\r\n                                       to_tensor=True),\r\n                               AsDiscreted(keys=\"pred\", argmax=False, to_onehot=args.out_channels),\r\n                               ])\r\n\r\n    with torch.no_grad():\r\n        for idx, batch_data in enumerate(test_loader):\r\n            torch.cuda.empty_cache()\r\n            # data = batch_data[\"image\"].cuda()\r\n\r\n            data = batch_data[\"image\"]\r\n            data = data.cuda()\r\n\r\n            with autocast(enabled=True):\r\n                if model_inferer is not None:\r\n                    logits = model_inferer(data)\r\n                else:\r\n                    logits = model(data)\r\n\r\n            batch_data['pred'] = logits.argmax(1)\r\n            batch_data = post_transforms(batch_data)\r\n\r\n            save_pred_dir = os.path.join(args.save_prediction_path, batch_data['name'][0], 'predictions')\r\n            check_dir(save_pred_dir)\r\n\r\n            organ_ls = [\"aorta\", \"gall_bladder\", \"kidney_left\", \"kidney_right\", \"liver\", \"pancreas\", \"postcava\",\r\n                        \"spleen\", \"stomach\"]\r\n\r\n            for idx, organ_name in enumerate(organ_ls):\r\n                organ = batch_data['pred'][idx+1, :, :, :]\r\n                batch_data['organ'] = organ\r\n                save_transforms = Compose([SaveImaged(keys=\"organ\", meta_keys=\"pred_meta_dict\", output_dir=save_pred_dir,\r\n                                separate_folder=False, folder_layout=None, output_postfix=organ_name,\r\n                                resample=False)])\r\n                save_transforms(batch_data)\r\n                os.rename(os.path.join(save_pred_dir, 'ct_'+organ_name+'.nii.gz'), os.path.join(save_pred_dir, organ_name+'.nii.gz'))\r\n\r\n\r\nif __name__ == \"__main__\":\r\n    
main()"
  },
  {
    "path": "Finetune/AbdomenAtlas/Atlas_test.sh",
    "content": "test_data_path=./test_examples/AbdomenAtlasTest/\nsave_prediction_path=./test_examples/AbdomenAtlasPredict/\n\ntorchrun --master_port=21472 Atlas_test.py \\\n    --test_data_path $test_data_path --save_prediction_path $save_prediction_path"
  },
  {
    "path": "Finetune/AbdomenAtlas/check.py",
    "content": "import torch\r\nimport os\r\nfrom tqdm import tqdm\r\nimport numpy as np\r\nfrom utils.utils import *\r\nfrom PIL import Image\r\nimport matplotlib.pyplot as plt\r\n\r\n\r\ndef read(img, transpose=False):\r\n    img = sitk.ReadImage(img)\r\n    direction = img.GetDirection()\r\n    origin = img.GetOrigin()\r\n    Spacing = img.GetSpacing()\r\n\r\n    img = sitk.GetArrayFromImage(img)\r\n    if transpose:\r\n        img = img.transpose(1, 2, 0)\r\n\r\n    return img, direction, origin, Spacing\r\n\r\n\r\ndef vis():\r\n    path = 'D:\\data\\cache\\Atlas'\r\n    ls = os.listdir(path)\r\n    num = 0\r\n\r\n    for i in ls:\r\n        data = torch.load(os.path.join(path, i))\r\n        img, lab = data['image'], data['label']\r\n        print(img.shape, lab.shape)\r\n        img = img[0].data.cpu().numpy()\r\n        #\r\n        # lab_bg = lab.sum(0).unsqueeze(0)\r\n        #\r\n        # la = lab.argmax(0).unsqueeze(0)\r\n        # la += 1\r\n        # la[lab_bg == 0] = 0\r\n        #\r\n        lab = lab[0].data.cpu().numpy()\r\n\r\n        cls_set = list(np.unique(lab))\r\n        print(cls_set)\r\n\r\n        h, w, c = img.shape\r\n        cmap = color_map()\r\n\r\n        for j in range(c):\r\n            im = img[:, :, j]\r\n            la = lab[:, :, j]\r\n\r\n            if len(list(np.unique(la))) > 5:\r\n                im = (255 * im).astype(np.uint8)\r\n\r\n                la = Image.fromarray(la.astype(np.uint8), mode='P')\r\n                la.putpalette(cmap)\r\n                num += 1\r\n\r\n                fig, axs = plt.subplots(1, 2, figsize=(16, 5))\r\n                axs[0].imshow(im, cmap='gray')\r\n                axs[0].axis(\"off\")\r\n\r\n                axs[1].imshow(la)\r\n                axs[1].axis(\"off\")\r\n\r\n                plt.tight_layout()\r\n                plt.show()\r\n                plt.close()\r\n\r\n\r\ndef check_original():\r\n    path = 'D:\\data\\cache\\Atlas\\BDMAP_00000870/'\r\n    img = read(path + 
'ct.nii.gz', True)[0]\r\n    gt = read(path + 'label.nii.gz', True)[0]\r\n\r\n    label_path = path+'segmentations'\r\n    organ_ls = [\"aorta\", \"gall_bladder\", \"kidney_left\", \"kidney_right\", \"liver\", \"pancreas\", \"postcava\", \"spleen\",\r\n                \"stomach\"]\r\n\r\n    lab = []\r\n    for i in organ_ls:\r\n        la = read(label_path + '/' + i + '.nii.gz', True)[0]\r\n        la = np.expand_dims(la, 0)\r\n        lab.append(la)\r\n\r\n    labs = np.concatenate(lab, 0)\r\n\r\n    print(img.shape, labs.shape)\r\n\r\n    lab_bg = labs.sum(0)\r\n\r\n    print(np.unique(labs.sum(0)))\r\n    lab = labs.argmax(0)\r\n    lab += 1\r\n    lab[lab_bg == 0] = 0\r\n\r\n    print(np.unique(lab))\r\n    h, w, c = img.shape\r\n    cmap = color_map()\r\n\r\n    for j in range(c):\r\n        im = img[:, :, j]\r\n        la = lab[:, :, j]\r\n        g = gt[:, :, j]\r\n\r\n        if len(list(np.unique(la))) > 1:\r\n            im = (255 * im).astype(np.uint8)\r\n\r\n            la = Image.fromarray(la.astype(np.uint8), mode='P')\r\n            la.putpalette(cmap)\r\n\r\n            g = Image.fromarray(g.astype(np.uint8), mode='P')\r\n            g.putpalette(cmap)\r\n\r\n            fig, axs = plt.subplots(1, 3, figsize=(16, 5))\r\n            axs[0].imshow(im, cmap='gray')\r\n            axs[0].axis(\"off\")\r\n\r\n            axs[1].imshow(la)\r\n            axs[1].axis(\"off\")\r\n\r\n            axs[2].imshow(g)\r\n            axs[2].axis(\"off\")\r\n\r\n            plt.tight_layout()\r\n            plt.show()\r\n            plt.close()\r\n\r\n\r\ndef exe(path):\r\n    root = '/project/medimgfmod/CT/AbdomenAtlasMini1.0/'\r\n    path = root + path\r\n    label_path = path + '/segmentations'\r\n\r\n    organ_ls = [\"aorta\", \"gall_bladder\", \"kidney_left\", \"kidney_right\", \"liver\", \"pancreas\", \"postcava\", \"spleen\", \"stomach\"]\r\n\r\n    lab = []\r\n    for i in organ_ls:\r\n        la, direction, origin, Spacing = read(label_path + '/' + 
i+'.nii.gz')\r\n        la = np.expand_dims(la, 0)\r\n        lab.append(la)\r\n\r\n    labs = np.concatenate(lab, 0)\r\n    lab_bg = labs.sum(0)\r\n\r\n    lab = labs.argmax(0)\r\n    lab += 1\r\n    lab[lab_bg == 0] = 0\r\n\r\n    new = sitk.GetImageFromArray(lab)\r\n    new.SetDirection(direction)\r\n    new.SetOrigin(origin)\r\n    new.SetSpacing(Spacing)\r\n    sitk.WriteImage(new, path + '/' + 'label.nii.gz')\r\n    print('save:', path + '/' + 'label.nii.gz')\r\n\r\n\r\ndef trans_lab(path):\r\n    organ_ls = [\"aorta\", \"gall_bladder\", \"kidney_left\", \"kidney_right\", \"liver\", \"pancreas\", \"postcava\", \"spleen\",\r\n                \"stomach\"]\r\n    lab = []\r\n    for i in organ_ls:\r\n        la = read(path + '/' + i + '.nii.gz', True)[0]\r\n        la = np.expand_dims(la, 0)\r\n        lab.append(la)\r\n\r\n    labs = np.concatenate(lab, 0)\r\n\r\n    lab_bg = labs.sum(0)\r\n    lab = labs.argmax(0)\r\n    lab += 1\r\n    lab[lab_bg == 0] = 0\r\n    return lab\r\n\r\n\r\ndef check_pred_vis():\r\n    path = 'test_examples/AbdomenAtlasPredict/BDMAP_A0000002/predictions'\r\n    path_temp = 'test_examples/AbdomenAtlasPredict_temp/BDMAP_A0000002/predictions'\r\n\r\n    pred, pred_temp = trans_lab(path), trans_lab(path_temp)\r\n    print(np.unique(pred), np.unique(pred_temp))\r\n\r\n    h, w, c = pred.shape\r\n    cmap = color_map()\r\n\r\n    for j in range(c):\r\n        la = pred[:, :, j]\r\n        g = pred_temp[:, :, j]\r\n\r\n        if len(list(np.unique(la))) > 5:\r\n\r\n            la = Image.fromarray(la.astype(np.uint8), mode='P')\r\n            la.putpalette(cmap)\r\n\r\n            g = Image.fromarray(g.astype(np.uint8), mode='P')\r\n            g.putpalette(cmap)\r\n\r\n            fig, axs = plt.subplots(1, 2, figsize=(16, 5))\r\n\r\n            axs[0].imshow(la)\r\n            axs[0].axis(\"off\")\r\n\r\n            axs[1].imshow(g)\r\n            axs[1].axis(\"off\")\r\n\r\n            plt.tight_layout()\r\n            plt.show()\r\n  
          plt.close()\r\n\r\n\r\ndef check_pred_acc():\r\n    root = '/project/medimgfmod/CT/AbdomenAtlasMini1.0/'\r\n    ls = os.listdir(root)\r\n\r\n    num = np.zeros(9)\r\n    from utils.utils import dice\r\n    all_dice = None\r\n\r\n    for i in ls:\r\n        path = root + i\r\n        label_path = path + '/segmentations'\r\n        lab = trans_lab(label_path)\r\n\r\n        pred_path = os.path.join('./test_examples/AbdomenAtlasPredict_train/' + i, 'predictions')\r\n        pred = trans_lab(pred_path)\r\n\r\n        dice_list_sub = []\r\n\r\n        for i in range(1, 10):\r\n            num[i - 1] += (np.sum(lab == i) > 0).astype(np.uint8)\r\n            organ_Dice = dice(pred == i, lab == i)\r\n            dice_list_sub.append(organ_Dice)\r\n\r\n        if all_dice is None:\r\n            all_dice = (np.asarray(dice_list_sub)).copy()\r\n        else:\r\n            all_dice = all_dice + np.asarray(dice_list_sub)\r\n        print(\"Organ Dice accumulate:\", (all_dice / num), (all_dice / num).mean())\r\n\r\n\r\nif __name__=='__main__':\r\n    # vis()\r\n    # check_pred_acc()\r\n\r\n    # the path to Atlas train\r\n    path = '/project/medimgfmod/CT/AbdomenAtlasMini1.0/'\r\n    ls = os.listdir(path)\r\n    import multiprocessing\r\n    with multiprocessing.Pool(20) as pool:\r\n        pool.map(exe, ls, 1)\r\n"
  },
  {
    "path": "Finetune/AbdomenAtlas/dataset/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/AbdomenAtlas/dataset/dataloader_bdmap.py",
    "content": "from monai.transforms import *\n\nimport sys\nimport nibabel as nib\nimport os\nimport torch\nimport numpy as np\nfrom typing import Optional, Union\nimport math\nimport pickle\nfrom monai.data import *\nfrom monai.data import DataLoader, Dataset, list_data_collate, DistributedSampler, CacheDataset, SmartCacheDataset\nfrom monai.config import DtypeLike, KeysCollection\nfrom monai.transforms.transform import MapTransform\nfrom monai.transforms.io.array import LoadImage\nfrom monai.utils import ensure_tuple, ensure_tuple_rep\nfrom monai.data.image_reader import ImageReader\nfrom monai.utils.enums import PostFix\nfrom utils.data_trans import *\n\n\nDEFAULT_POST_FIX = PostFix.meta()\n\n# class map for the AbdomenAtlas 1.0 dataset\nclass_map_abdomenatlas_1_0 = {\n    0: \"aorta\",\n    1: \"gall_bladder\",\n    2: \"kidney_left\",\n    3: \"kidney_right\",\n    4: \"liver\",\n    5: \"pancreas\",\n    6: \"postcava\",\n    7: \"spleen\",\n    8: \"stomach\",\n}\n\n# class map for the AbdomenAtlas 1.1 dataset\nclass_map_abdomenatlas_1_1 = {\n    0: 'aorta',\n    1: 'gall_bladder',\n    2: 'kidney_left',\n    3: 'kidney_right',\n    4: 'liver',\n    5: 'pancreas',\n    6: 'postcava',\n    7: 'spleen',\n    8: 'stomach',\n    9: 'adrenal_gland_left',\n    10: 'adrenal_gland_right',\n    11: 'bladder',\n    12: 'celiac_truck',\n    13: 'colon',\n    14: 'duodenum',\n    15: 'esophagus',\n    16: 'femur_left',\n    17: 'femur_right',\n    18: 'hepatic_vessel',\n    19: 'intestine',\n    20: 'lung_left',\n    21: 'lung_right',\n    22: 'portal_vein_and_splenic_vein',\n    23: 'prostate',\n    24: 'rectum'\n}\n\n\nclass Sampler(torch.utils.data.Sampler):\n    def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, make_even=True):\n        if num_replicas is None:\n            if not torch.distributed.is_available():\n                raise RuntimeError(\"Requires distributed package to be available\")\n            num_replicas = 
torch.distributed.get_world_size()\n        if rank is None:\n            if not torch.distributed.is_available():\n                raise RuntimeError(\"Requires distributed package to be available\")\n            rank = torch.distributed.get_rank()\n        self.shuffle = shuffle\n        self.make_even = make_even\n        self.dataset = dataset\n        self.num_replicas = num_replicas\n        self.rank = rank\n        self.epoch = 0\n        self.num_samples = int(math.ceil(len(self.dataset) * 1.0 / self.num_replicas))\n        self.total_size = self.num_samples * self.num_replicas\n        indices = list(range(len(self.dataset)))\n        self.valid_length = len(indices[self.rank: self.total_size: self.num_replicas])\n\n    def __iter__(self):\n        if self.shuffle:\n            g = torch.Generator()\n            g.manual_seed(self.epoch)\n            indices = torch.randperm(len(self.dataset), generator=g).tolist()\n        else:\n            indices = list(range(len(self.dataset)))\n        if self.make_even:\n            if len(indices) < self.total_size:\n                if self.total_size - len(indices) < len(indices):\n                    indices += indices[: (self.total_size - len(indices))]\n                else:\n                    extra_ids = np.random.randint(low=0, high=len(indices), size=self.total_size - len(indices))\n                    indices += [indices[ids] for ids in extra_ids]\n            assert len(indices) == self.total_size\n        indices = indices[self.rank: self.total_size: self.num_replicas]\n        self.num_samples = len(indices)\n        return iter(indices)\n\n    def __len__(self):\n        return self.num_samples\n\n    def set_epoch(self, epoch):\n        self.epoch = epoch\n\n\nclass LoadSelectedImaged(MapTransform):\n    \"\"\"\n    Custom transform to load a specific image and metadata using a flexible reader.\n\n    Args:\n        keys: Keys of the data dictionary to load selected images.\n        reader: Image 
reader object or string reference.\n        dtype: Data type for loaded images.\n        meta_keys: Keys to store metadata along with image data.\n        meta_key_postfix: Suffix for metadata keys.\n        overwriting: Flag to allow overwriting existing metadata.\n        image_only: Load only the image data (not metadata).\n        ensure_channel_first: Reshape image into channel-first format if necessary.\n        simple_keys: Use simplified, top-level data keys.\n        allow_missing_keys: If True, missing data keys are ignored\n    \"\"\"\n\n    def __init__(\n            self,\n            keys: KeysCollection,\n            reader: Optional[Union[ImageReader, str]] = None,\n            dtype: DtypeLike = np.float32,\n            meta_keys: Optional[KeysCollection] = None,\n            meta_key_postfix: str = DEFAULT_POST_FIX,\n            overwriting: bool = False,\n            image_only: bool = False,\n            ensure_channel_first: bool = False,\n            simple_keys: bool = False,\n            allow_missing_keys: bool = False,\n            *args,\n            **kwargs,\n    ) -> None:\n        super().__init__(keys, allow_missing_keys)\n        self._loader = LoadImage(reader, image_only, dtype, ensure_channel_first, simple_keys, *args, **kwargs)\n        if not isinstance(meta_key_postfix, str):\n            raise TypeError(f\"meta_key_postfix must be a str but is {type(meta_key_postfix).__name__}.\")\n        self.meta_keys = ensure_tuple_rep(None, len(self.keys)) if meta_keys is None else ensure_tuple(meta_keys)\n        if len(self.keys) != len(self.meta_keys):\n            raise ValueError(\"meta_keys should have the same length as keys.\")\n        self.meta_key_postfix = ensure_tuple_rep(meta_key_postfix, len(self.keys))\n        self.overwriting = overwriting\n\n    def register(self, reader: ImageReader):\n        self._loader.register(reader)\n\n    def __call__(self, data, reader: Optional[ImageReader] = None):\n        d = dict(data)\n 
       for key, meta_key, meta_key_postfix in self.key_iterator(d, self.meta_keys, self.meta_key_postfix):\n            data = self._loader(d[key], reader)\n            if self._loader.image_only:\n                d[key] = data\n            else:\n                if not isinstance(data, (tuple, list)):\n                    raise ValueError(\"loader must return a tuple or list (because image_only=False was used).\")\n                d[key] = data[0]\n                if not isinstance(data[1], dict):\n                    raise ValueError(\"metadata must be a dict.\")\n                meta_key = meta_key or f\"{key}_{meta_key_postfix}\"\n                if meta_key in d and not self.overwriting:\n                    raise KeyError(f\"Metadata with key {meta_key} already exists and overwriting=False.\")\n                d[meta_key] = data[1]\n\n        return d\n\n\ndef get_loader_Atlas(args):\n    \"\"\"\n    Creates training transforms, constructs a dataset, and returns a dataloader.\n\n    Args:\n        args: Command line arguments containing dataset paths and hyperparameters.\n    \"\"\"\n    base_trans, random_trans = get_trans(args)\n\n    train_transforms = base_trans + random_trans\n    val_transforms = base_trans\n\n    # constructing training dataset\n    train_img = []\n    label_img = []\n    # train_lbl_parents = []\n    train_name = []\n\n    for item in args.dataset_list:\n        for line in open(os.path.join(args.data_txt_path, item + '.txt')):\n            name = line.strip().split('\\t')[0]\n            train_img_path = os.path.join(args.data_dir, name, 'ct.nii.gz')\n            label_img_path = os.path.join(args.data_dir, name, 'label.nii.gz')\n\n            train_img.append(train_img_path)\n            label_img.append(label_img_path)\n            train_name.append(name)\n\n    data_dicts_train = [{'image': image, 'label': label, 'name': name}\n                        for image, label, name in zip(train_img, label_img, train_name)]\n    
print('train len {}'.format(len(data_dicts_train)))\n\n    if args.cache_dataset:\n\n        train_ds = PersistentDataset(data=data_dicts_train[:-50],\n                                          transform=train_transforms,\n                                          pickle_protocol=pickle.HIGHEST_PROTOCOL,\n                                          cache_dir=args.cache_dir)\n\n        val_ds = PersistentDataset(data=data_dicts_train[-50:],\n                                     transform=val_transforms,\n                                     pickle_protocol=pickle.HIGHEST_PROTOCOL,\n                                     cache_dir=args.cache_dir)\n    else:\n        train_ds = Dataset(data=data_dicts_train[:-50], transform=Compose(train_transforms))\n        val_ds = Dataset(data=data_dicts_train[-50:], transform=Compose(val_transforms))\n\n    # distributed sampler settings \n    train_sampler = Sampler(train_ds) if args.distributed else None\n    train_loader = DataLoader(train_ds, batch_size=args.batch_size, shuffle=(train_sampler is None),\n                              num_workers=args.workers, pin_memory=True,\n                              collate_fn=list_data_collate, sampler=train_sampler)\n\n    val_sampler = Sampler(val_ds, shuffle=False) if args.distributed else None\n    val_loader = DataLoader(\n        val_ds, batch_size=1, shuffle=False, num_workers=args.workers, sampler=val_sampler, pin_memory=True\n    )\n    loader = [train_loader, val_loader]\n    return loader\n\n\nclass Filter_Atlas_Labels(MapTransform):\n    \"\"\"Filter unsed label.\n    \"\"\"\n\n    def __call__(self, data):\n        d = dict(data)\n        for key in self.keys:\n            lab_bg = d[key].clone().sum(0).unsqueeze(0)\n\n            la = d[key].argmax(0).unsqueeze(0)\n            la += 1\n            la[lab_bg == 0] = 0\n\n            d[key] = la.float()\n\n        return d\n\n\n\n\n"
  },
  {
    "path": "Finetune/AbdomenAtlas/dataset/dataloader_test.py",
    "content": "from monai.transforms import *\n\nimport sys\nimport nibabel as nib\nimport os\nimport torch\nimport numpy as np\nfrom typing import Optional, Union\nimport math\nimport pickle\nfrom monai.data import *\nfrom monai.data import DataLoader, Dataset, list_data_collate, DistributedSampler, CacheDataset, SmartCacheDataset\nfrom monai.config import DtypeLike, KeysCollection\nfrom monai.transforms.transform import MapTransform\nfrom monai.transforms.io.array import LoadImage\nfrom monai.utils import ensure_tuple, ensure_tuple_rep\nfrom monai.data.image_reader import ImageReader\nfrom monai.utils.enums import PostFix\nfrom utils.data_trans import *\n\n\nDEFAULT_POST_FIX = PostFix.meta()\n\n# class map for the AbdomenAtlas 1.0 dataset\nclass_map_abdomenatlas_1_0 = {\n    0: \"aorta\",\n    1: \"gall_bladder\",\n    2: \"kidney_left\",\n    3: \"kidney_right\",\n    4: \"liver\",\n    5: \"pancreas\",\n    6: \"postcava\",\n    7: \"spleen\",\n    8: \"stomach\",\n}\n\n# class map for the AbdomenAtlas 1.1 dataset\nclass_map_abdomenatlas_1_1 = {\n    0: 'aorta',\n    1: 'gall_bladder',\n    2: 'kidney_left',\n    3: 'kidney_right',\n    4: 'liver',\n    5: 'pancreas',\n    6: 'postcava',\n    7: 'spleen',\n    8: 'stomach',\n    9: 'adrenal_gland_left',\n    10: 'adrenal_gland_right',\n    11: 'bladder',\n    12: 'celiac_truck',\n    13: 'colon',\n    14: 'duodenum',\n    15: 'esophagus',\n    16: 'femur_left',\n    17: 'femur_right',\n    18: 'hepatic_vessel',\n    19: 'intestine',\n    20: 'lung_left',\n    21: 'lung_right',\n    22: 'portal_vein_and_splenic_vein',\n    23: 'prostate',\n    24: 'rectum'\n}\n\n\nclass Sampler(torch.utils.data.Sampler):\n    def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, make_even=True):\n        if num_replicas is None:\n            if not torch.distributed.is_available():\n                raise RuntimeError(\"Requires distributed package to be available\")\n            num_replicas = 
torch.distributed.get_world_size()\n        if rank is None:\n            if not torch.distributed.is_available():\n                raise RuntimeError(\"Requires distributed package to be available\")\n            rank = torch.distributed.get_rank()\n        self.shuffle = shuffle\n        self.make_even = make_even\n        self.dataset = dataset\n        self.num_replicas = num_replicas\n        self.rank = rank\n        self.epoch = 0\n        self.num_samples = int(math.ceil(len(self.dataset) * 1.0 / self.num_replicas))\n        self.total_size = self.num_samples * self.num_replicas\n        indices = list(range(len(self.dataset)))\n        self.valid_length = len(indices[self.rank: self.total_size: self.num_replicas])\n\n    def __iter__(self):\n        if self.shuffle:\n            g = torch.Generator()\n            g.manual_seed(self.epoch)\n            indices = torch.randperm(len(self.dataset), generator=g).tolist()\n        else:\n            indices = list(range(len(self.dataset)))\n        if self.make_even:\n            if len(indices) < self.total_size:\n                if self.total_size - len(indices) < len(indices):\n                    indices += indices[: (self.total_size - len(indices))]\n                else:\n                    extra_ids = np.random.randint(low=0, high=len(indices), size=self.total_size - len(indices))\n                    indices += [indices[ids] for ids in extra_ids]\n            assert len(indices) == self.total_size\n        indices = indices[self.rank: self.total_size: self.num_replicas]\n        self.num_samples = len(indices)\n        return iter(indices)\n\n    def __len__(self):\n        return self.num_samples\n\n    def set_epoch(self, epoch):\n        self.epoch = epoch\n\n\nclass LoadSelectedImaged(MapTransform):\n    \"\"\"\n    Custom transform to load a specific image and metadata using a flexible reader.\n\n    Args:\n        keys: Keys of the data dictionary to load selected images.\n        reader: Image 
reader object or string reference.\n        dtype: Data type for loaded images.\n        meta_keys: Keys to store metadata along with image data.\n        meta_key_postfix: Suffix for metadata keys.\n        overwriting: Flag to allow overwriting existing metadata.\n        image_only: Load only the image data (not metadata).\n        ensure_channel_first: Reshape image into channel-first format if necessary.\n        simple_keys: Use simplified, top-level data keys.\n        allow_missing_keys: If True, missing data keys are ignored\n    \"\"\"\n\n    def __init__(\n            self,\n            keys: KeysCollection,\n            reader: Optional[Union[ImageReader, str]] = None,\n            dtype: DtypeLike = np.float32,\n            meta_keys: Optional[KeysCollection] = None,\n            meta_key_postfix: str = DEFAULT_POST_FIX,\n            overwriting: bool = False,\n            image_only: bool = False,\n            ensure_channel_first: bool = False,\n            simple_keys: bool = False,\n            allow_missing_keys: bool = False,\n            *args,\n            **kwargs,\n    ) -> None:\n        super().__init__(keys, allow_missing_keys)\n        self._loader = LoadImage(reader, image_only, dtype, ensure_channel_first, simple_keys, *args, **kwargs)\n        if not isinstance(meta_key_postfix, str):\n            raise TypeError(f\"meta_key_postfix must be a str but is {type(meta_key_postfix).__name__}.\")\n        self.meta_keys = ensure_tuple_rep(None, len(self.keys)) if meta_keys is None else ensure_tuple(meta_keys)\n        if len(self.keys) != len(self.meta_keys):\n            raise ValueError(\"meta_keys should have the same length as keys.\")\n        self.meta_key_postfix = ensure_tuple_rep(meta_key_postfix, len(self.keys))\n        self.overwriting = overwriting\n\n    def register(self, reader: ImageReader):\n        self._loader.register(reader)\n\n    def __call__(self, data, reader: Optional[ImageReader] = None):\n        d = dict(data)\n 
       for key, meta_key, meta_key_postfix in self.key_iterator(d, self.meta_keys, self.meta_key_postfix):\n            data = self._loader(d[key], reader)\n            if self._loader.image_only:\n                d[key] = data\n            else:\n                if not isinstance(data, (tuple, list)):\n                    raise ValueError(\"loader must return a tuple or list (because image_only=False was used).\")\n                d[key] = data[0]\n                if not isinstance(data[1], dict):\n                    raise ValueError(\"metadata must be a dict.\")\n                meta_key = meta_key or f\"{key}_{meta_key_postfix}\"\n                if meta_key in d and not self.overwriting:\n                    raise KeyError(f\"Metadata with key {meta_key} already exists and overwriting=False.\")\n                d[meta_key] = data[1]\n\n        return d\n\n\ndef get_test_loader_Atlas(args):\n    \"\"\"\n    Creates training transforms, constructs a dataset, and returns a dataloader.\n\n    Args:\n        args: Command line arguments containing dataset paths and hyperparameters.\n    \"\"\"\n    test_transforms = transforms.Compose([\n        LoadImaged(keys=[\"image\"]),\n        EnsureChannelFirstd(keys=[\"image\"]),\n        Orientationd(keys=[\"image\"], axcodes=\"RAS\"),\n        Spacingd(keys=[\"image\"], pixdim=(args.space_x, args.space_y, args.space_z),\n                 mode=(\"bilinear\")),\n        ScaleIntensityRanged(\n            keys=[\"image\"],\n            a_min=args.a_min,\n            a_max=args.a_max,\n            b_min=0.0,\n            b_max=1.0,\n            clip=True,\n        ),\n        CropForegroundd(keys=[\"image\"], source_key=\"image\"),\n        SpatialPadd(keys=[\"image\"], spatial_size=(args.roi_x, args.roi_y, args.roi_z),\n                    mode='constant'),\n    ])\n\n    # constructing training dataset\n    test_img = []\n    test_name = []\n\n    dataset_list = os.listdir(args.test_data_path)\n\n    for item in 
dataset_list:\n        name = item\n        test_img_path = os.path.join(args.test_data_path, name, 'ct.nii.gz')\n        test_img.append(test_img_path)\n        test_name.append(name)\n\n    data_dicts_test = [{'image': image, 'name': name}\n                        for image, name in zip(test_img, test_name)]\n\n    print('test len {}'.format(len(data_dicts_test)))\n\n    test_ds = Dataset(data=data_dicts_test, transform=test_transforms)\n    test_loader = DataLoader(\n        test_ds, batch_size=1, shuffle=False, num_workers=args.workers, sampler=None, pin_memory=True\n    )\n    return test_loader, test_transforms\n"
  },
  {
    "path": "Finetune/AbdomenAtlas/dataset/dataset_list/AbdomenAtlas1.0.txt",
    "content": "BDMAP_00000001\nBDMAP_00000002\nBDMAP_00000003\nBDMAP_00000004\nBDMAP_00000005\nBDMAP_00000006\nBDMAP_00000007\nBDMAP_00000008\nBDMAP_00000009\nBDMAP_00000010\nBDMAP_00000011\nBDMAP_00000012\nBDMAP_00000013\nBDMAP_00000014\nBDMAP_00000015\nBDMAP_00000016\nBDMAP_00000017\nBDMAP_00000018\nBDMAP_00000019\nBDMAP_00000020\nBDMAP_00000021\nBDMAP_00000022\nBDMAP_00000023\nBDMAP_00000024\nBDMAP_00000025\nBDMAP_00000026\nBDMAP_00000027\nBDMAP_00000028\nBDMAP_00000029\nBDMAP_00000030\nBDMAP_00000031\nBDMAP_00000032\nBDMAP_00000033\nBDMAP_00000034\nBDMAP_00000035\nBDMAP_00000036\nBDMAP_00000037\nBDMAP_00000038\nBDMAP_00000039\nBDMAP_00000040\nBDMAP_00000041\nBDMAP_00000042\nBDMAP_00000043\nBDMAP_00000044\nBDMAP_00000045\nBDMAP_00000046\nBDMAP_00000047\nBDMAP_00000048\nBDMAP_00000049\nBDMAP_00000050\nBDMAP_00000051\nBDMAP_00000052\nBDMAP_00000053\nBDMAP_00000054\nBDMAP_00000055\nBDMAP_00000056\nBDMAP_00000057\nBDMAP_00000058\nBDMAP_00000059\nBDMAP_00000060\nBDMAP_00000061\nBDMAP_00000062\nBDMAP_00000063\nBDMAP_00000064\nBDMAP_00000065\nBDMAP_00000066\nBDMAP_00000067\nBDMAP_00000068\nBDMAP_00000069\nBDMAP_00000070\nBDMAP_00000071\nBDMAP_00000072\nBDMAP_00000073\nBDMAP_00000074\nBDMAP_00000075\nBDMAP_00000076\nBDMAP_00000077\nBDMAP_00000078\nBDMAP_00000079\nBDMAP_00000080\nBDMAP_00000081\nBDMAP_00000082\nBDMAP_00000083\nBDMAP_00000084\nBDMAP_00000085\nBDMAP_00000086\nBDMAP_00000087\nBDMAP_00000088\nBDMAP_00000089\nBDMAP_00000090\nBDMAP_00000091\nBDMAP_00000092\nBDMAP_00000093\nBDMAP_00000094\nBDMAP_00000095\nBDMAP_00000096\nBDMAP_00000097\nBDMAP_00000098\nBDMAP_00000099\nBDMAP_00000100\nBDMAP_00000101\nBDMAP_00000102\nBDMAP_00000103\nBDMAP_00000104\nBDMAP_00000105\nBDMAP_00000106\nBDMAP_00000107\nBDMAP_00000108\nBDMAP_00000109\nBDMAP_00000110\nBDMAP_00000111\nBDMAP_00000112\nBDMAP_00000113\nBDMAP_00000114\nBDMAP_00000115\nBDMAP_00000116\nBDMAP_00000117\nBDMAP_00000118\nBDMAP_00000119\nBDMAP_00000120\nBDMAP_00000121\nBDMAP_00000122\nBDMAP_00000123\nBDMAP_00000124\n
BDMAP_00000125\nBDMAP_00000126\nBDMAP_00000127\nBDMAP_00000128\nBDMAP_00000129\nBDMAP_00000130\nBDMAP_00000131\nBDMAP_00000132\nBDMAP_00000133\nBDMAP_00000134\nBDMAP_00000135\nBDMAP_00000136\nBDMAP_00000137\nBDMAP_00000138\nBDMAP_00000139\nBDMAP_00000140\nBDMAP_00000141\nBDMAP_00000142\nBDMAP_00000143\nBDMAP_00000144\nBDMAP_00000145\nBDMAP_00000146\nBDMAP_00000147\nBDMAP_00000148\nBDMAP_00000149\nBDMAP_00000150\nBDMAP_00000151\nBDMAP_00000152\nBDMAP_00000153\nBDMAP_00000154\nBDMAP_00000155\nBDMAP_00000156\nBDMAP_00000157\nBDMAP_00000158\nBDMAP_00000159\nBDMAP_00000160\nBDMAP_00000161\nBDMAP_00000162\nBDMAP_00000163\nBDMAP_00000164\nBDMAP_00000165\nBDMAP_00000166\nBDMAP_00000167\nBDMAP_00000168\nBDMAP_00000169\nBDMAP_00000170\nBDMAP_00000171\nBDMAP_00000172\nBDMAP_00000173\nBDMAP_00000174\nBDMAP_00000175\nBDMAP_00000176\nBDMAP_00000177\nBDMAP_00000178\nBDMAP_00000179\nBDMAP_00000180\nBDMAP_00000181\nBDMAP_00000182\nBDMAP_00000183\nBDMAP_00000184\nBDMAP_00000185\nBDMAP_00000186\nBDMAP_00000187\nBDMAP_00000188\nBDMAP_00000189\nBDMAP_00000190\nBDMAP_00000191\nBDMAP_00000192\nBDMAP_00000193\nBDMAP_00000194\nBDMAP_00000195\nBDMAP_00000196\nBDMAP_00000197\nBDMAP_00000198\nBDMAP_00000199\nBDMAP_00000200\nBDMAP_00000201\nBDMAP_00000202\nBDMAP_00000203\nBDMAP_00000204\nBDMAP_00000205\nBDMAP_00000206\nBDMAP_00000207\nBDMAP_00000208\nBDMAP_00000209\nBDMAP_00000210\nBDMAP_00000211\nBDMAP_00000212\nBDMAP_00000213\nBDMAP_00000214\nBDMAP_00000215\nBDMAP_00000216\nBDMAP_00000217\nBDMAP_00000218\nBDMAP_00000219\nBDMAP_00000220\nBDMAP_00000221\nBDMAP_00000222\nBDMAP_00000223\nBDMAP_00000224\nBDMAP_00000225\nBDMAP_00000226\nBDMAP_00000227\nBDMAP_00000228\nBDMAP_00000229\nBDMAP_00000230\nBDMAP_00000231\nBDMAP_00000232\nBDMAP_00000233\nBDMAP_00000234\nBDMAP_00000235\nBDMAP_00000236\nBDMAP_00000237\nBDMAP_00000238\nBDMAP_00000239\nBDMAP_00000240\nBDMAP_00000241\nBDMAP_00000242\nBDMAP_00000243\nBDMAP_00000244\nBDMAP_00000245\nBDMAP_00000246\nBDMAP_00000247\nBDMAP_00000248\nBDMAP_00000249\n
BDMAP_00000250\nBDMAP_00000251\nBDMAP_00000252\nBDMAP_00000253\nBDMAP_00000254\nBDMAP_00000255\nBDMAP_00000256\nBDMAP_00000257\nBDMAP_00000258\nBDMAP_00000259\nBDMAP_00000260\nBDMAP_00000261\nBDMAP_00000262\nBDMAP_00000263\nBDMAP_00000264\nBDMAP_00000265\nBDMAP_00000266\nBDMAP_00000267\nBDMAP_00000268\nBDMAP_00000269\nBDMAP_00000270\nBDMAP_00000271\nBDMAP_00000272\nBDMAP_00000273\nBDMAP_00000274\nBDMAP_00000275\nBDMAP_00000276\nBDMAP_00000277\nBDMAP_00000278\nBDMAP_00000279\nBDMAP_00000280\nBDMAP_00000281\nBDMAP_00000282\nBDMAP_00000283\nBDMAP_00000284\nBDMAP_00000285\nBDMAP_00000286\nBDMAP_00000287\nBDMAP_00000288\nBDMAP_00000289\nBDMAP_00000290\nBDMAP_00000291\nBDMAP_00000292\nBDMAP_00000293\nBDMAP_00000294\nBDMAP_00000295\nBDMAP_00000296\nBDMAP_00000297\nBDMAP_00000298\nBDMAP_00000299\nBDMAP_00000300\nBDMAP_00000301\nBDMAP_00000302\nBDMAP_00000303\nBDMAP_00000304\nBDMAP_00000305\nBDMAP_00000306\nBDMAP_00000307\nBDMAP_00000308\nBDMAP_00000309\nBDMAP_00000310\nBDMAP_00000311\nBDMAP_00000312\nBDMAP_00000313\nBDMAP_00000314\nBDMAP_00000315\nBDMAP_00000316\nBDMAP_00000317\nBDMAP_00000318\nBDMAP_00000319\nBDMAP_00000320\nBDMAP_00000321\nBDMAP_00000322\nBDMAP_00000323\nBDMAP_00000324\nBDMAP_00000325\nBDMAP_00000326\nBDMAP_00000327\nBDMAP_00000328\nBDMAP_00000329\nBDMAP_00000330\nBDMAP_00000331\nBDMAP_00000332\nBDMAP_00000333\nBDMAP_00000334\nBDMAP_00000335\nBDMAP_00000336\nBDMAP_00000337\nBDMAP_00000338\nBDMAP_00000339\nBDMAP_00000340\nBDMAP_00000341\nBDMAP_00000342\nBDMAP_00000343\nBDMAP_00000344\nBDMAP_00000345\nBDMAP_00000346\nBDMAP_00000347\nBDMAP_00000348\nBDMAP_00000349\nBDMAP_00000350\nBDMAP_00000351\nBDMAP_00000352\nBDMAP_00000353\nBDMAP_00000354\nBDMAP_00000355\nBDMAP_00000356\nBDMAP_00000357\nBDMAP_00000358\nBDMAP_00000359\nBDMAP_00000360\nBDMAP_00000361\nBDMAP_00000362\nBDMAP_00000363\nBDMAP_00000364\nBDMAP_00000365\nBDMAP_00000366\nBDMAP_00000367\nBDMAP_00000368\nBDMAP_00000369\nBDMAP_00000370\nBDMAP_00000371\nBDMAP_00000372\nBDMAP_00000373\nBDMAP_00000374\n
BDMAP_00000375\nBDMAP_00000376\nBDMAP_00000377\nBDMAP_00000378\nBDMAP_00000379\nBDMAP_00000380\nBDMAP_00000381\nBDMAP_00000382\nBDMAP_00000383\nBDMAP_00000384\nBDMAP_00000385\nBDMAP_00000386\nBDMAP_00000387\nBDMAP_00000388\nBDMAP_00000389\nBDMAP_00000390\nBDMAP_00000391\nBDMAP_00000392\nBDMAP_00000393\nBDMAP_00000394\nBDMAP_00000395\nBDMAP_00000396\nBDMAP_00000397\nBDMAP_00000398\nBDMAP_00000399\nBDMAP_00000400\nBDMAP_00000401\nBDMAP_00000402\nBDMAP_00000403\nBDMAP_00000404\nBDMAP_00000405\nBDMAP_00000406\nBDMAP_00000407\nBDMAP_00000408\nBDMAP_00000409\nBDMAP_00000410\nBDMAP_00000411\nBDMAP_00000412\nBDMAP_00000413\nBDMAP_00000414\nBDMAP_00000415\nBDMAP_00000416\nBDMAP_00000417\nBDMAP_00000418\nBDMAP_00000419\nBDMAP_00000420\nBDMAP_00000421\nBDMAP_00000422\nBDMAP_00000423\nBDMAP_00000424\nBDMAP_00000425\nBDMAP_00000426\nBDMAP_00000427\nBDMAP_00000428\nBDMAP_00000429\nBDMAP_00000430\nBDMAP_00000431\nBDMAP_00000432\nBDMAP_00000433\nBDMAP_00000434\nBDMAP_00000435\nBDMAP_00000436\nBDMAP_00000437\nBDMAP_00000438\nBDMAP_00000439\nBDMAP_00000440\nBDMAP_00000441\nBDMAP_00000442\nBDMAP_00000443\nBDMAP_00000444\nBDMAP_00000445\nBDMAP_00000446\nBDMAP_00000447\nBDMAP_00000448\nBDMAP_00000449\nBDMAP_00000450\nBDMAP_00000451\nBDMAP_00000452\nBDMAP_00000453\nBDMAP_00000454\nBDMAP_00000455\nBDMAP_00000456\nBDMAP_00000457\nBDMAP_00000458\nBDMAP_00000459\nBDMAP_00000460\nBDMAP_00000461\nBDMAP_00000462\nBDMAP_00000463\nBDMAP_00000464\nBDMAP_00000465\nBDMAP_00000466\nBDMAP_00000467\nBDMAP_00000468\nBDMAP_00000469\nBDMAP_00000470\nBDMAP_00000471\nBDMAP_00000472\nBDMAP_00000473\nBDMAP_00000474\nBDMAP_00000475\nBDMAP_00000476\nBDMAP_00000477\nBDMAP_00000478\nBDMAP_00000479\nBDMAP_00000480\nBDMAP_00000481\nBDMAP_00000482\nBDMAP_00000483\nBDMAP_00000484\nBDMAP_00000485\nBDMAP_00000486\nBDMAP_00000487\nBDMAP_00000488\nBDMAP_00000489\nBDMAP_00000490\nBDMAP_00000491\nBDMAP_00000492\nBDMAP_00000493\nBDMAP_00000494\nBDMAP_00000495\nBDMAP_00000496\nBDMAP_00000497\nBDMAP_00000498\nBDMAP_00000499\n
BDMAP_00000500\nBDMAP_00000501\nBDMAP_00000502\nBDMAP_00000503\nBDMAP_00000504\nBDMAP_00000505\nBDMAP_00000506\nBDMAP_00000507\nBDMAP_00000508\nBDMAP_00000509\nBDMAP_00000510\nBDMAP_00000511\nBDMAP_00000512\nBDMAP_00000513\nBDMAP_00000514\nBDMAP_00000515\nBDMAP_00000516\nBDMAP_00000517\nBDMAP_00000518\nBDMAP_00000519\nBDMAP_00000520\nBDMAP_00000521\nBDMAP_00000522\nBDMAP_00000523\nBDMAP_00000524\nBDMAP_00000525\nBDMAP_00000526\nBDMAP_00000527\nBDMAP_00000528\nBDMAP_00000529\nBDMAP_00000530\nBDMAP_00000531\nBDMAP_00000532\nBDMAP_00000533\nBDMAP_00000534\nBDMAP_00000535\nBDMAP_00000536\nBDMAP_00000537\nBDMAP_00000538\nBDMAP_00000539\nBDMAP_00000540\nBDMAP_00000541\nBDMAP_00000542\nBDMAP_00000543\nBDMAP_00000544\nBDMAP_00000545\nBDMAP_00000546\nBDMAP_00000547\nBDMAP_00000548\nBDMAP_00000549\nBDMAP_00000550\nBDMAP_00000551\nBDMAP_00000552\nBDMAP_00000553\nBDMAP_00000554\nBDMAP_00000555\nBDMAP_00000556\nBDMAP_00000557\nBDMAP_00000558\nBDMAP_00000559\nBDMAP_00000560\nBDMAP_00000561\nBDMAP_00000562\nBDMAP_00000563\nBDMAP_00000564\nBDMAP_00000565\nBDMAP_00000566\nBDMAP_00000567\nBDMAP_00000568\nBDMAP_00000569\nBDMAP_00000570\nBDMAP_00000571\nBDMAP_00000572\nBDMAP_00000573\nBDMAP_00000574\nBDMAP_00000575\nBDMAP_00000576\nBDMAP_00000577\nBDMAP_00000578\nBDMAP_00000579\nBDMAP_00000580\nBDMAP_00000581\nBDMAP_00000582\nBDMAP_00000583\nBDMAP_00000584\nBDMAP_00000585\nBDMAP_00000586\nBDMAP_00000587\nBDMAP_00000588\nBDMAP_00000589\nBDMAP_00000590\nBDMAP_00000591\nBDMAP_00000592\nBDMAP_00000593\nBDMAP_00000594\nBDMAP_00000595\nBDMAP_00000596\nBDMAP_00000597\nBDMAP_00000598\nBDMAP_00000599\nBDMAP_00000600\nBDMAP_00000601\nBDMAP_00000602\nBDMAP_00000603\nBDMAP_00000604\nBDMAP_00000605\nBDMAP_00000606\nBDMAP_00000607\nBDMAP_00000608\nBDMAP_00000609\nBDMAP_00000610\nBDMAP_00000611\nBDMAP_00000612\nBDMAP_00000613\nBDMAP_00000614\nBDMAP_00000615\nBDMAP_00000616\nBDMAP_00000617\nBDMAP_00000618\nBDMAP_00000619\nBDMAP_00000620\nBDMAP_00000621\nBDMAP_00000622\nBDMAP_00000623\nBDMAP_00000624\n
BDMAP_00000625\nBDMAP_00000626\nBDMAP_00000627\nBDMAP_00000628\nBDMAP_00000629\nBDMAP_00000630\nBDMAP_00000631\nBDMAP_00000632\nBDMAP_00000633\nBDMAP_00000634\nBDMAP_00000635\nBDMAP_00000636\nBDMAP_00000637\nBDMAP_00000638\nBDMAP_00000639\nBDMAP_00000640\nBDMAP_00000641\nBDMAP_00000642\nBDMAP_00000643\nBDMAP_00000644\nBDMAP_00000645\nBDMAP_00000646\nBDMAP_00000647\nBDMAP_00000648\nBDMAP_00000649\nBDMAP_00000650\nBDMAP_00000651\nBDMAP_00000652\nBDMAP_00000653\nBDMAP_00000654\nBDMAP_00000655\nBDMAP_00000656\nBDMAP_00000657\nBDMAP_00000658\nBDMAP_00000659\nBDMAP_00000660\nBDMAP_00000661\nBDMAP_00000662\nBDMAP_00000663\nBDMAP_00000664\nBDMAP_00000665\nBDMAP_00000666\nBDMAP_00000667\nBDMAP_00000668\nBDMAP_00000669\nBDMAP_00000670\nBDMAP_00000671\nBDMAP_00000672\nBDMAP_00000673\nBDMAP_00000674\nBDMAP_00000675\nBDMAP_00000676\nBDMAP_00000677\nBDMAP_00000678\nBDMAP_00000679\nBDMAP_00000680\nBDMAP_00000681\nBDMAP_00000682\nBDMAP_00000683\nBDMAP_00000684\nBDMAP_00000685\nBDMAP_00000686\nBDMAP_00000687\nBDMAP_00000688\nBDMAP_00000689\nBDMAP_00000690\nBDMAP_00000691\nBDMAP_00000692\nBDMAP_00000693\nBDMAP_00000694\nBDMAP_00000695\nBDMAP_00000696\nBDMAP_00000697\nBDMAP_00000698\nBDMAP_00000699\nBDMAP_00000700\nBDMAP_00000701\nBDMAP_00000702\nBDMAP_00000703\nBDMAP_00000704\nBDMAP_00000705\nBDMAP_00000706\nBDMAP_00000707\nBDMAP_00000708\nBDMAP_00000709\nBDMAP_00000710\nBDMAP_00000711\nBDMAP_00000712\nBDMAP_00000713\nBDMAP_00000714\nBDMAP_00000715\nBDMAP_00000716\nBDMAP_00000717\nBDMAP_00000718\nBDMAP_00000719\nBDMAP_00000720\nBDMAP_00000721\nBDMAP_00000722\nBDMAP_00000723\nBDMAP_00000724\nBDMAP_00000725\nBDMAP_00000726\nBDMAP_00000727\nBDMAP_00000728\nBDMAP_00000729\nBDMAP_00000730\nBDMAP_00000731\nBDMAP_00000732\nBDMAP_00000733\nBDMAP_00000734\nBDMAP_00000735\nBDMAP_00000736\nBDMAP_00000737\nBDMAP_00000738\nBDMAP_00000739\nBDMAP_00000740\nBDMAP_00000741\nBDMAP_00000742\nBDMAP_00000743\nBDMAP_00000744\nBDMAP_00000745\nBDMAP_00000746\nBDMAP_00000747\nBDMAP_00000748\nBDMAP_00000749\n
BDMAP_00000750\nBDMAP_00000751\nBDMAP_00000752\nBDMAP_00000753\nBDMAP_00000754\nBDMAP_00000755\nBDMAP_00000756\nBDMAP_00000757\nBDMAP_00000758\nBDMAP_00000759\nBDMAP_00000760\nBDMAP_00000761\nBDMAP_00000762\nBDMAP_00000763\nBDMAP_00000764\nBDMAP_00000765\nBDMAP_00000766\nBDMAP_00000767\nBDMAP_00000768\nBDMAP_00000769\nBDMAP_00000770\nBDMAP_00000771\nBDMAP_00000772\nBDMAP_00000773\nBDMAP_00000774\nBDMAP_00000775\nBDMAP_00000776\nBDMAP_00000777\nBDMAP_00000778\nBDMAP_00000779\nBDMAP_00000780\nBDMAP_00000781\nBDMAP_00000782\nBDMAP_00000783\nBDMAP_00000784\nBDMAP_00000785\nBDMAP_00000786\nBDMAP_00000787\nBDMAP_00000788\nBDMAP_00000789\nBDMAP_00000790\nBDMAP_00000791\nBDMAP_00000792\nBDMAP_00000793\nBDMAP_00000794\nBDMAP_00000795\nBDMAP_00000796\nBDMAP_00000797\nBDMAP_00000798\nBDMAP_00000799\nBDMAP_00000800\nBDMAP_00000801\nBDMAP_00000802\nBDMAP_00000803\nBDMAP_00000804\nBDMAP_00000805\nBDMAP_00000806\nBDMAP_00000807\nBDMAP_00000808\nBDMAP_00000809\nBDMAP_00000810\nBDMAP_00000811\nBDMAP_00000812\nBDMAP_00000813\nBDMAP_00000814\nBDMAP_00000815\nBDMAP_00000816\nBDMAP_00000817\nBDMAP_00000818\nBDMAP_00000819\nBDMAP_00000820\nBDMAP_00000821\nBDMAP_00000822\nBDMAP_00000823\nBDMAP_00000824\nBDMAP_00000825\nBDMAP_00000826\nBDMAP_00000827\nBDMAP_00000828\nBDMAP_00000829\nBDMAP_00000830\nBDMAP_00000831\nBDMAP_00000832\nBDMAP_00000833\nBDMAP_00000834\nBDMAP_00000835\nBDMAP_00000836\nBDMAP_00000837\nBDMAP_00000838\nBDMAP_00000839\nBDMAP_00000840\nBDMAP_00000841\nBDMAP_00000842\nBDMAP_00000843\nBDMAP_00000844\nBDMAP_00000845\nBDMAP_00000846\nBDMAP_00000847\nBDMAP_00000848\nBDMAP_00000849\nBDMAP_00000850\nBDMAP_00000851\nBDMAP_00000852\nBDMAP_00000853\nBDMAP_00000854\nBDMAP_00000855\nBDMAP_00000856\nBDMAP_00000857\nBDMAP_00000858\nBDMAP_00000859\nBDMAP_00000860\nBDMAP_00000861\nBDMAP_00000862\nBDMAP_00000863\nBDMAP_00000864\nBDMAP_00000865\nBDMAP_00000866\nBDMAP_00000867\nBDMAP_00000868\nBDMAP_00000869\nBDMAP_00000870\nBDMAP_00000871\nBDMAP_00000872\nBDMAP_00000873\nBDMAP_00000874\n
BDMAP_00000875\nBDMAP_00000876\nBDMAP_00000877\nBDMAP_00000878\nBDMAP_00000879\nBDMAP_00000880\nBDMAP_00000881\nBDMAP_00000882\nBDMAP_00000883\nBDMAP_00000884\nBDMAP_00000885\nBDMAP_00000886\nBDMAP_00000887\nBDMAP_00000888\nBDMAP_00000889\nBDMAP_00000890\nBDMAP_00000891\nBDMAP_00000892\nBDMAP_00000893\nBDMAP_00000894\nBDMAP_00000895\nBDMAP_00000896\nBDMAP_00000897\nBDMAP_00000898\nBDMAP_00000899\nBDMAP_00000900\nBDMAP_00000901\nBDMAP_00000902\nBDMAP_00000903\nBDMAP_00000904\nBDMAP_00000905\nBDMAP_00000906\nBDMAP_00000907\nBDMAP_00000908\nBDMAP_00000909\nBDMAP_00000910\nBDMAP_00000911\nBDMAP_00000912\nBDMAP_00000913\nBDMAP_00000914\nBDMAP_00000915\nBDMAP_00000916\nBDMAP_00000917\nBDMAP_00000918\nBDMAP_00000919\nBDMAP_00000920\nBDMAP_00000921\nBDMAP_00000922\nBDMAP_00000923\nBDMAP_00000924\nBDMAP_00000925\nBDMAP_00000926\nBDMAP_00000927\nBDMAP_00000928\nBDMAP_00000929\nBDMAP_00000930\nBDMAP_00000931\nBDMAP_00000932\nBDMAP_00000933\nBDMAP_00000934\nBDMAP_00000935\nBDMAP_00000936\nBDMAP_00000937\nBDMAP_00000938\nBDMAP_00000939\nBDMAP_00000940\nBDMAP_00000941\nBDMAP_00000942\nBDMAP_00000943\nBDMAP_00000944\nBDMAP_00000945\nBDMAP_00000946\nBDMAP_00000947\nBDMAP_00000948\nBDMAP_00000949\nBDMAP_00000950\nBDMAP_00000951\nBDMAP_00000952\nBDMAP_00000953\nBDMAP_00000954\nBDMAP_00000955\nBDMAP_00000956\nBDMAP_00000957\nBDMAP_00000958\nBDMAP_00000959\nBDMAP_00000960\nBDMAP_00000961\nBDMAP_00000962\nBDMAP_00000963\nBDMAP_00000964\nBDMAP_00000965\nBDMAP_00000966\nBDMAP_00000967\nBDMAP_00000968\nBDMAP_00000969\nBDMAP_00000970\nBDMAP_00000971\nBDMAP_00000972\nBDMAP_00000973\nBDMAP_00000974\nBDMAP_00000975\nBDMAP_00000976\nBDMAP_00000977\nBDMAP_00000978\nBDMAP_00000979\nBDMAP_00000980\nBDMAP_00000981\nBDMAP_00000982\nBDMAP_00000983\nBDMAP_00000984\nBDMAP_00000985\nBDMAP_00000986\nBDMAP_00000987\nBDMAP_00000988\nBDMAP_00000989\nBDMAP_00000990\nBDMAP_00000991\nBDMAP_00000992\nBDMAP_00000993\nBDMAP_00000994\nBDMAP_00000995\nBDMAP_00000996\nBDMAP_00000997\nBDMAP_00000998\nBDMAP_00000999\n
BDMAP_00001000\nBDMAP_00001001\nBDMAP_00001002\nBDMAP_00001003\nBDMAP_00001004\nBDMAP_00001005\nBDMAP_00001006\nBDMAP_00001007\nBDMAP_00001008\nBDMAP_00001009\nBDMAP_00001010\nBDMAP_00001011\nBDMAP_00001012\nBDMAP_00001013\nBDMAP_00001014\nBDMAP_00001015\nBDMAP_00001016\nBDMAP_00001017\nBDMAP_00001018\nBDMAP_00001019\nBDMAP_00001020\nBDMAP_00001021\nBDMAP_00001022\nBDMAP_00001023\nBDMAP_00001024\nBDMAP_00001025\nBDMAP_00001026\nBDMAP_00001027\nBDMAP_00001028\nBDMAP_00001029\nBDMAP_00001030\nBDMAP_00001031\nBDMAP_00001032\nBDMAP_00001033\nBDMAP_00001034\nBDMAP_00001035\nBDMAP_00001036\nBDMAP_00001037\nBDMAP_00001038\nBDMAP_00001039\nBDMAP_00001040\nBDMAP_00001041\nBDMAP_00001042\nBDMAP_00001043\nBDMAP_00001044\nBDMAP_00001045\nBDMAP_00001046\nBDMAP_00001047\nBDMAP_00001048\nBDMAP_00001049\nBDMAP_00001050\nBDMAP_00001051\nBDMAP_00001052\nBDMAP_00001053\nBDMAP_00001054\nBDMAP_00001055\nBDMAP_00001056\nBDMAP_00001057\nBDMAP_00001058\nBDMAP_00001059\nBDMAP_00001060\nBDMAP_00001061\nBDMAP_00001062\nBDMAP_00001063\nBDMAP_00001064\nBDMAP_00001065\nBDMAP_00001066\nBDMAP_00001067\nBDMAP_00001068\nBDMAP_00001069\nBDMAP_00001070\nBDMAP_00001071\nBDMAP_00001072\nBDMAP_00001073\nBDMAP_00001074\nBDMAP_00001075\nBDMAP_00001076\nBDMAP_00001077\nBDMAP_00001078\nBDMAP_00001079\nBDMAP_00001080\nBDMAP_00001081\nBDMAP_00001082\nBDMAP_00001083\nBDMAP_00001084\nBDMAP_00001085\nBDMAP_00001086\nBDMAP_00001087\nBDMAP_00001088\nBDMAP_00001089\nBDMAP_00001090\nBDMAP_00001091\nBDMAP_00001092\nBDMAP_00001093\nBDMAP_00001094\nBDMAP_00001095\nBDMAP_00001096\nBDMAP_00001097\nBDMAP_00001098\nBDMAP_00001099\nBDMAP_00001100\nBDMAP_00001101\nBDMAP_00001102\nBDMAP_00001103\nBDMAP_00001104\nBDMAP_00001105\nBDMAP_00001106\nBDMAP_00001107\nBDMAP_00001108\nBDMAP_00001109\nBDMAP_00001110\nBDMAP_00001111\nBDMAP_00001112\nBDMAP_00001113\nBDMAP_00001114\nBDMAP_00001115\nBDMAP_00001116\nBDMAP_00001117\nBDMAP_00001118\nBDMAP_00001119\nBDMAP_00001120\nBDMAP_00001121\nBDMAP_00001122\nBDMAP_00001123\nBDMAP_00001124\n
BDMAP_00001125\nBDMAP_00001126\nBDMAP_00001127\nBDMAP_00001128\nBDMAP_00001129\nBDMAP_00001130\nBDMAP_00001131\nBDMAP_00001132\nBDMAP_00001133\nBDMAP_00001134\nBDMAP_00001135\nBDMAP_00001136\nBDMAP_00001137\nBDMAP_00001138\nBDMAP_00001139\nBDMAP_00001140\nBDMAP_00001141\nBDMAP_00001142\nBDMAP_00001143\nBDMAP_00001144\nBDMAP_00001145\nBDMAP_00001146\nBDMAP_00001147\nBDMAP_00001148\nBDMAP_00001149\nBDMAP_00001150\nBDMAP_00001151\nBDMAP_00001152\nBDMAP_00001153\nBDMAP_00001154\nBDMAP_00001155\nBDMAP_00001156\nBDMAP_00001157\nBDMAP_00001158\nBDMAP_00001159\nBDMAP_00001160\nBDMAP_00001161\nBDMAP_00001162\nBDMAP_00001163\nBDMAP_00001164\nBDMAP_00001165\nBDMAP_00001166\nBDMAP_00001167\nBDMAP_00001168\nBDMAP_00001169\nBDMAP_00001170\nBDMAP_00001171\nBDMAP_00001172\nBDMAP_00001173\nBDMAP_00001174\nBDMAP_00001175\nBDMAP_00001176\nBDMAP_00001177\nBDMAP_00001178\nBDMAP_00001179\nBDMAP_00001180\nBDMAP_00001181\nBDMAP_00001182\nBDMAP_00001183\nBDMAP_00001184\nBDMAP_00001185\nBDMAP_00001186\nBDMAP_00001187\nBDMAP_00001188\nBDMAP_00001189\nBDMAP_00001190\nBDMAP_00001191\nBDMAP_00001192\nBDMAP_00001193\nBDMAP_00001194\nBDMAP_00001195\nBDMAP_00001196\nBDMAP_00001197\nBDMAP_00001198\nBDMAP_00001199\nBDMAP_00001200\nBDMAP_00001201\nBDMAP_00001202\nBDMAP_00001203\nBDMAP_00001204\nBDMAP_00001205\nBDMAP_00001206\nBDMAP_00001207\nBDMAP_00001208\nBDMAP_00001209\nBDMAP_00001210\nBDMAP_00001211\nBDMAP_00001212\nBDMAP_00001213\nBDMAP_00001214\nBDMAP_00001215\nBDMAP_00001216\nBDMAP_00001217\nBDMAP_00001218\nBDMAP_00001219\nBDMAP_00001220\nBDMAP_00001221\nBDMAP_00001222\nBDMAP_00001223\nBDMAP_00001224\nBDMAP_00001225\nBDMAP_00001226\nBDMAP_00001227\nBDMAP_00001228\nBDMAP_00001229\nBDMAP_00001230\nBDMAP_00001231\nBDMAP_00001232\nBDMAP_00001233\nBDMAP_00001234\nBDMAP_00001235\nBDMAP_00001236\nBDMAP_00001237\nBDMAP_00001238\nBDMAP_00001239\nBDMAP_00001240\nBDMAP_00001241\nBDMAP_00001242\nBDMAP_00001243\nBDMAP_00001244\nBDMAP_00001245\nBDMAP_00001246\nBDMAP_00001247\nBDMAP_00001248\nBDMAP_00001249\n
BDMAP_00001250\nBDMAP_00001251\nBDMAP_00001252\nBDMAP_00001253\nBDMAP_00001254\nBDMAP_00001255\nBDMAP_00001256\nBDMAP_00001257\nBDMAP_00001258\nBDMAP_00001259\nBDMAP_00001260\nBDMAP_00001261\nBDMAP_00001262\nBDMAP_00001263\nBDMAP_00001264\nBDMAP_00001265\nBDMAP_00001266\nBDMAP_00001267\nBDMAP_00001268\nBDMAP_00001269\nBDMAP_00001270\nBDMAP_00001271\nBDMAP_00001272\nBDMAP_00001273\nBDMAP_00001274\nBDMAP_00001275\nBDMAP_00001276\nBDMAP_00001277\nBDMAP_00001278\nBDMAP_00001279\nBDMAP_00001280\nBDMAP_00001281\nBDMAP_00001282\nBDMAP_00001283\nBDMAP_00001284\nBDMAP_00001285\nBDMAP_00001286\nBDMAP_00001287\nBDMAP_00001288\nBDMAP_00001289\nBDMAP_00001290\nBDMAP_00001291\nBDMAP_00001292\nBDMAP_00001293\nBDMAP_00001294\nBDMAP_00001295\nBDMAP_00001296\nBDMAP_00001297\nBDMAP_00001298\nBDMAP_00001299\nBDMAP_00001300\nBDMAP_00001301\nBDMAP_00001302\nBDMAP_00001303\nBDMAP_00001304\nBDMAP_00001305\nBDMAP_00001306\nBDMAP_00001307\nBDMAP_00001308\nBDMAP_00001309\nBDMAP_00001310\nBDMAP_00001311\nBDMAP_00001312\nBDMAP_00001313\nBDMAP_00001314\nBDMAP_00001315\nBDMAP_00001316\nBDMAP_00001317\nBDMAP_00001318\nBDMAP_00001319\nBDMAP_00001320\nBDMAP_00001321\nBDMAP_00001322\nBDMAP_00001323\nBDMAP_00001324\nBDMAP_00001325\nBDMAP_00001326\nBDMAP_00001327\nBDMAP_00001328\nBDMAP_00001329\nBDMAP_00001330\nBDMAP_00001331\nBDMAP_00001332\nBDMAP_00001333\nBDMAP_00001334\nBDMAP_00001335\nBDMAP_00001336\nBDMAP_00001337\nBDMAP_00001338\nBDMAP_00001339\nBDMAP_00001340\nBDMAP_00001341\nBDMAP_00001342\nBDMAP_00001343\nBDMAP_00001344\nBDMAP_00001345\nBDMAP_00001346\nBDMAP_00001347\nBDMAP_00001348\nBDMAP_00001349\nBDMAP_00001350\nBDMAP_00001351\nBDMAP_00001352\nBDMAP_00001353\nBDMAP_00001354\nBDMAP_00001355\nBDMAP_00001356\nBDMAP_00001357\nBDMAP_00001358\nBDMAP_00001359\nBDMAP_00001360\nBDMAP_00001361\nBDMAP_00001362\nBDMAP_00001363\nBDMAP_00001364\nBDMAP_00001365\nBDMAP_00001366\nBDMAP_00001367\nBDMAP_00001368\nBDMAP_00001369\nBDMAP_00001370\nBDMAP_00001371\nBDMAP_00001372\nBDMAP_00001373\nBDMAP_00001374\n
BDMAP_00001375\nBDMAP_00001376\nBDMAP_00001377\nBDMAP_00001378\nBDMAP_00001379\nBDMAP_00001380\nBDMAP_00001381\nBDMAP_00001382\nBDMAP_00001383\nBDMAP_00001384\nBDMAP_00001385\nBDMAP_00001386\nBDMAP_00001387\nBDMAP_00001388\nBDMAP_00001389\nBDMAP_00001390\nBDMAP_00001391\nBDMAP_00001392\nBDMAP_00001393\nBDMAP_00001394\nBDMAP_00001395\nBDMAP_00001396\nBDMAP_00001397\nBDMAP_00001398\nBDMAP_00001399\nBDMAP_00001400\nBDMAP_00001401\nBDMAP_00001402\nBDMAP_00001403\nBDMAP_00001404\nBDMAP_00001405\nBDMAP_00001406\nBDMAP_00001407\nBDMAP_00001408\nBDMAP_00001409\nBDMAP_00001410\nBDMAP_00001411\nBDMAP_00001412\nBDMAP_00001413\nBDMAP_00001414\nBDMAP_00001415\nBDMAP_00001416\nBDMAP_00001417\nBDMAP_00001418\nBDMAP_00001419\nBDMAP_00001420\nBDMAP_00001421\nBDMAP_00001422\nBDMAP_00001423\nBDMAP_00001424\nBDMAP_00001425\nBDMAP_00001426\nBDMAP_00001427\nBDMAP_00001428\nBDMAP_00001429\nBDMAP_00001430\nBDMAP_00001431\nBDMAP_00001432\nBDMAP_00001433\nBDMAP_00001434\nBDMAP_00001435\nBDMAP_00001436\nBDMAP_00001437\nBDMAP_00001438\nBDMAP_00001439\nBDMAP_00001440\nBDMAP_00001441\nBDMAP_00001442\nBDMAP_00001443\nBDMAP_00001444\nBDMAP_00001445\nBDMAP_00001446\nBDMAP_00001447\nBDMAP_00001448\nBDMAP_00001449\nBDMAP_00001450\nBDMAP_00001451\nBDMAP_00001452\nBDMAP_00001453\nBDMAP_00001454\nBDMAP_00001455\nBDMAP_00001456\nBDMAP_00001457\nBDMAP_00001458\nBDMAP_00001459\nBDMAP_00001460\nBDMAP_00001461\nBDMAP_00001462\nBDMAP_00001463\nBDMAP_00001464\nBDMAP_00001465\nBDMAP_00001466\nBDMAP_00001467\nBDMAP_00001468\nBDMAP_00001469\nBDMAP_00001470\nBDMAP_00001471\nBDMAP_00001472\nBDMAP_00001473\nBDMAP_00001474\nBDMAP_00001475\nBDMAP_00001476\nBDMAP_00001477\nBDMAP_00001478\nBDMAP_00001479\nBDMAP_00001480\nBDMAP_00001481\nBDMAP_00001482\nBDMAP_00001483\nBDMAP_00001484\nBDMAP_00001485\nBDMAP_00001486\nBDMAP_00001487\nBDMAP_00001488\nBDMAP_00001489\nBDMAP_00001490\nBDMAP_00001491\nBDMAP_00001492\nBDMAP_00001493\nBDMAP_00001494\nBDMAP_00001495\nBDMAP_00001496\nBDMAP_00001497\nBDMAP_00001498\nBDMAP_00001499\n
BDMAP_00001500\nBDMAP_00001501\nBDMAP_00001502\nBDMAP_00001503\nBDMAP_00001504\nBDMAP_00001505\nBDMAP_00001506\nBDMAP_00001507\nBDMAP_00001508\nBDMAP_00001509\nBDMAP_00001510\nBDMAP_00001511\nBDMAP_00001512\nBDMAP_00001513\nBDMAP_00001514\nBDMAP_00001515\nBDMAP_00001516\nBDMAP_00001517\nBDMAP_00001518\nBDMAP_00001519\nBDMAP_00001520\nBDMAP_00001521\nBDMAP_00001522\nBDMAP_00001523\nBDMAP_00001524\nBDMAP_00001525\nBDMAP_00001526\nBDMAP_00001527\nBDMAP_00001528\nBDMAP_00001529\nBDMAP_00001530\nBDMAP_00001531\nBDMAP_00001532\nBDMAP_00001533\nBDMAP_00001534\nBDMAP_00001535\nBDMAP_00001536\nBDMAP_00001537\nBDMAP_00001538\nBDMAP_00001539\nBDMAP_00001540\nBDMAP_00001541\nBDMAP_00001542\nBDMAP_00001543\nBDMAP_00001544\nBDMAP_00001545\nBDMAP_00001546\nBDMAP_00001547\nBDMAP_00001548\nBDMAP_00001549\nBDMAP_00001550\nBDMAP_00001551\nBDMAP_00001552\nBDMAP_00001553\nBDMAP_00001554\nBDMAP_00001555\nBDMAP_00001556\nBDMAP_00001557\nBDMAP_00001558\nBDMAP_00001559\nBDMAP_00001560\nBDMAP_00001561\nBDMAP_00001562\nBDMAP_00001563\nBDMAP_00001564\nBDMAP_00001565\nBDMAP_00001566\nBDMAP_00001567\nBDMAP_00001568\nBDMAP_00001569\nBDMAP_00001570\nBDMAP_00001571\nBDMAP_00001572\nBDMAP_00001573\nBDMAP_00001574\nBDMAP_00001575\nBDMAP_00001576\nBDMAP_00001577\nBDMAP_00001578\nBDMAP_00001579\nBDMAP_00001580\nBDMAP_00001581\nBDMAP_00001582\nBDMAP_00001583\nBDMAP_00001584\nBDMAP_00001585\nBDMAP_00001586\nBDMAP_00001587\nBDMAP_00001588\nBDMAP_00001589\nBDMAP_00001590\nBDMAP_00001591\nBDMAP_00001592\nBDMAP_00001593\nBDMAP_00001594\nBDMAP_00001595\nBDMAP_00001596\nBDMAP_00001597\nBDMAP_00001598\nBDMAP_00001599\nBDMAP_00001600\nBDMAP_00001601\nBDMAP_00001602\nBDMAP_00001603\nBDMAP_00001604\nBDMAP_00001605\nBDMAP_00001606\nBDMAP_00001607\nBDMAP_00001608\nBDMAP_00001609\nBDMAP_00001610\nBDMAP_00001611\nBDMAP_00001612\nBDMAP_00001613\nBDMAP_00001614\nBDMAP_00001615\nBDMAP_00001616\nBDMAP_00001617\nBDMAP_00001618\nBDMAP_00001619\nBDMAP_00001620\nBDMAP_00001621\nBDMAP_00001622\nBDMAP_00001623\nBDMAP_00001624\n
BDMAP_00001625\nBDMAP_00001626\nBDMAP_00001627\nBDMAP_00001628\nBDMAP_00001629\nBDMAP_00001630\nBDMAP_00001631\nBDMAP_00001632\nBDMAP_00001633\nBDMAP_00001634\nBDMAP_00001635\nBDMAP_00001636\nBDMAP_00001637\nBDMAP_00001638\nBDMAP_00001639\nBDMAP_00001640\nBDMAP_00001641\nBDMAP_00001642\nBDMAP_00001643\nBDMAP_00001644\nBDMAP_00001645\nBDMAP_00001646\nBDMAP_00001647\nBDMAP_00001648\nBDMAP_00001649\nBDMAP_00001650\nBDMAP_00001651\nBDMAP_00001652\nBDMAP_00001653\nBDMAP_00001654\nBDMAP_00001655\nBDMAP_00001656\nBDMAP_00001657\nBDMAP_00001658\nBDMAP_00001659\nBDMAP_00001660\nBDMAP_00001661\nBDMAP_00001662\nBDMAP_00001663\nBDMAP_00001664\nBDMAP_00001665\nBDMAP_00001666\nBDMAP_00001667\nBDMAP_00001668\nBDMAP_00001669\nBDMAP_00001670\nBDMAP_00001671\nBDMAP_00001672\nBDMAP_00001673\nBDMAP_00001674\nBDMAP_00001675\nBDMAP_00001676\nBDMAP_00001677\nBDMAP_00001678\nBDMAP_00001679\nBDMAP_00001680\nBDMAP_00001681\nBDMAP_00001682\nBDMAP_00001683\nBDMAP_00001684\nBDMAP_00001685\nBDMAP_00001686\nBDMAP_00001687\nBDMAP_00001688\nBDMAP_00001689\nBDMAP_00001690\nBDMAP_00001691\nBDMAP_00001692\nBDMAP_00001693\nBDMAP_00001694\nBDMAP_00001695\nBDMAP_00001696\nBDMAP_00001697\nBDMAP_00001698\nBDMAP_00001699\nBDMAP_00001700\nBDMAP_00001701\nBDMAP_00001702\nBDMAP_00001703\nBDMAP_00001704\nBDMAP_00001705\nBDMAP_00001706\nBDMAP_00001707\nBDMAP_00001708\nBDMAP_00001709\nBDMAP_00001710\nBDMAP_00001711\nBDMAP_00001712\nBDMAP_00001713\nBDMAP_00001714\nBDMAP_00001715\nBDMAP_00001716\nBDMAP_00001717\nBDMAP_00001718\nBDMAP_00001719\nBDMAP_00001720\nBDMAP_00001721\nBDMAP_00001722\nBDMAP_00001723\nBDMAP_00001724\nBDMAP_00001725\nBDMAP_00001726\nBDMAP_00001727\nBDMAP_00001728\nBDMAP_00001729\nBDMAP_00001730\nBDMAP_00001731\nBDMAP_00001732\nBDMAP_00001733\nBDMAP_00001734\nBDMAP_00001735\nBDMAP_00001736\nBDMAP_00001737\nBDMAP_00001738\nBDMAP_00001739\nBDMAP_00001740\nBDMAP_00001741\nBDMAP_00001742\nBDMAP_00001743\nBDMAP_00001744\nBDMAP_00001745\nBDMAP_00001746\nBDMAP_00001747\nBDMAP_00001748\nBDMAP_00001749\n
BDMAP_00001750\nBDMAP_00001751\nBDMAP_00001752\nBDMAP_00001753\nBDMAP_00001754\nBDMAP_00001755\nBDMAP_00001756\nBDMAP_00001757\nBDMAP_00001758\nBDMAP_00001759\nBDMAP_00001760\nBDMAP_00001761\nBDMAP_00001762\nBDMAP_00001763\nBDMAP_00001764\nBDMAP_00001765\nBDMAP_00001766\nBDMAP_00001767\nBDMAP_00001768\nBDMAP_00001769\nBDMAP_00001770\nBDMAP_00001771\nBDMAP_00001772\nBDMAP_00001773\nBDMAP_00001774\nBDMAP_00001775\nBDMAP_00001776\nBDMAP_00001777\nBDMAP_00001778\nBDMAP_00001779\nBDMAP_00001780\nBDMAP_00001781\nBDMAP_00001782\nBDMAP_00001783\nBDMAP_00001784\nBDMAP_00001785\nBDMAP_00001786\nBDMAP_00001787\nBDMAP_00001788\nBDMAP_00001789\nBDMAP_00001790\nBDMAP_00001791\nBDMAP_00001792\nBDMAP_00001793\nBDMAP_00001794\nBDMAP_00001795\nBDMAP_00001796\nBDMAP_00001797\nBDMAP_00001798\nBDMAP_00001799\nBDMAP_00001800\nBDMAP_00001801\nBDMAP_00001802\nBDMAP_00001803\nBDMAP_00001804\nBDMAP_00001805\nBDMAP_00001806\nBDMAP_00001807\nBDMAP_00001808\nBDMAP_00001809\nBDMAP_00001810\nBDMAP_00001811\nBDMAP_00001812\nBDMAP_00001813\nBDMAP_00001814\nBDMAP_00001815\nBDMAP_00001816\nBDMAP_00001817\nBDMAP_00001818\nBDMAP_00001819\nBDMAP_00001820\nBDMAP_00001821\nBDMAP_00001822\nBDMAP_00001823\nBDMAP_00001824\nBDMAP_00001825\nBDMAP_00001826\nBDMAP_00001827\nBDMAP_00001828\nBDMAP_00001829\nBDMAP_00001830\nBDMAP_00001831\nBDMAP_00001832\nBDMAP_00001833\nBDMAP_00001834\nBDMAP_00001835\nBDMAP_00001836\nBDMAP_00001837\nBDMAP_00001838\nBDMAP_00001839\nBDMAP_00001840\nBDMAP_00001841\nBDMAP_00001842\nBDMAP_00001843\nBDMAP_00001844\nBDMAP_00001845\nBDMAP_00001846\nBDMAP_00001847\nBDMAP_00001848\nBDMAP_00001849\nBDMAP_00001850\nBDMAP_00001851\nBDMAP_00001852\nBDMAP_00001853\nBDMAP_00001854\nBDMAP_00001855\nBDMAP_00001856\nBDMAP_00001857\nBDMAP_00001858\nBDMAP_00001859\nBDMAP_00001860\nBDMAP_00001861\nBDMAP_00001862\nBDMAP_00001863\nBDMAP_00001864\nBDMAP_00001865\nBDMAP_00001866\nBDMAP_00001867\nBDMAP_00001868\nBDMAP_00001869\nBDMAP_00001870\nBDMAP_00001871\nBDMAP_00001872\nBDMAP_00001873\nBDMAP_00001874\n
BDMAP_00001875\nBDMAP_00001876\nBDMAP_00001877\nBDMAP_00001878\nBDMAP_00001879\nBDMAP_00001880\nBDMAP_00001881\nBDMAP_00001882\nBDMAP_00001883\nBDMAP_00001884\nBDMAP_00001885\nBDMAP_00001886\nBDMAP_00001887\nBDMAP_00001888\nBDMAP_00001889\nBDMAP_00001890\nBDMAP_00001891\nBDMAP_00001892\nBDMAP_00001893\nBDMAP_00001894\nBDMAP_00001895\nBDMAP_00001896\nBDMAP_00001897\nBDMAP_00001898\nBDMAP_00001899\nBDMAP_00001900\nBDMAP_00001901\nBDMAP_00001902\nBDMAP_00001903\nBDMAP_00001904\nBDMAP_00001905\nBDMAP_00001906\nBDMAP_00001907\nBDMAP_00001908\nBDMAP_00001909\nBDMAP_00001910\nBDMAP_00001911\nBDMAP_00001912\nBDMAP_00001913\nBDMAP_00001914\nBDMAP_00001915\nBDMAP_00001916\nBDMAP_00001917\nBDMAP_00001918\nBDMAP_00001919\nBDMAP_00001920\nBDMAP_00001921\nBDMAP_00001922\nBDMAP_00001923\nBDMAP_00001924\nBDMAP_00001925\nBDMAP_00001926\nBDMAP_00001927\nBDMAP_00001928\nBDMAP_00001929\nBDMAP_00001930\nBDMAP_00001931\nBDMAP_00001932\nBDMAP_00001933\nBDMAP_00001934\nBDMAP_00001935\nBDMAP_00001936\nBDMAP_00001937\nBDMAP_00001938\nBDMAP_00001939\nBDMAP_00001940\nBDMAP_00001941\nBDMAP_00001942\nBDMAP_00001943\nBDMAP_00001944\nBDMAP_00001945\nBDMAP_00001946\nBDMAP_00001947\nBDMAP_00001948\nBDMAP_00001949\nBDMAP_00001950\nBDMAP_00001951\nBDMAP_00001952\nBDMAP_00001953\nBDMAP_00001954\nBDMAP_00001955\nBDMAP_00001956\nBDMAP_00001957\nBDMAP_00001958\nBDMAP_00001959\nBDMAP_00001960\nBDMAP_00001961\nBDMAP_00001962\nBDMAP_00001963\nBDMAP_00001964\nBDMAP_00001965\nBDMAP_00001966\nBDMAP_00001967\nBDMAP_00001968\nBDMAP_00001969\nBDMAP_00001970\nBDMAP_00001971\nBDMAP_00001972\nBDMAP_00001973\nBDMAP_00001974\nBDMAP_00001975\nBDMAP_00001976\nBDMAP_00001977\nBDMAP_00001978\nBDMAP_00001979\nBDMAP_00001980\nBDMAP_00001981\nBDMAP_00001982\nBDMAP_00001983\nBDMAP_00001984\nBDMAP_00001985\nBDMAP_00001986\nBDMAP_00001987\nBDMAP_00001988\nBDMAP_00001989\nBDMAP_00001990\nBDMAP_00001991\nBDMAP_00001992\nBDMAP_00001993\nBDMAP_00001994\nBDMAP_00001995\nBDMAP_00001996\nBDMAP_00001997\nBDMAP_00001998\nBDMAP_00001999\n
BDMAP_00002000\nBDMAP_00002001\nBDMAP_00002002\nBDMAP_00002003\nBDMAP_00002004\nBDMAP_00002005\nBDMAP_00002006\nBDMAP_00002007\nBDMAP_00002008\nBDMAP_00002009\nBDMAP_00002010\nBDMAP_00002011\nBDMAP_00002012\nBDMAP_00002013\nBDMAP_00002014\nBDMAP_00002015\nBDMAP_00002016\nBDMAP_00002017\nBDMAP_00002018\nBDMAP_00002019\nBDMAP_00002020\nBDMAP_00002021\nBDMAP_00002022\nBDMAP_00002023\nBDMAP_00002024\nBDMAP_00002025\nBDMAP_00002026\nBDMAP_00002027\nBDMAP_00002028\nBDMAP_00002029\nBDMAP_00002030\nBDMAP_00002031\nBDMAP_00002032\nBDMAP_00002033\nBDMAP_00002034\nBDMAP_00002035\nBDMAP_00002036\nBDMAP_00002037\nBDMAP_00002038\nBDMAP_00002039\nBDMAP_00002040\nBDMAP_00002041\nBDMAP_00002042\nBDMAP_00002043\nBDMAP_00002044\nBDMAP_00002045\nBDMAP_00002046\nBDMAP_00002047\nBDMAP_00002048\nBDMAP_00002049\nBDMAP_00002050\nBDMAP_00002051\nBDMAP_00002052\nBDMAP_00002053\nBDMAP_00002054\nBDMAP_00002055\nBDMAP_00002056\nBDMAP_00002057\nBDMAP_00002058\nBDMAP_00002059\nBDMAP_00002060\nBDMAP_00002061\nBDMAP_00002062\nBDMAP_00002063\nBDMAP_00002064\nBDMAP_00002065\nBDMAP_00002066\nBDMAP_00002067\nBDMAP_00002068\nBDMAP_00002069\nBDMAP_00002070\nBDMAP_00002071\nBDMAP_00002072\nBDMAP_00002073\nBDMAP_00002074\nBDMAP_00002075\nBDMAP_00002076\nBDMAP_00002077\nBDMAP_00002078\nBDMAP_00002079\nBDMAP_00002080\nBDMAP_00002081\nBDMAP_00002082\nBDMAP_00002083\nBDMAP_00002084\nBDMAP_00002085\nBDMAP_00002086\nBDMAP_00002087\nBDMAP_00002088\nBDMAP_00002089\nBDMAP_00002090\nBDMAP_00002091\nBDMAP_00002092\nBDMAP_00002093\nBDMAP_00002094\nBDMAP_00002095\nBDMAP_00002096\nBDMAP_00002097\nBDMAP_00002098\nBDMAP_00002099\nBDMAP_00002100\nBDMAP_00002101\nBDMAP_00002102\nBDMAP_00002103\nBDMAP_00002104\nBDMAP_00002105\nBDMAP_00002106\nBDMAP_00002107\nBDMAP_00002108\nBDMAP_00002109\nBDMAP_00002110\nBDMAP_00002111\nBDMAP_00002112\nBDMAP_00002113\nBDMAP_00002114\nBDMAP_00002115\nBDMAP_00002116\nBDMAP_00002117\nBDMAP_00002118\nBDMAP_00002119\nBDMAP_00002120\nBDMAP_00002121\nBDMAP_00002122\nBDMAP_00002123\nBDMAP_00002124\n
BDMAP_00002125\nBDMAP_00002126\nBDMAP_00002127\nBDMAP_00002128\nBDMAP_00002129\nBDMAP_00002130\nBDMAP_00002131\nBDMAP_00002132\nBDMAP_00002133\nBDMAP_00002134\nBDMAP_00002135\nBDMAP_00002136\nBDMAP_00002137\nBDMAP_00002138\nBDMAP_00002139\nBDMAP_00002140\nBDMAP_00002141\nBDMAP_00002142\nBDMAP_00002143\nBDMAP_00002144\nBDMAP_00002145\nBDMAP_00002146\nBDMAP_00002147\nBDMAP_00002148\nBDMAP_00002149\nBDMAP_00002150\nBDMAP_00002151\nBDMAP_00002152\nBDMAP_00002153\nBDMAP_00002154\nBDMAP_00002155\nBDMAP_00002156\nBDMAP_00002157\nBDMAP_00002158\nBDMAP_00002159\nBDMAP_00002160\nBDMAP_00002161\nBDMAP_00002162\nBDMAP_00002163\nBDMAP_00002164\nBDMAP_00002165\nBDMAP_00002166\nBDMAP_00002167\nBDMAP_00002168\nBDMAP_00002169\nBDMAP_00002170\nBDMAP_00002171\nBDMAP_00002172\nBDMAP_00002173\nBDMAP_00002174\nBDMAP_00002175\nBDMAP_00002176\nBDMAP_00002177\nBDMAP_00002178\nBDMAP_00002179\nBDMAP_00002180\nBDMAP_00002181\nBDMAP_00002182\nBDMAP_00002183\nBDMAP_00002184\nBDMAP_00002185\nBDMAP_00002186\nBDMAP_00002187\nBDMAP_00002188\nBDMAP_00002189\nBDMAP_00002190\nBDMAP_00002191\nBDMAP_00002192\nBDMAP_00002193\nBDMAP_00002194\nBDMAP_00002195\nBDMAP_00002196\nBDMAP_00002197\nBDMAP_00002198\nBDMAP_00002199\nBDMAP_00002200\nBDMAP_00002201\nBDMAP_00002202\nBDMAP_00002203\nBDMAP_00002204\nBDMAP_00002205\nBDMAP_00002206\nBDMAP_00002207\nBDMAP_00002208\nBDMAP_00002209\nBDMAP_00002210\nBDMAP_00002211\nBDMAP_00002212\nBDMAP_00002213\nBDMAP_00002214\nBDMAP_00002215\nBDMAP_00002216\nBDMAP_00002217\nBDMAP_00002218\nBDMAP_00002219\nBDMAP_00002220\nBDMAP_00002221\nBDMAP_00002222\nBDMAP_00002223\nBDMAP_00002224\nBDMAP_00002225\nBDMAP_00002226\nBDMAP_00002227\nBDMAP_00002228\nBDMAP_00002229\nBDMAP_00002230\nBDMAP_00002231\nBDMAP_00002232\nBDMAP_00002233\nBDMAP_00002234\nBDMAP_00002235\nBDMAP_00002236\nBDMAP_00002237\nBDMAP_00002238\nBDMAP_00002239\nBDMAP_00002240\nBDMAP_00002241\nBDMAP_00002242\nBDMAP_00002243\nBDMAP_00002244\nBDMAP_00002245\nBDMAP_00002246\nBDMAP_00002247\nBDMAP_00002248\nBDMAP_00002249\n
BDMAP_00002250\nBDMAP_00002251\nBDMAP_00002252\nBDMAP_00002253\nBDMAP_00002254\nBDMAP_00002255\nBDMAP_00002256\nBDMAP_00002257\nBDMAP_00002258\nBDMAP_00002259\nBDMAP_00002260\nBDMAP_00002261\nBDMAP_00002262\nBDMAP_00002263\nBDMAP_00002264\nBDMAP_00002265\nBDMAP_00002266\nBDMAP_00002267\nBDMAP_00002268\nBDMAP_00002269\nBDMAP_00002270\nBDMAP_00002271\nBDMAP_00002272\nBDMAP_00002273\nBDMAP_00002274\nBDMAP_00002275\nBDMAP_00002276\nBDMAP_00002277\nBDMAP_00002278\nBDMAP_00002279\nBDMAP_00002280\nBDMAP_00002281\nBDMAP_00002282\nBDMAP_00002283\nBDMAP_00002284\nBDMAP_00002285\nBDMAP_00002286\nBDMAP_00002287\nBDMAP_00002288\nBDMAP_00002289\nBDMAP_00002290\nBDMAP_00002291\nBDMAP_00002292\nBDMAP_00002293\nBDMAP_00002294\nBDMAP_00002295\nBDMAP_00002296\nBDMAP_00002297\nBDMAP_00002298\nBDMAP_00002299\nBDMAP_00002300\nBDMAP_00002301\nBDMAP_00002302\nBDMAP_00002303\nBDMAP_00002304\nBDMAP_00002305\nBDMAP_00002306\nBDMAP_00002307\nBDMAP_00002308\nBDMAP_00002309\nBDMAP_00002310\nBDMAP_00002311\nBDMAP_00002312\nBDMAP_00002313\nBDMAP_00002314\nBDMAP_00002315\nBDMAP_00002316\nBDMAP_00002317\nBDMAP_00002318\nBDMAP_00002319\nBDMAP_00002320\nBDMAP_00002321\nBDMAP_00002322\nBDMAP_00002323\nBDMAP_00002324\nBDMAP_00002325\nBDMAP_00002326\nBDMAP_00002327\nBDMAP_00002328\nBDMAP_00002329\nBDMAP_00002330\nBDMAP_00002331\nBDMAP_00002332\nBDMAP_00002333\nBDMAP_00002334\nBDMAP_00002335\nBDMAP_00002336\nBDMAP_00002337\nBDMAP_00002338\nBDMAP_00002339\nBDMAP_00002340\nBDMAP_00002341\nBDMAP_00002342\nBDMAP_00002343\nBDMAP_00002344\nBDMAP_00002345\nBDMAP_00002346\nBDMAP_00002347\nBDMAP_00002348\nBDMAP_00002349\nBDMAP_00002350\nBDMAP_00002351\nBDMAP_00002352\nBDMAP_00002353\nBDMAP_00002354\nBDMAP_00002355\nBDMAP_00002356\nBDMAP_00002357\nBDMAP_00002358\nBDMAP_00002359\nBDMAP_00002360\nBDMAP_00002361\nBDMAP_00002362\nBDMAP_00002363\nBDMAP_00002364\nBDMAP_00002365\nBDMAP_00002366\nBDMAP_00002367\nBDMAP_00002368\nBDMAP_00002369\nBDMAP_00002370\nBDMAP_00002371\nBDMAP_00002372\nBDMAP_00002373\nBDMAP_00002374\n
BDMAP_00002375\nBDMAP_00002376\nBDMAP_00002377\nBDMAP_00002378\nBDMAP_00002379\nBDMAP_00002380\nBDMAP_00002381\nBDMAP_00002382\nBDMAP_00002383\nBDMAP_00002384\nBDMAP_00002385\nBDMAP_00002386\nBDMAP_00002387\nBDMAP_00002388\nBDMAP_00002389\nBDMAP_00002390\nBDMAP_00002391\nBDMAP_00002392\nBDMAP_00002393\nBDMAP_00002394\nBDMAP_00002395\nBDMAP_00002396\nBDMAP_00002397\nBDMAP_00002398\nBDMAP_00002399\nBDMAP_00002400\nBDMAP_00002401\nBDMAP_00002402\nBDMAP_00002403\nBDMAP_00002404\nBDMAP_00002405\nBDMAP_00002406\nBDMAP_00002407\nBDMAP_00002408\nBDMAP_00002409\nBDMAP_00002410\nBDMAP_00002411\nBDMAP_00002412\nBDMAP_00002413\nBDMAP_00002414\nBDMAP_00002415\nBDMAP_00002416\nBDMAP_00002417\nBDMAP_00002418\nBDMAP_00002419\nBDMAP_00002420\nBDMAP_00002421\nBDMAP_00002422\nBDMAP_00002423\nBDMAP_00002424\nBDMAP_00002425\nBDMAP_00002426\nBDMAP_00002427\nBDMAP_00002428\nBDMAP_00002429\nBDMAP_00002430\nBDMAP_00002431\nBDMAP_00002432\nBDMAP_00002433\nBDMAP_00002434\nBDMAP_00002435\nBDMAP_00002436\nBDMAP_00002437\nBDMAP_00002438\nBDMAP_00002439\nBDMAP_00002440\nBDMAP_00002441\nBDMAP_00002442\nBDMAP_00002443\nBDMAP_00002444\nBDMAP_00002445\nBDMAP_00002446\nBDMAP_00002447\nBDMAP_00002448\nBDMAP_00002449\nBDMAP_00002450\nBDMAP_00002451\nBDMAP_00002452\nBDMAP_00002453\nBDMAP_00002454\nBDMAP_00002455\nBDMAP_00002456\nBDMAP_00002457\nBDMAP_00002458\nBDMAP_00002459\nBDMAP_00002460\nBDMAP_00002461\nBDMAP_00002462\nBDMAP_00002463\nBDMAP_00002464\nBDMAP_00002465\nBDMAP_00002466\nBDMAP_00002467\nBDMAP_00002468\nBDMAP_00002469\nBDMAP_00002470\nBDMAP_00002471\nBDMAP_00002472\nBDMAP_00002473\nBDMAP_00002474\nBDMAP_00002475\nBDMAP_00002476\nBDMAP_00002477\nBDMAP_00002478\nBDMAP_00002479\nBDMAP_00002480\nBDMAP_00002481\nBDMAP_00002482\nBDMAP_00002483\nBDMAP_00002484\nBDMAP_00002485\nBDMAP_00002486\nBDMAP_00002487\nBDMAP_00002488\nBDMAP_00002489\nBDMAP_00002490\nBDMAP_00002491\nBDMAP_00002492\nBDMAP_00002493\nBDMAP_00002494\nBDMAP_00002495\nBDMAP_00002496\nBDMAP_00002497\nBDMAP_00002498\nBDMAP_00002499\n
BDMAP_00002500\nBDMAP_00002501\nBDMAP_00002502\nBDMAP_00002503\nBDMAP_00002504\nBDMAP_00002505\nBDMAP_00002506\nBDMAP_00002507\nBDMAP_00002508\nBDMAP_00002509\nBDMAP_00002510\nBDMAP_00002511\nBDMAP_00002512\nBDMAP_00002513\nBDMAP_00002514\nBDMAP_00002515\nBDMAP_00002516\nBDMAP_00002517\nBDMAP_00002518\nBDMAP_00002519\nBDMAP_00002520\nBDMAP_00002521\nBDMAP_00002522\nBDMAP_00002523\nBDMAP_00002524\nBDMAP_00002525\nBDMAP_00002526\nBDMAP_00002527\nBDMAP_00002528\nBDMAP_00002529\nBDMAP_00002530\nBDMAP_00002531\nBDMAP_00002532\nBDMAP_00002533\nBDMAP_00002534\nBDMAP_00002535\nBDMAP_00002536\nBDMAP_00002537\nBDMAP_00002538\nBDMAP_00002539\nBDMAP_00002540\nBDMAP_00002541\nBDMAP_00002542\nBDMAP_00002543\nBDMAP_00002544\nBDMAP_00002545\nBDMAP_00002546\nBDMAP_00002547\nBDMAP_00002548\nBDMAP_00002549\nBDMAP_00002550\nBDMAP_00002551\nBDMAP_00002552\nBDMAP_00002553\nBDMAP_00002554\nBDMAP_00002555\nBDMAP_00002556\nBDMAP_00002557\nBDMAP_00002558\nBDMAP_00002559\nBDMAP_00002560\nBDMAP_00002561\nBDMAP_00002562\nBDMAP_00002563\nBDMAP_00002564\nBDMAP_00002565\nBDMAP_00002566\nBDMAP_00002567\nBDMAP_00002568\nBDMAP_00002569\nBDMAP_00002570\nBDMAP_00002571\nBDMAP_00002572\nBDMAP_00002573\nBDMAP_00002574\nBDMAP_00002575\nBDMAP_00002576\nBDMAP_00002577\nBDMAP_00002578\nBDMAP_00002579\nBDMAP_00002580\nBDMAP_00002581\nBDMAP_00002582\nBDMAP_00002583\nBDMAP_00002584\nBDMAP_00002585\nBDMAP_00002586\nBDMAP_00002587\nBDMAP_00002588\nBDMAP_00002589\nBDMAP_00002590\nBDMAP_00002591\nBDMAP_00002592\nBDMAP_00002593\nBDMAP_00002594\nBDMAP_00002595\nBDMAP_00002596\nBDMAP_00002597\nBDMAP_00002598\nBDMAP_00002599\nBDMAP_00002600\nBDMAP_00002601\nBDMAP_00002602\nBDMAP_00002603\nBDMAP_00002604\nBDMAP_00002605\nBDMAP_00002606\nBDMAP_00002607\nBDMAP_00002608\nBDMAP_00002609\nBDMAP_00002610\nBDMAP_00002611\nBDMAP_00002612\nBDMAP_00002613\nBDMAP_00002614\nBDMAP_00002615\nBDMAP_00002616\nBDMAP_00002617\nBDMAP_00002618\nBDMAP_00002619\nBDMAP_00002620\nBDMAP_00002621\nBDMAP_00002622\nBDMAP_00002623\nBDMAP_00002624\n
BDMAP_00002625\nBDMAP_00002626\nBDMAP_00002627\nBDMAP_00002628\nBDMAP_00002629\nBDMAP_00002630\nBDMAP_00002631\nBDMAP_00002632\nBDMAP_00002633\nBDMAP_00002634\nBDMAP_00002635\nBDMAP_00002636\nBDMAP_00002637\nBDMAP_00002638\nBDMAP_00002639\nBDMAP_00002640\nBDMAP_00002641\nBDMAP_00002642\nBDMAP_00002643\nBDMAP_00002644\nBDMAP_00002645\nBDMAP_00002646\nBDMAP_00002647\nBDMAP_00002648\nBDMAP_00002649\nBDMAP_00002650\nBDMAP_00002651\nBDMAP_00002652\nBDMAP_00002653\nBDMAP_00002654\nBDMAP_00002655\nBDMAP_00002656\nBDMAP_00002657\nBDMAP_00002658\nBDMAP_00002659\nBDMAP_00002660\nBDMAP_00002661\nBDMAP_00002662\nBDMAP_00002663\nBDMAP_00002664\nBDMAP_00002665\nBDMAP_00002666\nBDMAP_00002667\nBDMAP_00002668\nBDMAP_00002669\nBDMAP_00002670\nBDMAP_00002671\nBDMAP_00002672\nBDMAP_00002673\nBDMAP_00002674\nBDMAP_00002675\nBDMAP_00002676\nBDMAP_00002677\nBDMAP_00002678\nBDMAP_00002679\nBDMAP_00002680\nBDMAP_00002681\nBDMAP_00002682\nBDMAP_00002683\nBDMAP_00002684\nBDMAP_00002685\nBDMAP_00002686\nBDMAP_00002687\nBDMAP_00002688\nBDMAP_00002689\nBDMAP_00002690\nBDMAP_00002691\nBDMAP_00002692\nBDMAP_00002693\nBDMAP_00002694\nBDMAP_00002695\nBDMAP_00002696\nBDMAP_00002697\nBDMAP_00002698\nBDMAP_00002699\nBDMAP_00002700\nBDMAP_00002701\nBDMAP_00002702\nBDMAP_00002703\nBDMAP_00002704\nBDMAP_00002705\nBDMAP_00002706\nBDMAP_00002707\nBDMAP_00002708\nBDMAP_00002709\nBDMAP_00002710\nBDMAP_00002711\nBDMAP_00002712\nBDMAP_00002713\nBDMAP_00002714\nBDMAP_00002715\nBDMAP_00002716\nBDMAP_00002717\nBDMAP_00002718\nBDMAP_00002719\nBDMAP_00002720\nBDMAP_00002721\nBDMAP_00002722\nBDMAP_00002723\nBDMAP_00002724\nBDMAP_00002725\nBDMAP_00002726\nBDMAP_00002727\nBDMAP_00002728\nBDMAP_00002729\nBDMAP_00002730\nBDMAP_00002731\nBDMAP_00002732\nBDMAP_00002733\nBDMAP_00002734\nBDMAP_00002735\nBDMAP_00002736\nBDMAP_00002737\nBDMAP_00002738\nBDMAP_00002739\nBDMAP_00002740\nBDMAP_00002741\nBDMAP_00002742\nBDMAP_00002743\nBDMAP_00002744\nBDMAP_00002745\nBDMAP_00002746\nBDMAP_00002747\nBDMAP_00002748\nBDMAP_00002749\n
BDMAP_00002750\nBDMAP_00002751\nBDMAP_00002752\nBDMAP_00002753\nBDMAP_00002754\nBDMAP_00002755\nBDMAP_00002756\nBDMAP_00002757\nBDMAP_00002758\nBDMAP_00002759\nBDMAP_00002760\nBDMAP_00002761\nBDMAP_00002762\nBDMAP_00002763\nBDMAP_00002764\nBDMAP_00002765\nBDMAP_00002766\nBDMAP_00002767\nBDMAP_00002768\nBDMAP_00002769\nBDMAP_00002770\nBDMAP_00002771\nBDMAP_00002772\nBDMAP_00002773\nBDMAP_00002774\nBDMAP_00002775\nBDMAP_00002776\nBDMAP_00002777\nBDMAP_00002778\nBDMAP_00002779\nBDMAP_00002780\nBDMAP_00002781\nBDMAP_00002782\nBDMAP_00002783\nBDMAP_00002784\nBDMAP_00002785\nBDMAP_00002786\nBDMAP_00002787\nBDMAP_00002788\nBDMAP_00002789\nBDMAP_00002790\nBDMAP_00002791\nBDMAP_00002792\nBDMAP_00002793\nBDMAP_00002794\nBDMAP_00002795\nBDMAP_00002796\nBDMAP_00002797\nBDMAP_00002798\nBDMAP_00002799\nBDMAP_00002800\nBDMAP_00002801\nBDMAP_00002802\nBDMAP_00002803\nBDMAP_00002804\nBDMAP_00002805\nBDMAP_00002806\nBDMAP_00002807\nBDMAP_00002808\nBDMAP_00002809\nBDMAP_00002810\nBDMAP_00002811\nBDMAP_00002812\nBDMAP_00002813\nBDMAP_00002814\nBDMAP_00002815\nBDMAP_00002816\nBDMAP_00002817\nBDMAP_00002818\nBDMAP_00002819\nBDMAP_00002820\nBDMAP_00002821\nBDMAP_00002822\nBDMAP_00002823\nBDMAP_00002824\nBDMAP_00002825\nBDMAP_00002826\nBDMAP_00002827\nBDMAP_00002828\nBDMAP_00002829\nBDMAP_00002830\nBDMAP_00002831\nBDMAP_00002832\nBDMAP_00002833\nBDMAP_00002834\nBDMAP_00002835\nBDMAP_00002836\nBDMAP_00002837\nBDMAP_00002838\nBDMAP_00002839\nBDMAP_00002840\nBDMAP_00002841\nBDMAP_00002842\nBDMAP_00002843\nBDMAP_00002844\nBDMAP_00002845\nBDMAP_00002846\nBDMAP_00002847\nBDMAP_00002848\nBDMAP_00002849\nBDMAP_00002850\nBDMAP_00002851\nBDMAP_00002852\nBDMAP_00002853\nBDMAP_00002854\nBDMAP_00002855\nBDMAP_00002856\nBDMAP_00002857\nBDMAP_00002858\nBDMAP_00002859\nBDMAP_00002860\nBDMAP_00002861\nBDMAP_00002862\nBDMAP_00002863\nBDMAP_00002864\nBDMAP_00002865\nBDMAP_00002866\nBDMAP_00002867\nBDMAP_00002868\nBDMAP_00002869\nBDMAP_00002870\nBDMAP_00002871\nBDMAP_00002872\nBDMAP_00002873\nBDMAP_00002874\n
BDMAP_00002875\nBDMAP_00002876\nBDMAP_00002877\nBDMAP_00002878\nBDMAP_00002879\nBDMAP_00002880\nBDMAP_00002881\nBDMAP_00002882\nBDMAP_00002883\nBDMAP_00002884\nBDMAP_00002885\nBDMAP_00002886\nBDMAP_00002887\nBDMAP_00002888\nBDMAP_00002889\nBDMAP_00002890\nBDMAP_00002891\nBDMAP_00002892\nBDMAP_00002893\nBDMAP_00002894\nBDMAP_00002895\nBDMAP_00002896\nBDMAP_00002897\nBDMAP_00002898\nBDMAP_00002899\nBDMAP_00002900\nBDMAP_00002901\nBDMAP_00002902\nBDMAP_00002903\nBDMAP_00002904\nBDMAP_00002905\nBDMAP_00002906\nBDMAP_00002907\nBDMAP_00002908\nBDMAP_00002909\nBDMAP_00002910\nBDMAP_00002911\nBDMAP_00002912\nBDMAP_00002913\nBDMAP_00002914\nBDMAP_00002915\nBDMAP_00002916\nBDMAP_00002917\nBDMAP_00002918\nBDMAP_00002919\nBDMAP_00002920\nBDMAP_00002921\nBDMAP_00002922\nBDMAP_00002923\nBDMAP_00002924\nBDMAP_00002925\nBDMAP_00002926\nBDMAP_00002927\nBDMAP_00002928\nBDMAP_00002929\nBDMAP_00002930\nBDMAP_00002931\nBDMAP_00002932\nBDMAP_00002933\nBDMAP_00002934\nBDMAP_00002935\nBDMAP_00002936\nBDMAP_00002937\nBDMAP_00002938\nBDMAP_00002939\nBDMAP_00002940\nBDMAP_00002941\nBDMAP_00002942\nBDMAP_00002943\nBDMAP_00002944\nBDMAP_00002945\nBDMAP_00002946\nBDMAP_00002947\nBDMAP_00002948\nBDMAP_00002949\nBDMAP_00002950\nBDMAP_00002951\nBDMAP_00002952\nBDMAP_00002953\nBDMAP_00002954\nBDMAP_00002955\nBDMAP_00002956\nBDMAP_00002957\nBDMAP_00002958\nBDMAP_00002959\nBDMAP_00002960\nBDMAP_00002961\nBDMAP_00002962\nBDMAP_00002963\nBDMAP_00002964\nBDMAP_00002965\nBDMAP_00002966\nBDMAP_00002967\nBDMAP_00002968\nBDMAP_00002969\nBDMAP_00002970\nBDMAP_00002971\nBDMAP_00002972\nBDMAP_00002973\nBDMAP_00002974\nBDMAP_00002975\nBDMAP_00002976\nBDMAP_00002977\nBDMAP_00002978\nBDMAP_00002979\nBDMAP_00002980\nBDMAP_00002981\nBDMAP_00002982\nBDMAP_00002983\nBDMAP_00002984\nBDMAP_00002985\nBDMAP_00002986\nBDMAP_00002987\nBDMAP_00002988\nBDMAP_00002989\nBDMAP_00002990\nBDMAP_00002991\nBDMAP_00002992\nBDMAP_00002993\nBDMAP_00002994\nBDMAP_00002995\nBDMAP_00002996\nBDMAP_00002997\nBDMAP_00002998\nBDMAP_00002999\n
BDMAP_00003000\nBDMAP_00003001\nBDMAP_00003002\nBDMAP_00003003\nBDMAP_00003004\nBDMAP_00003005\nBDMAP_00003006\nBDMAP_00003007\nBDMAP_00003008\nBDMAP_00003009\nBDMAP_00003010\nBDMAP_00003011\nBDMAP_00003012\nBDMAP_00003013\nBDMAP_00003014\nBDMAP_00003015\nBDMAP_00003016\nBDMAP_00003017\nBDMAP_00003018\nBDMAP_00003019\nBDMAP_00003020\nBDMAP_00003021\nBDMAP_00003022\nBDMAP_00003023\nBDMAP_00003024\nBDMAP_00003025\nBDMAP_00003026\nBDMAP_00003027\nBDMAP_00003028\nBDMAP_00003029\nBDMAP_00003030\nBDMAP_00003031\nBDMAP_00003032\nBDMAP_00003033\nBDMAP_00003034\nBDMAP_00003035\nBDMAP_00003036\nBDMAP_00003037\nBDMAP_00003038\nBDMAP_00003039\nBDMAP_00003040\nBDMAP_00003041\nBDMAP_00003042\nBDMAP_00003043\nBDMAP_00003044\nBDMAP_00003045\nBDMAP_00003046\nBDMAP_00003047\nBDMAP_00003048\nBDMAP_00003049\nBDMAP_00003050\nBDMAP_00003051\nBDMAP_00003052\nBDMAP_00003053\nBDMAP_00003054\nBDMAP_00003055\nBDMAP_00003056\nBDMAP_00003057\nBDMAP_00003058\nBDMAP_00003059\nBDMAP_00003060\nBDMAP_00003061\nBDMAP_00003062\nBDMAP_00003063\nBDMAP_00003064\nBDMAP_00003065\nBDMAP_00003066\nBDMAP_00003067\nBDMAP_00003068\nBDMAP_00003069\nBDMAP_00003070\nBDMAP_00003071\nBDMAP_00003072\nBDMAP_00003073\nBDMAP_00003074\nBDMAP_00003075\nBDMAP_00003076\nBDMAP_00003077\nBDMAP_00003078\nBDMAP_00003079\nBDMAP_00003080\nBDMAP_00003081\nBDMAP_00003082\nBDMAP_00003083\nBDMAP_00003084\nBDMAP_00003085\nBDMAP_00003086\nBDMAP_00003087\nBDMAP_00003088\nBDMAP_00003089\nBDMAP_00003090\nBDMAP_00003091\nBDMAP_00003092\nBDMAP_00003093\nBDMAP_00003094\nBDMAP_00003095\nBDMAP_00003096\nBDMAP_00003097\nBDMAP_00003098\nBDMAP_00003099\nBDMAP_00003100\nBDMAP_00003101\nBDMAP_00003102\nBDMAP_00003103\nBDMAP_00003104\nBDMAP_00003105\nBDMAP_00003106\nBDMAP_00003107\nBDMAP_00003108\nBDMAP_00003109\nBDMAP_00003110\nBDMAP_00003111\nBDMAP_00003112\nBDMAP_00003113\nBDMAP_00003114\nBDMAP_00003115\nBDMAP_00003116\nBDMAP_00003117\nBDMAP_00003118\nBDMAP_00003119\nBDMAP_00003120\nBDMAP_00003121\nBDMAP_00003122\nBDMAP_00003123\nBDMAP_00003124\n
BDMAP_00003125\nBDMAP_00003126\nBDMAP_00003127\nBDMAP_00003128\nBDMAP_00003129\nBDMAP_00003130\nBDMAP_00003131\nBDMAP_00003132\nBDMAP_00003133\nBDMAP_00003134\nBDMAP_00003135\nBDMAP_00003136\nBDMAP_00003137\nBDMAP_00003138\nBDMAP_00003139\nBDMAP_00003140\nBDMAP_00003141\nBDMAP_00003142\nBDMAP_00003143\nBDMAP_00003144\nBDMAP_00003145\nBDMAP_00003146\nBDMAP_00003147\nBDMAP_00003148\nBDMAP_00003149\nBDMAP_00003150\nBDMAP_00003151\nBDMAP_00003152\nBDMAP_00003153\nBDMAP_00003154\nBDMAP_00003155\nBDMAP_00003156\nBDMAP_00003157\nBDMAP_00003158\nBDMAP_00003159\nBDMAP_00003160\nBDMAP_00003161\nBDMAP_00003162\nBDMAP_00003163\nBDMAP_00003164\nBDMAP_00003165\nBDMAP_00003166\nBDMAP_00003167\nBDMAP_00003168\nBDMAP_00003169\nBDMAP_00003170\nBDMAP_00003171\nBDMAP_00003172\nBDMAP_00003173\nBDMAP_00003174\nBDMAP_00003175\nBDMAP_00003176\nBDMAP_00003177\nBDMAP_00003178\nBDMAP_00003179\nBDMAP_00003180\nBDMAP_00003181\nBDMAP_00003182\nBDMAP_00003183\nBDMAP_00003184\nBDMAP_00003185\nBDMAP_00003186\nBDMAP_00003187\nBDMAP_00003188\nBDMAP_00003189\nBDMAP_00003190\nBDMAP_00003191\nBDMAP_00003192\nBDMAP_00003193\nBDMAP_00003194\nBDMAP_00003195\nBDMAP_00003196\nBDMAP_00003197\nBDMAP_00003198\nBDMAP_00003199\nBDMAP_00003200\nBDMAP_00003201\nBDMAP_00003202\nBDMAP_00003203\nBDMAP_00003204\nBDMAP_00003205\nBDMAP_00003206\nBDMAP_00003207\nBDMAP_00003208\nBDMAP_00003209\nBDMAP_00003210\nBDMAP_00003211\nBDMAP_00003212\nBDMAP_00003213\nBDMAP_00003214\nBDMAP_00003215\nBDMAP_00003216\nBDMAP_00003217\nBDMAP_00003218\nBDMAP_00003219\nBDMAP_00003220\nBDMAP_00003221\nBDMAP_00003222\nBDMAP_00003223\nBDMAP_00003224\nBDMAP_00003225\nBDMAP_00003226\nBDMAP_00003227\nBDMAP_00003228\nBDMAP_00003229\nBDMAP_00003230\nBDMAP_00003231\nBDMAP_00003232\nBDMAP_00003233\nBDMAP_00003234\nBDMAP_00003235\nBDMAP_00003236\nBDMAP_00003237\nBDMAP_00003238\nBDMAP_00003239\nBDMAP_00003240\nBDMAP_00003241\nBDMAP_00003242\nBDMAP_00003243\nBDMAP_00003244\nBDMAP_00003245\nBDMAP_00003246\nBDMAP_00003247\nBDMAP_00003248\nBDMAP_00003249\n
BDMAP_00003250\nBDMAP_00003251\nBDMAP_00003252\nBDMAP_00003253\nBDMAP_00003254\nBDMAP_00003255\nBDMAP_00003256\nBDMAP_00003257\nBDMAP_00003258\nBDMAP_00003259\nBDMAP_00003260\nBDMAP_00003261\nBDMAP_00003262\nBDMAP_00003263\nBDMAP_00003264\nBDMAP_00003265\nBDMAP_00003266\nBDMAP_00003267\nBDMAP_00003268\nBDMAP_00003269\nBDMAP_00003270\nBDMAP_00003271\nBDMAP_00003272\nBDMAP_00003273\nBDMAP_00003274\nBDMAP_00003275\nBDMAP_00003276\nBDMAP_00003277\nBDMAP_00003278\nBDMAP_00003279\nBDMAP_00003280\nBDMAP_00003281\nBDMAP_00003282\nBDMAP_00003283\nBDMAP_00003284\nBDMAP_00003285\nBDMAP_00003286\nBDMAP_00003287\nBDMAP_00003288\nBDMAP_00003289\nBDMAP_00003290\nBDMAP_00003291\nBDMAP_00003292\nBDMAP_00003293\nBDMAP_00003294\nBDMAP_00003295\nBDMAP_00003296\nBDMAP_00003297\nBDMAP_00003298\nBDMAP_00003299\nBDMAP_00003300\nBDMAP_00003301\nBDMAP_00003302\nBDMAP_00003303\nBDMAP_00003304\nBDMAP_00003305\nBDMAP_00003306\nBDMAP_00003307\nBDMAP_00003308\nBDMAP_00003309\nBDMAP_00003310\nBDMAP_00003311\nBDMAP_00003312\nBDMAP_00003313\nBDMAP_00003314\nBDMAP_00003315\nBDMAP_00003316\nBDMAP_00003317\nBDMAP_00003318\nBDMAP_00003319\nBDMAP_00003320\nBDMAP_00003321\nBDMAP_00003322\nBDMAP_00003323\nBDMAP_00003324\nBDMAP_00003325\nBDMAP_00003326\nBDMAP_00003327\nBDMAP_00003328\nBDMAP_00003329\nBDMAP_00003330\nBDMAP_00003331\nBDMAP_00003332\nBDMAP_00003333\nBDMAP_00003334\nBDMAP_00003335\nBDMAP_00003336\nBDMAP_00003337\nBDMAP_00003338\nBDMAP_00003339\nBDMAP_00003340\nBDMAP_00003341\nBDMAP_00003342\nBDMAP_00003343\nBDMAP_00003344\nBDMAP_00003345\nBDMAP_00003346\nBDMAP_00003347\nBDMAP_00003348\nBDMAP_00003349\nBDMAP_00003350\nBDMAP_00003351\nBDMAP_00003352\nBDMAP_00003353\nBDMAP_00003354\nBDMAP_00003355\nBDMAP_00003356\nBDMAP_00003357\nBDMAP_00003358\nBDMAP_00003359\nBDMAP_00003360\nBDMAP_00003361\nBDMAP_00003362\nBDMAP_00003363\nBDMAP_00003364\nBDMAP_00003365\nBDMAP_00003366\nBDMAP_00003367\nBDMAP_00003368\nBDMAP_00003369\nBDMAP_00003370\nBDMAP_00003371\nBDMAP_00003372\nBDMAP_00003373\nBDMAP_00003374\n
BDMAP_00003375\nBDMAP_00003376\nBDMAP_00003377\nBDMAP_00003378\nBDMAP_00003379\nBDMAP_00003380\nBDMAP_00003381\nBDMAP_00003382\nBDMAP_00003383\nBDMAP_00003384\nBDMAP_00003385\nBDMAP_00003386\nBDMAP_00003387\nBDMAP_00003388\nBDMAP_00003389\nBDMAP_00003390\nBDMAP_00003391\nBDMAP_00003392\nBDMAP_00003393\nBDMAP_00003394\nBDMAP_00003395\nBDMAP_00003396\nBDMAP_00003397\nBDMAP_00003398\nBDMAP_00003399\nBDMAP_00003400\nBDMAP_00003401\nBDMAP_00003402\nBDMAP_00003403\nBDMAP_00003404\nBDMAP_00003405\nBDMAP_00003406\nBDMAP_00003407\nBDMAP_00003408\nBDMAP_00003409\nBDMAP_00003410\nBDMAP_00003411\nBDMAP_00003412\nBDMAP_00003413\nBDMAP_00003414\nBDMAP_00003415\nBDMAP_00003416\nBDMAP_00003417\nBDMAP_00003418\nBDMAP_00003419\nBDMAP_00003420\nBDMAP_00003421\nBDMAP_00003422\nBDMAP_00003423\nBDMAP_00003424\nBDMAP_00003425\nBDMAP_00003426\nBDMAP_00003427\nBDMAP_00003428\nBDMAP_00003429\nBDMAP_00003430\nBDMAP_00003431\nBDMAP_00003432\nBDMAP_00003433\nBDMAP_00003434\nBDMAP_00003435\nBDMAP_00003436\nBDMAP_00003437\nBDMAP_00003438\nBDMAP_00003439\nBDMAP_00003440\nBDMAP_00003441\nBDMAP_00003442\nBDMAP_00003443\nBDMAP_00003444\nBDMAP_00003445\nBDMAP_00003446\nBDMAP_00003447\nBDMAP_00003448\nBDMAP_00003449\nBDMAP_00003450\nBDMAP_00003451\nBDMAP_00003452\nBDMAP_00003453\nBDMAP_00003454\nBDMAP_00003455\nBDMAP_00003456\nBDMAP_00003457\nBDMAP_00003458\nBDMAP_00003459\nBDMAP_00003460\nBDMAP_00003461\nBDMAP_00003462\nBDMAP_00003463\nBDMAP_00003464\nBDMAP_00003465\nBDMAP_00003466\nBDMAP_00003467\nBDMAP_00003468\nBDMAP_00003469\nBDMAP_00003470\nBDMAP_00003471\nBDMAP_00003472\nBDMAP_00003473\nBDMAP_00003474\nBDMAP_00003475\nBDMAP_00003476\nBDMAP_00003477\nBDMAP_00003478\nBDMAP_00003479\nBDMAP_00003480\nBDMAP_00003481\nBDMAP_00003482\nBDMAP_00003483\nBDMAP_00003484\nBDMAP_00003485\nBDMAP_00003486\nBDMAP_00003487\nBDMAP_00003488\nBDMAP_00003489\nBDMAP_00003490\nBDMAP_00003491\nBDMAP_00003492\nBDMAP_00003493\nBDMAP_00003494\nBDMAP_00003495\nBDMAP_00003496\nBDMAP_00003497\nBDMAP_00003498\nBDMAP_00003499\n
BDMAP_00003500\nBDMAP_00003501\nBDMAP_00003502\nBDMAP_00003503\nBDMAP_00003504\nBDMAP_00003505\nBDMAP_00003506\nBDMAP_00003507\nBDMAP_00003508\nBDMAP_00003509\nBDMAP_00003510\nBDMAP_00003511\nBDMAP_00003512\nBDMAP_00003513\nBDMAP_00003514\nBDMAP_00003515\nBDMAP_00003516\nBDMAP_00003517\nBDMAP_00003518\nBDMAP_00003519\nBDMAP_00003520\nBDMAP_00003521\nBDMAP_00003522\nBDMAP_00003523\nBDMAP_00003524\nBDMAP_00003525\nBDMAP_00003526\nBDMAP_00003527\nBDMAP_00003528\nBDMAP_00003529\nBDMAP_00003530\nBDMAP_00003531\nBDMAP_00003532\nBDMAP_00003533\nBDMAP_00003534\nBDMAP_00003535\nBDMAP_00003536\nBDMAP_00003537\nBDMAP_00003538\nBDMAP_00003539\nBDMAP_00003540\nBDMAP_00003541\nBDMAP_00003542\nBDMAP_00003543\nBDMAP_00003544\nBDMAP_00003545\nBDMAP_00003546\nBDMAP_00003547\nBDMAP_00003548\nBDMAP_00003549\nBDMAP_00003550\nBDMAP_00003551\nBDMAP_00003552\nBDMAP_00003553\nBDMAP_00003554\nBDMAP_00003555\nBDMAP_00003556\nBDMAP_00003557\nBDMAP_00003558\nBDMAP_00003559\nBDMAP_00003560\nBDMAP_00003561\nBDMAP_00003562\nBDMAP_00003563\nBDMAP_00003564\nBDMAP_00003565\nBDMAP_00003566\nBDMAP_00003567\nBDMAP_00003568\nBDMAP_00003569\nBDMAP_00003570\nBDMAP_00003571\nBDMAP_00003572\nBDMAP_00003573\nBDMAP_00003574\nBDMAP_00003575\nBDMAP_00003576\nBDMAP_00003577\nBDMAP_00003578\nBDMAP_00003579\nBDMAP_00003580\nBDMAP_00003581\nBDMAP_00003582\nBDMAP_00003583\nBDMAP_00003584\nBDMAP_00003585\nBDMAP_00003586\nBDMAP_00003587\nBDMAP_00003588\nBDMAP_00003589\nBDMAP_00003590\nBDMAP_00003591\nBDMAP_00003592\nBDMAP_00003593\nBDMAP_00003594\nBDMAP_00003595\nBDMAP_00003596\nBDMAP_00003597\nBDMAP_00003598\nBDMAP_00003599\nBDMAP_00003600\nBDMAP_00003601\nBDMAP_00003602\nBDMAP_00003603\nBDMAP_00003604\nBDMAP_00003605\nBDMAP_00003606\nBDMAP_00003607\nBDMAP_00003608\nBDMAP_00003609\nBDMAP_00003610\nBDMAP_00003611\nBDMAP_00003612\nBDMAP_00003613\nBDMAP_00003614\nBDMAP_00003615\nBDMAP_00003616\nBDMAP_00003617\nBDMAP_00003618\nBDMAP_00003619\nBDMAP_00003620\nBDMAP_00003621\nBDMAP_00003622\nBDMAP_00003623\nBDMAP_00003624\n
BDMAP_00003625\nBDMAP_00003626\nBDMAP_00003627\nBDMAP_00003628\nBDMAP_00003629\nBDMAP_00003630\nBDMAP_00003631\nBDMAP_00003632\nBDMAP_00003633\nBDMAP_00003634\nBDMAP_00003635\nBDMAP_00003636\nBDMAP_00003637\nBDMAP_00003638\nBDMAP_00003639\nBDMAP_00003640\nBDMAP_00003641\nBDMAP_00003642\nBDMAP_00003643\nBDMAP_00003644\nBDMAP_00003645\nBDMAP_00003646\nBDMAP_00003647\nBDMAP_00003648\nBDMAP_00003649\nBDMAP_00003650\nBDMAP_00003651\nBDMAP_00003652\nBDMAP_00003653\nBDMAP_00003654\nBDMAP_00003655\nBDMAP_00003656\nBDMAP_00003657\nBDMAP_00003658\nBDMAP_00003659\nBDMAP_00003660\nBDMAP_00003661\nBDMAP_00003662\nBDMAP_00003663\nBDMAP_00003664\nBDMAP_00003665\nBDMAP_00003666\nBDMAP_00003667\nBDMAP_00003668\nBDMAP_00003669\nBDMAP_00003670\nBDMAP_00003671\nBDMAP_00003672\nBDMAP_00003673\nBDMAP_00003674\nBDMAP_00003675\nBDMAP_00003676\nBDMAP_00003677\nBDMAP_00003678\nBDMAP_00003679\nBDMAP_00003680\nBDMAP_00003681\nBDMAP_00003682\nBDMAP_00003683\nBDMAP_00003684\nBDMAP_00003685\nBDMAP_00003686\nBDMAP_00003687\nBDMAP_00003688\nBDMAP_00003689\nBDMAP_00003690\nBDMAP_00003691\nBDMAP_00003692\nBDMAP_00003693\nBDMAP_00003694\nBDMAP_00003695\nBDMAP_00003696\nBDMAP_00003697\nBDMAP_00003698\nBDMAP_00003699\nBDMAP_00003700\nBDMAP_00003701\nBDMAP_00003702\nBDMAP_00003703\nBDMAP_00003704\nBDMAP_00003705\nBDMAP_00003706\nBDMAP_00003707\nBDMAP_00003708\nBDMAP_00003709\nBDMAP_00003710\nBDMAP_00003711\nBDMAP_00003712\nBDMAP_00003713\nBDMAP_00003714\nBDMAP_00003715\nBDMAP_00003716\nBDMAP_00003717\nBDMAP_00003718\nBDMAP_00003719\nBDMAP_00003720\nBDMAP_00003721\nBDMAP_00003722\nBDMAP_00003723\nBDMAP_00003724\nBDMAP_00003725\nBDMAP_00003726\nBDMAP_00003727\nBDMAP_00003728\nBDMAP_00003729\nBDMAP_00003730\nBDMAP_00003731\nBDMAP_00003732\nBDMAP_00003733\nBDMAP_00003734\nBDMAP_00003735\nBDMAP_00003736\nBDMAP_00003737\nBDMAP_00003738\nBDMAP_00003739\nBDMAP_00003740\nBDMAP_00003741\nBDMAP_00003742\nBDMAP_00003743\nBDMAP_00003744\nBDMAP_00003745\nBDMAP_00003746\nBDMAP_00003747\nBDMAP_00003748\nBDMAP_00003749\n
BDMAP_00003750\nBDMAP_00003751\nBDMAP_00003752\nBDMAP_00003753\nBDMAP_00003754\nBDMAP_00003755\nBDMAP_00003756\nBDMAP_00003757\nBDMAP_00003758\nBDMAP_00003759\nBDMAP_00003760\nBDMAP_00003761\nBDMAP_00003762\nBDMAP_00003763\nBDMAP_00003764\nBDMAP_00003765\nBDMAP_00003766\nBDMAP_00003767\nBDMAP_00003768\nBDMAP_00003769\nBDMAP_00003770\nBDMAP_00003771\nBDMAP_00003772\nBDMAP_00003773\nBDMAP_00003774\nBDMAP_00003775\nBDMAP_00003776\nBDMAP_00003777\nBDMAP_00003778\nBDMAP_00003779\nBDMAP_00003780\nBDMAP_00003781\nBDMAP_00003782\nBDMAP_00003783\nBDMAP_00003784\nBDMAP_00003785\nBDMAP_00003786\nBDMAP_00003787\nBDMAP_00003788\nBDMAP_00003789\nBDMAP_00003790\nBDMAP_00003791\nBDMAP_00003792\nBDMAP_00003793\nBDMAP_00003794\nBDMAP_00003795\nBDMAP_00003796\nBDMAP_00003797\nBDMAP_00003798\nBDMAP_00003799\nBDMAP_00003800\nBDMAP_00003801\nBDMAP_00003802\nBDMAP_00003803\nBDMAP_00003804\nBDMAP_00003805\nBDMAP_00003806\nBDMAP_00003807\nBDMAP_00003808\nBDMAP_00003809\nBDMAP_00003810\nBDMAP_00003811\nBDMAP_00003812\nBDMAP_00003813\nBDMAP_00003814\nBDMAP_00003815\nBDMAP_00003816\nBDMAP_00003817\nBDMAP_00003818\nBDMAP_00003819\nBDMAP_00003820\nBDMAP_00003821\nBDMAP_00003822\nBDMAP_00003823\nBDMAP_00003824\nBDMAP_00003825\nBDMAP_00003826\nBDMAP_00003827\nBDMAP_00003828\nBDMAP_00003829\nBDMAP_00003830\nBDMAP_00003831\nBDMAP_00003832\nBDMAP_00003833\nBDMAP_00003834\nBDMAP_00003835\nBDMAP_00003836\nBDMAP_00003837\nBDMAP_00003838\nBDMAP_00003839\nBDMAP_00003840\nBDMAP_00003841\nBDMAP_00003842\nBDMAP_00003843\nBDMAP_00003844\nBDMAP_00003845\nBDMAP_00003846\nBDMAP_00003847\nBDMAP_00003848\nBDMAP_00003849\nBDMAP_00003850\nBDMAP_00003851\nBDMAP_00003852\nBDMAP_00003853\nBDMAP_00003854\nBDMAP_00003855\nBDMAP_00003856\nBDMAP_00003857\nBDMAP_00003858\nBDMAP_00003859\nBDMAP_00003860\nBDMAP_00003861\nBDMAP_00003862\nBDMAP_00003863\nBDMAP_00003864\nBDMAP_00003865\nBDMAP_00003866\nBDMAP_00003867\nBDMAP_00003868\nBDMAP_00003869\nBDMAP_00003870\nBDMAP_00003871\nBDMAP_00003872\nBDMAP_00003873\nBDMAP_00003874\n
BDMAP_00003875\nBDMAP_00003876\nBDMAP_00003877\nBDMAP_00003878\nBDMAP_00003879\nBDMAP_00003880\nBDMAP_00003881\nBDMAP_00003882\nBDMAP_00003883\nBDMAP_00003884\nBDMAP_00003885\nBDMAP_00003886\nBDMAP_00003887\nBDMAP_00003888\nBDMAP_00003889\nBDMAP_00003890\nBDMAP_00003891\nBDMAP_00003892\nBDMAP_00003893\nBDMAP_00003894\nBDMAP_00003895\nBDMAP_00003896\nBDMAP_00003897\nBDMAP_00003898\nBDMAP_00003899\nBDMAP_00003900\nBDMAP_00003901\nBDMAP_00003902\nBDMAP_00003903\nBDMAP_00003904\nBDMAP_00003905\nBDMAP_00003906\nBDMAP_00003907\nBDMAP_00003908\nBDMAP_00003909\nBDMAP_00003910\nBDMAP_00003911\nBDMAP_00003912\nBDMAP_00003913\nBDMAP_00003914\nBDMAP_00003915\nBDMAP_00003916\nBDMAP_00003917\nBDMAP_00003918\nBDMAP_00003919\nBDMAP_00003920\nBDMAP_00003921\nBDMAP_00003922\nBDMAP_00003923\nBDMAP_00003924\nBDMAP_00003925\nBDMAP_00003926\nBDMAP_00003927\nBDMAP_00003928\nBDMAP_00003929\nBDMAP_00003930\nBDMAP_00003931\nBDMAP_00003932\nBDMAP_00003933\nBDMAP_00003934\nBDMAP_00003935\nBDMAP_00003936\nBDMAP_00003937\nBDMAP_00003938\nBDMAP_00003939\nBDMAP_00003940\nBDMAP_00003941\nBDMAP_00003942\nBDMAP_00003943\nBDMAP_00003944\nBDMAP_00003945\nBDMAP_00003946\nBDMAP_00003947\nBDMAP_00003948\nBDMAP_00003949\nBDMAP_00003950\nBDMAP_00003951\nBDMAP_00003952\nBDMAP_00003953\nBDMAP_00003954\nBDMAP_00003955\nBDMAP_00003956\nBDMAP_00003957\nBDMAP_00003958\nBDMAP_00003959\nBDMAP_00003960\nBDMAP_00003961\nBDMAP_00003962\nBDMAP_00003963\nBDMAP_00003964\nBDMAP_00003965\nBDMAP_00003966\nBDMAP_00003967\nBDMAP_00003968\nBDMAP_00003969\nBDMAP_00003970\nBDMAP_00003971\nBDMAP_00003972\nBDMAP_00003973\nBDMAP_00003974\nBDMAP_00003975\nBDMAP_00003976\nBDMAP_00003977\nBDMAP_00003978\nBDMAP_00003979\nBDMAP_00003980\nBDMAP_00003981\nBDMAP_00003982\nBDMAP_00003983\nBDMAP_00003984\nBDMAP_00003985\nBDMAP_00003986\nBDMAP_00003987\nBDMAP_00003988\nBDMAP_00003989\nBDMAP_00003990\nBDMAP_00003991\nBDMAP_00003992\nBDMAP_00003993\nBDMAP_00003994\nBDMAP_00003995\nBDMAP_00003996\nBDMAP_00003997\nBDMAP_00003998\nBDMAP_00003999\n
BDMAP_00004000\nBDMAP_00004001\nBDMAP_00004002\nBDMAP_00004003\nBDMAP_00004004\nBDMAP_00004005\nBDMAP_00004006\nBDMAP_00004007\nBDMAP_00004008\nBDMAP_00004009\nBDMAP_00004010\nBDMAP_00004011\nBDMAP_00004012\nBDMAP_00004013\nBDMAP_00004014\nBDMAP_00004015\nBDMAP_00004016\nBDMAP_00004017\nBDMAP_00004018\nBDMAP_00004019\nBDMAP_00004020\nBDMAP_00004021\nBDMAP_00004022\nBDMAP_00004023\nBDMAP_00004024\nBDMAP_00004025\nBDMAP_00004026\nBDMAP_00004027\nBDMAP_00004028\nBDMAP_00004029\nBDMAP_00004030\nBDMAP_00004031\nBDMAP_00004032\nBDMAP_00004033\nBDMAP_00004034\nBDMAP_00004035\nBDMAP_00004036\nBDMAP_00004037\nBDMAP_00004038\nBDMAP_00004039\nBDMAP_00004040\nBDMAP_00004041\nBDMAP_00004042\nBDMAP_00004043\nBDMAP_00004044\nBDMAP_00004045\nBDMAP_00004046\nBDMAP_00004047\nBDMAP_00004048\nBDMAP_00004049\nBDMAP_00004050\nBDMAP_00004051\nBDMAP_00004052\nBDMAP_00004053\nBDMAP_00004054\nBDMAP_00004055\nBDMAP_00004056\nBDMAP_00004057\nBDMAP_00004058\nBDMAP_00004059\nBDMAP_00004060\nBDMAP_00004061\nBDMAP_00004062\nBDMAP_00004063\nBDMAP_00004064\nBDMAP_00004065\nBDMAP_00004066\nBDMAP_00004067\nBDMAP_00004068\nBDMAP_00004069\nBDMAP_00004070\nBDMAP_00004071\nBDMAP_00004072\nBDMAP_00004073\nBDMAP_00004074\nBDMAP_00004075\nBDMAP_00004076\nBDMAP_00004077\nBDMAP_00004078\nBDMAP_00004079\nBDMAP_00004080\nBDMAP_00004081\nBDMAP_00004082\nBDMAP_00004083\nBDMAP_00004084\nBDMAP_00004085\nBDMAP_00004086\nBDMAP_00004087\nBDMAP_00004088\nBDMAP_00004089\nBDMAP_00004090\nBDMAP_00004091\nBDMAP_00004092\nBDMAP_00004093\nBDMAP_00004094\nBDMAP_00004095\nBDMAP_00004096\nBDMAP_00004097\nBDMAP_00004098\nBDMAP_00004099\nBDMAP_00004100\nBDMAP_00004101\nBDMAP_00004102\nBDMAP_00004103\nBDMAP_00004104\nBDMAP_00004105\nBDMAP_00004106\nBDMAP_00004107\nBDMAP_00004108\nBDMAP_00004109\nBDMAP_00004110\nBDMAP_00004111\nBDMAP_00004112\nBDMAP_00004113\nBDMAP_00004114\nBDMAP_00004115\nBDMAP_00004116\nBDMAP_00004117\nBDMAP_00004118\nBDMAP_00004119\nBDMAP_00004120\nBDMAP_00004121\nBDMAP_00004122\nBDMAP_00004123\nBDMAP_00004124\n
BDMAP_00004125\nBDMAP_00004126\nBDMAP_00004127\nBDMAP_00004128\nBDMAP_00004129\nBDMAP_00004130\nBDMAP_00004131\nBDMAP_00004132\nBDMAP_00004133\nBDMAP_00004134\nBDMAP_00004135\nBDMAP_00004136\nBDMAP_00004137\nBDMAP_00004138\nBDMAP_00004139\nBDMAP_00004140\nBDMAP_00004141\nBDMAP_00004142\nBDMAP_00004143\nBDMAP_00004144\nBDMAP_00004145\nBDMAP_00004146\nBDMAP_00004147\nBDMAP_00004148\nBDMAP_00004149\nBDMAP_00004150\nBDMAP_00004151\nBDMAP_00004152\nBDMAP_00004153\nBDMAP_00004154\nBDMAP_00004155\nBDMAP_00004156\nBDMAP_00004157\nBDMAP_00004158\nBDMAP_00004159\nBDMAP_00004160\nBDMAP_00004161\nBDMAP_00004162\nBDMAP_00004163\nBDMAP_00004164\nBDMAP_00004165\nBDMAP_00004166\nBDMAP_00004167\nBDMAP_00004168\nBDMAP_00004169\nBDMAP_00004170\nBDMAP_00004171\nBDMAP_00004172\nBDMAP_00004173\nBDMAP_00004174\nBDMAP_00004175\nBDMAP_00004176\nBDMAP_00004177\nBDMAP_00004178\nBDMAP_00004179\nBDMAP_00004180\nBDMAP_00004181\nBDMAP_00004182\nBDMAP_00004183\nBDMAP_00004184\nBDMAP_00004185\nBDMAP_00004186\nBDMAP_00004187\nBDMAP_00004188\nBDMAP_00004189\nBDMAP_00004190\nBDMAP_00004191\nBDMAP_00004192\nBDMAP_00004193\nBDMAP_00004194\nBDMAP_00004195\nBDMAP_00004196\nBDMAP_00004197\nBDMAP_00004198\nBDMAP_00004199\nBDMAP_00004200\nBDMAP_00004201\nBDMAP_00004202\nBDMAP_00004203\nBDMAP_00004204\nBDMAP_00004205\nBDMAP_00004206\nBDMAP_00004207\nBDMAP_00004208\nBDMAP_00004209\nBDMAP_00004210\nBDMAP_00004211\nBDMAP_00004212\nBDMAP_00004213\nBDMAP_00004214\nBDMAP_00004215\nBDMAP_00004216\nBDMAP_00004217\nBDMAP_00004218\nBDMAP_00004219\nBDMAP_00004220\nBDMAP_00004221\nBDMAP_00004222\nBDMAP_00004223\nBDMAP_00004224\nBDMAP_00004225\nBDMAP_00004226\nBDMAP_00004227\nBDMAP_00004228\nBDMAP_00004229\nBDMAP_00004230\nBDMAP_00004231\nBDMAP_00004232\nBDMAP_00004233\nBDMAP_00004234\nBDMAP_00004235\nBDMAP_00004236\nBDMAP_00004237\nBDMAP_00004238\nBDMAP_00004239\nBDMAP_00004240\nBDMAP_00004241\nBDMAP_00004242\nBDMAP_00004243\nBDMAP_00004244\nBDMAP_00004245\nBDMAP_00004246\nBDMAP_00004247\nBDMAP_00004248\nBDMAP_00004249\n
BDMAP_00004250\nBDMAP_00004251\nBDMAP_00004252\nBDMAP_00004253\nBDMAP_00004254\nBDMAP_00004255\nBDMAP_00004256\nBDMAP_00004257\nBDMAP_00004258\nBDMAP_00004259\nBDMAP_00004260\nBDMAP_00004261\nBDMAP_00004262\nBDMAP_00004263\nBDMAP_00004264\nBDMAP_00004265\nBDMAP_00004266\nBDMAP_00004267\nBDMAP_00004268\nBDMAP_00004269\nBDMAP_00004270\nBDMAP_00004271\nBDMAP_00004272\nBDMAP_00004273\nBDMAP_00004274\nBDMAP_00004275\nBDMAP_00004276\nBDMAP_00004277\nBDMAP_00004278\nBDMAP_00004279\nBDMAP_00004280\nBDMAP_00004281\nBDMAP_00004282\nBDMAP_00004283\nBDMAP_00004284\nBDMAP_00004285\nBDMAP_00004286\nBDMAP_00004287\nBDMAP_00004288\nBDMAP_00004289\nBDMAP_00004290\nBDMAP_00004291\nBDMAP_00004292\nBDMAP_00004293\nBDMAP_00004294\nBDMAP_00004295\nBDMAP_00004296\nBDMAP_00004297\nBDMAP_00004298\nBDMAP_00004299\nBDMAP_00004300\nBDMAP_00004301\nBDMAP_00004302\nBDMAP_00004303\nBDMAP_00004304\nBDMAP_00004305\nBDMAP_00004306\nBDMAP_00004307\nBDMAP_00004308\nBDMAP_00004309\nBDMAP_00004310\nBDMAP_00004311\nBDMAP_00004312\nBDMAP_00004313\nBDMAP_00004314\nBDMAP_00004315\nBDMAP_00004316\nBDMAP_00004317\nBDMAP_00004318\nBDMAP_00004319\nBDMAP_00004320\nBDMAP_00004321\nBDMAP_00004322\nBDMAP_00004323\nBDMAP_00004324\nBDMAP_00004325\nBDMAP_00004326\nBDMAP_00004327\nBDMAP_00004328\nBDMAP_00004329\nBDMAP_00004330\nBDMAP_00004331\nBDMAP_00004332\nBDMAP_00004333\nBDMAP_00004334\nBDMAP_00004335\nBDMAP_00004336\nBDMAP_00004337\nBDMAP_00004338\nBDMAP_00004339\nBDMAP_00004340\nBDMAP_00004341\nBDMAP_00004342\nBDMAP_00004343\nBDMAP_00004344\nBDMAP_00004345\nBDMAP_00004346\nBDMAP_00004347\nBDMAP_00004348\nBDMAP_00004349\nBDMAP_00004350\nBDMAP_00004351\nBDMAP_00004352\nBDMAP_00004353\nBDMAP_00004354\nBDMAP_00004355\nBDMAP_00004356\nBDMAP_00004357\nBDMAP_00004358\nBDMAP_00004359\nBDMAP_00004360\nBDMAP_00004361\nBDMAP_00004362\nBDMAP_00004363\nBDMAP_00004364\nBDMAP_00004365\nBDMAP_00004366\nBDMAP_00004367\nBDMAP_00004368\nBDMAP_00004369\nBDMAP_00004370\nBDMAP_00004371\nBDMAP_00004372\nBDMAP_00004373\nBDMAP_00004374\n
BDMAP_00004375\nBDMAP_00004376\nBDMAP_00004377\nBDMAP_00004378\nBDMAP_00004379\nBDMAP_00004380\nBDMAP_00004381\nBDMAP_00004382\nBDMAP_00004383\nBDMAP_00004384\nBDMAP_00004385\nBDMAP_00004386\nBDMAP_00004387\nBDMAP_00004388\nBDMAP_00004389\nBDMAP_00004390\nBDMAP_00004391\nBDMAP_00004392\nBDMAP_00004393\nBDMAP_00004394\nBDMAP_00004395\nBDMAP_00004396\nBDMAP_00004397\nBDMAP_00004398\nBDMAP_00004399\nBDMAP_00004400\nBDMAP_00004401\nBDMAP_00004402\nBDMAP_00004403\nBDMAP_00004404\nBDMAP_00004405\nBDMAP_00004406\nBDMAP_00004407\nBDMAP_00004408\nBDMAP_00004409\nBDMAP_00004410\nBDMAP_00004411\nBDMAP_00004412\nBDMAP_00004413\nBDMAP_00004414\nBDMAP_00004415\nBDMAP_00004416\nBDMAP_00004417\nBDMAP_00004418\nBDMAP_00004419\nBDMAP_00004420\nBDMAP_00004421\nBDMAP_00004422\nBDMAP_00004423\nBDMAP_00004424\nBDMAP_00004425\nBDMAP_00004426\nBDMAP_00004427\nBDMAP_00004428\nBDMAP_00004429\nBDMAP_00004430\nBDMAP_00004431\nBDMAP_00004432\nBDMAP_00004433\nBDMAP_00004434\nBDMAP_00004435\nBDMAP_00004436\nBDMAP_00004437\nBDMAP_00004438\nBDMAP_00004439\nBDMAP_00004440\nBDMAP_00004441\nBDMAP_00004442\nBDMAP_00004443\nBDMAP_00004444\nBDMAP_00004445\nBDMAP_00004446\nBDMAP_00004447\nBDMAP_00004448\nBDMAP_00004449\nBDMAP_00004450\nBDMAP_00004451\nBDMAP_00004452\nBDMAP_00004453\nBDMAP_00004454\nBDMAP_00004455\nBDMAP_00004456\nBDMAP_00004457\nBDMAP_00004458\nBDMAP_00004459\nBDMAP_00004460\nBDMAP_00004461\nBDMAP_00004462\nBDMAP_00004463\nBDMAP_00004464\nBDMAP_00004465\nBDMAP_00004466\nBDMAP_00004467\nBDMAP_00004468\nBDMAP_00004469\nBDMAP_00004470\nBDMAP_00004471\nBDMAP_00004472\nBDMAP_00004473\nBDMAP_00004474\nBDMAP_00004475\nBDMAP_00004476\nBDMAP_00004477\nBDMAP_00004478\nBDMAP_00004479\nBDMAP_00004480\nBDMAP_00004481\nBDMAP_00004482\nBDMAP_00004483\nBDMAP_00004484\nBDMAP_00004485\nBDMAP_00004486\nBDMAP_00004487\nBDMAP_00004488\nBDMAP_00004489\nBDMAP_00004490\nBDMAP_00004491\nBDMAP_00004492\nBDMAP_00004493\nBDMAP_00004494\nBDMAP_00004495\nBDMAP_00004496\nBDMAP_00004497\nBDMAP_00004498\nBDMAP_00004499\n
BDMAP_00004500\nBDMAP_00004501\nBDMAP_00004502\nBDMAP_00004503\nBDMAP_00004504\nBDMAP_00004505\nBDMAP_00004506\nBDMAP_00004507\nBDMAP_00004508\nBDMAP_00004509\nBDMAP_00004510\nBDMAP_00004511\nBDMAP_00004512\nBDMAP_00004513\nBDMAP_00004514\nBDMAP_00004515\nBDMAP_00004516\nBDMAP_00004517\nBDMAP_00004518\nBDMAP_00004519\nBDMAP_00004520\nBDMAP_00004521\nBDMAP_00004522\nBDMAP_00004523\nBDMAP_00004524\nBDMAP_00004525\nBDMAP_00004526\nBDMAP_00004527\nBDMAP_00004528\nBDMAP_00004529\nBDMAP_00004530\nBDMAP_00004531\nBDMAP_00004532\nBDMAP_00004533\nBDMAP_00004534\nBDMAP_00004535\nBDMAP_00004536\nBDMAP_00004537\nBDMAP_00004538\nBDMAP_00004539\nBDMAP_00004540\nBDMAP_00004541\nBDMAP_00004542\nBDMAP_00004543\nBDMAP_00004544\nBDMAP_00004545\nBDMAP_00004546\nBDMAP_00004547\nBDMAP_00004548\nBDMAP_00004549\nBDMAP_00004550\nBDMAP_00004551\nBDMAP_00004552\nBDMAP_00004553\nBDMAP_00004554\nBDMAP_00004555\nBDMAP_00004556\nBDMAP_00004557\nBDMAP_00004558\nBDMAP_00004559\nBDMAP_00004560\nBDMAP_00004561\nBDMAP_00004562\nBDMAP_00004563\nBDMAP_00004564\nBDMAP_00004565\nBDMAP_00004566\nBDMAP_00004567\nBDMAP_00004568\nBDMAP_00004569\nBDMAP_00004570\nBDMAP_00004571\nBDMAP_00004572\nBDMAP_00004573\nBDMAP_00004574\nBDMAP_00004575\nBDMAP_00004576\nBDMAP_00004577\nBDMAP_00004578\nBDMAP_00004579\nBDMAP_00004580\nBDMAP_00004581\nBDMAP_00004582\nBDMAP_00004583\nBDMAP_00004584\nBDMAP_00004585\nBDMAP_00004586\nBDMAP_00004587\nBDMAP_00004588\nBDMAP_00004589\nBDMAP_00004590\nBDMAP_00004591\nBDMAP_00004592\nBDMAP_00004593\nBDMAP_00004594\nBDMAP_00004595\nBDMAP_00004596\nBDMAP_00004597\nBDMAP_00004598\nBDMAP_00004599\nBDMAP_00004600\nBDMAP_00004601\nBDMAP_00004602\nBDMAP_00004603\nBDMAP_00004604\nBDMAP_00004605\nBDMAP_00004606\nBDMAP_00004607\nBDMAP_00004608\nBDMAP_00004609\nBDMAP_00004610\nBDMAP_00004611\nBDMAP_00004612\nBDMAP_00004613\nBDMAP_00004614\nBDMAP_00004615\nBDMAP_00004616\nBDMAP_00004617\nBDMAP_00004618\nBDMAP_00004619\nBDMAP_00004620\nBDMAP_00004621\nBDMAP_00004622\nBDMAP_00004623\nBDMAP_00004624\n
BDMAP_00004625\nBDMAP_00004626\nBDMAP_00004627\nBDMAP_00004628\nBDMAP_00004629\nBDMAP_00004630\nBDMAP_00004631\nBDMAP_00004632\nBDMAP_00004633\nBDMAP_00004634\nBDMAP_00004635\nBDMAP_00004636\nBDMAP_00004637\nBDMAP_00004638\nBDMAP_00004639\nBDMAP_00004640\nBDMAP_00004641\nBDMAP_00004642\nBDMAP_00004643\nBDMAP_00004644\nBDMAP_00004645\nBDMAP_00004646\nBDMAP_00004647\nBDMAP_00004648\nBDMAP_00004649\nBDMAP_00004650\nBDMAP_00004651\nBDMAP_00004652\nBDMAP_00004653\nBDMAP_00004654\nBDMAP_00004655\nBDMAP_00004656\nBDMAP_00004657\nBDMAP_00004658\nBDMAP_00004659\nBDMAP_00004660\nBDMAP_00004661\nBDMAP_00004662\nBDMAP_00004663\nBDMAP_00004664\nBDMAP_00004665\nBDMAP_00004666\nBDMAP_00004667\nBDMAP_00004668\nBDMAP_00004669\nBDMAP_00004670\nBDMAP_00004671\nBDMAP_00004672\nBDMAP_00004673\nBDMAP_00004674\nBDMAP_00004675\nBDMAP_00004676\nBDMAP_00004677\nBDMAP_00004678\nBDMAP_00004679\nBDMAP_00004680\nBDMAP_00004681\nBDMAP_00004682\nBDMAP_00004683\nBDMAP_00004684\nBDMAP_00004685\nBDMAP_00004686\nBDMAP_00004687\nBDMAP_00004688\nBDMAP_00004689\nBDMAP_00004690\nBDMAP_00004691\nBDMAP_00004692\nBDMAP_00004693\nBDMAP_00004694\nBDMAP_00004695\nBDMAP_00004696\nBDMAP_00004697\nBDMAP_00004698\nBDMAP_00004699\nBDMAP_00004700\nBDMAP_00004701\nBDMAP_00004702\nBDMAP_00004703\nBDMAP_00004704\nBDMAP_00004705\nBDMAP_00004706\nBDMAP_00004707\nBDMAP_00004708\nBDMAP_00004709\nBDMAP_00004710\nBDMAP_00004711\nBDMAP_00004712\nBDMAP_00004713\nBDMAP_00004714\nBDMAP_00004715\nBDMAP_00004716\nBDMAP_00004717\nBDMAP_00004718\nBDMAP_00004719\nBDMAP_00004720\nBDMAP_00004721\nBDMAP_00004722\nBDMAP_00004723\nBDMAP_00004724\nBDMAP_00004725\nBDMAP_00004726\nBDMAP_00004727\nBDMAP_00004728\nBDMAP_00004729\nBDMAP_00004730\nBDMAP_00004731\nBDMAP_00004732\nBDMAP_00004733\nBDMAP_00004734\nBDMAP_00004735\nBDMAP_00004736\nBDMAP_00004737\nBDMAP_00004738\nBDMAP_00004739\nBDMAP_00004740\nBDMAP_00004741\nBDMAP_00004742\nBDMAP_00004743\nBDMAP_00004744\nBDMAP_00004745\nBDMAP_00004746\nBDMAP_00004747\nBDMAP_00004748\nBDMAP_00004749\n
BDMAP_00004750\nBDMAP_00004751\nBDMAP_00004752\nBDMAP_00004753\nBDMAP_00004754\nBDMAP_00004755\nBDMAP_00004756\nBDMAP_00004757\nBDMAP_00004758\nBDMAP_00004759\nBDMAP_00004760\nBDMAP_00004761\nBDMAP_00004762\nBDMAP_00004763\nBDMAP_00004764\nBDMAP_00004765\nBDMAP_00004766\nBDMAP_00004767\nBDMAP_00004768\nBDMAP_00004769\nBDMAP_00004770\nBDMAP_00004771\nBDMAP_00004772\nBDMAP_00004773\nBDMAP_00004774\nBDMAP_00004775\nBDMAP_00004776\nBDMAP_00004777\nBDMAP_00004778\nBDMAP_00004779\nBDMAP_00004780\nBDMAP_00004781\nBDMAP_00004782\nBDMAP_00004783\nBDMAP_00004784\nBDMAP_00004785\nBDMAP_00004786\nBDMAP_00004787\nBDMAP_00004788\nBDMAP_00004789\nBDMAP_00004790\nBDMAP_00004791\nBDMAP_00004792\nBDMAP_00004793\nBDMAP_00004794\nBDMAP_00004795\nBDMAP_00004796\nBDMAP_00004797\nBDMAP_00004798\nBDMAP_00004799\nBDMAP_00004800\nBDMAP_00004801\nBDMAP_00004802\nBDMAP_00004803\nBDMAP_00004804\nBDMAP_00004805\nBDMAP_00004806\nBDMAP_00004807\nBDMAP_00004808\nBDMAP_00004809\nBDMAP_00004810\nBDMAP_00004811\nBDMAP_00004812\nBDMAP_00004813\nBDMAP_00004814\nBDMAP_00004815\nBDMAP_00004816\nBDMAP_00004817\nBDMAP_00004818\nBDMAP_00004819\nBDMAP_00004820\nBDMAP_00004821\nBDMAP_00004822\nBDMAP_00004823\nBDMAP_00004824\nBDMAP_00004825\nBDMAP_00004826\nBDMAP_00004827\nBDMAP_00004828\nBDMAP_00004829\nBDMAP_00004830\nBDMAP_00004831\nBDMAP_00004832\nBDMAP_00004833\nBDMAP_00004834\nBDMAP_00004835\nBDMAP_00004836\nBDMAP_00004837\nBDMAP_00004838\nBDMAP_00004839\nBDMAP_00004840\nBDMAP_00004841\nBDMAP_00004842\nBDMAP_00004843\nBDMAP_00004844\nBDMAP_00004845\nBDMAP_00004846\nBDMAP_00004847\nBDMAP_00004848\nBDMAP_00004849\nBDMAP_00004850\nBDMAP_00004851\nBDMAP_00004852\nBDMAP_00004853\nBDMAP_00004854\nBDMAP_00004855\nBDMAP_00004856\nBDMAP_00004857\nBDMAP_00004858\nBDMAP_00004859\nBDMAP_00004860\nBDMAP_00004861\nBDMAP_00004862\nBDMAP_00004863\nBDMAP_00004864\nBDMAP_00004865\nBDMAP_00004866\nBDMAP_00004867\nBDMAP_00004868\nBDMAP_00004869\nBDMAP_00004870\nBDMAP_00004871\nBDMAP_00004872\nBDMAP_00004873\nBDMAP_00004874\n
BDMAP_00004875\nBDMAP_00004876\nBDMAP_00004877\nBDMAP_00004878\nBDMAP_00004879\nBDMAP_00004880\nBDMAP_00004881\nBDMAP_00004882\nBDMAP_00004883\nBDMAP_00004884\nBDMAP_00004885\nBDMAP_00004886\nBDMAP_00004887\nBDMAP_00004888\nBDMAP_00004889\nBDMAP_00004890\nBDMAP_00004891\nBDMAP_00004892\nBDMAP_00004893\nBDMAP_00004894\nBDMAP_00004895\nBDMAP_00004896\nBDMAP_00004897\nBDMAP_00004898\nBDMAP_00004899\nBDMAP_00004900\nBDMAP_00004901\nBDMAP_00004902\nBDMAP_00004903\nBDMAP_00004904\nBDMAP_00004905\nBDMAP_00004906\nBDMAP_00004907\nBDMAP_00004908\nBDMAP_00004909\nBDMAP_00004910\nBDMAP_00004911\nBDMAP_00004912\nBDMAP_00004913\nBDMAP_00004914\nBDMAP_00004915\nBDMAP_00004916\nBDMAP_00004917\nBDMAP_00004918\nBDMAP_00004919\nBDMAP_00004920\nBDMAP_00004921\nBDMAP_00004922\nBDMAP_00004923\nBDMAP_00004924\nBDMAP_00004925\nBDMAP_00004926\nBDMAP_00004927\nBDMAP_00004928\nBDMAP_00004929\nBDMAP_00004930\nBDMAP_00004931\nBDMAP_00004932\nBDMAP_00004933\nBDMAP_00004934\nBDMAP_00004935\nBDMAP_00004936\nBDMAP_00004937\nBDMAP_00004938\nBDMAP_00004939\nBDMAP_00004940\nBDMAP_00004941\nBDMAP_00004942\nBDMAP_00004943\nBDMAP_00004944\nBDMAP_00004945\nBDMAP_00004946\nBDMAP_00004947\nBDMAP_00004948\nBDMAP_00004949\nBDMAP_00004950\nBDMAP_00004951\nBDMAP_00004952\nBDMAP_00004953\nBDMAP_00004954\nBDMAP_00004955\nBDMAP_00004956\nBDMAP_00004957\nBDMAP_00004958\nBDMAP_00004959\nBDMAP_00004960\nBDMAP_00004961\nBDMAP_00004962\nBDMAP_00004963\nBDMAP_00004964\nBDMAP_00004965\nBDMAP_00004966\nBDMAP_00004967\nBDMAP_00004968\nBDMAP_00004969\nBDMAP_00004970\nBDMAP_00004971\nBDMAP_00004972\nBDMAP_00004973\nBDMAP_00004974\nBDMAP_00004975\nBDMAP_00004976\nBDMAP_00004977\nBDMAP_00004978\nBDMAP_00004979\nBDMAP_00004980\nBDMAP_00004981\nBDMAP_00004982\nBDMAP_00004983\nBDMAP_00004984\nBDMAP_00004985\nBDMAP_00004986\nBDMAP_00004987\nBDMAP_00004988\nBDMAP_00004989\nBDMAP_00004990\nBDMAP_00004991\nBDMAP_00004992\nBDMAP_00004993\nBDMAP_00004994\nBDMAP_00004995\nBDMAP_00004996\nBDMAP_00004997\nBDMAP_00004998\nBDMAP_00004999\n
BDMAP_00005000\nBDMAP_00005001\nBDMAP_00005002\nBDMAP_00005003\nBDMAP_00005004\nBDMAP_00005005\nBDMAP_00005006\nBDMAP_00005007\nBDMAP_00005008\nBDMAP_00005009\nBDMAP_00005010\nBDMAP_00005011\nBDMAP_00005012\nBDMAP_00005013\nBDMAP_00005014\nBDMAP_00005015\nBDMAP_00005016\nBDMAP_00005017\nBDMAP_00005018\nBDMAP_00005019\nBDMAP_00005020\nBDMAP_00005021\nBDMAP_00005022\nBDMAP_00005023\nBDMAP_00005024\nBDMAP_00005025\nBDMAP_00005026\nBDMAP_00005027\nBDMAP_00005028\nBDMAP_00005029\nBDMAP_00005030\nBDMAP_00005031\nBDMAP_00005032\nBDMAP_00005033\nBDMAP_00005034\nBDMAP_00005035\nBDMAP_00005036\nBDMAP_00005037\nBDMAP_00005038\nBDMAP_00005039\nBDMAP_00005040\nBDMAP_00005041\nBDMAP_00005042\nBDMAP_00005043\nBDMAP_00005044\nBDMAP_00005045\nBDMAP_00005046\nBDMAP_00005047\nBDMAP_00005048\nBDMAP_00005049\nBDMAP_00005050\nBDMAP_00005051\nBDMAP_00005052\nBDMAP_00005053\nBDMAP_00005054\nBDMAP_00005055\nBDMAP_00005056\nBDMAP_00005057\nBDMAP_00005058\nBDMAP_00005059\nBDMAP_00005060\nBDMAP_00005061\nBDMAP_00005062\nBDMAP_00005063\nBDMAP_00005064\nBDMAP_00005065\nBDMAP_00005066\nBDMAP_00005067\nBDMAP_00005068\nBDMAP_00005069\nBDMAP_00005070\nBDMAP_00005071\nBDMAP_00005072\nBDMAP_00005073\nBDMAP_00005074\nBDMAP_00005075\nBDMAP_00005076\nBDMAP_00005077\nBDMAP_00005078\nBDMAP_00005079\nBDMAP_00005080\nBDMAP_00005081\nBDMAP_00005082\nBDMAP_00005083\nBDMAP_00005084\nBDMAP_00005085\nBDMAP_00005086\nBDMAP_00005087\nBDMAP_00005088\nBDMAP_00005089\nBDMAP_00005090\nBDMAP_00005091\nBDMAP_00005092\nBDMAP_00005093\nBDMAP_00005094\nBDMAP_00005095\nBDMAP_00005096\nBDMAP_00005097\nBDMAP_00005098\nBDMAP_00005099\nBDMAP_00005100\nBDMAP_00005101\nBDMAP_00005102\nBDMAP_00005103\nBDMAP_00005104\nBDMAP_00005105\nBDMAP_00005106\nBDMAP_00005107\nBDMAP_00005108\nBDMAP_00005109\nBDMAP_00005110\nBDMAP_00005111\nBDMAP_00005112\nBDMAP_00005113\nBDMAP_00005114\nBDMAP_00005115\nBDMAP_00005116\nBDMAP_00005117\nBDMAP_00005118\nBDMAP_00005119\nBDMAP_00005120\nBDMAP_00005121\nBDMAP_00005122\nBDMAP_00005123\nBDMAP_00005124\n
BDMAP_00005125\nBDMAP_00005126\nBDMAP_00005127\nBDMAP_00005128\nBDMAP_00005129\nBDMAP_00005130\nBDMAP_00005131\nBDMAP_00005132\nBDMAP_00005133\nBDMAP_00005134\nBDMAP_00005135\nBDMAP_00005136\nBDMAP_00005137\nBDMAP_00005138\nBDMAP_00005139\nBDMAP_00005140\nBDMAP_00005141\nBDMAP_00005142\nBDMAP_00005143\nBDMAP_00005144\nBDMAP_00005145\nBDMAP_00005146\nBDMAP_00005147\nBDMAP_00005148\nBDMAP_00005149\nBDMAP_00005150\nBDMAP_00005151\nBDMAP_00005152\nBDMAP_00005153\nBDMAP_00005154\nBDMAP_00005155\nBDMAP_00005156\nBDMAP_00005157\nBDMAP_00005158\nBDMAP_00005159\nBDMAP_00005160\nBDMAP_00005161\nBDMAP_00005162\nBDMAP_00005163\nBDMAP_00005164\nBDMAP_00005165\nBDMAP_00005166\nBDMAP_00005167\nBDMAP_00005168\nBDMAP_00005169\nBDMAP_00005170\nBDMAP_00005171\nBDMAP_00005172\nBDMAP_00005173\nBDMAP_00005174\nBDMAP_00005175\nBDMAP_00005176\nBDMAP_00005177\nBDMAP_00005178\nBDMAP_00005179\nBDMAP_00005180\nBDMAP_00005181\nBDMAP_00005182\nBDMAP_00005183\nBDMAP_00005184\nBDMAP_00005185\nBDMAP_00005186\nBDMAP_00005187\nBDMAP_00005188\nBDMAP_00005189\nBDMAP_00005190\nBDMAP_00005191\nBDMAP_00005192\nBDMAP_00005193\nBDMAP_00005194\nBDMAP_00005195"
  },
  {
    "path": "Finetune/AbdomenAtlas/main.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport argparse\nimport os\nfrom functools import partial\nimport logging\nimport numpy as np\nimport torch\nimport torch.distributed as dist\nimport torch.multiprocessing as mp\nimport torch.nn.parallel\nimport torch.utils.data.distributed\nfrom optimizers.lr_scheduler import LinearWarmupCosineAnnealingLR\nfrom trainer import run_training\nfrom dataset.dataloader_bdmap import get_loader_Atlas\nimport torch.nn as nn\nfrom monai.inferers import sliding_window_inference\nfrom monai.losses import DiceCELoss\nfrom monai.metrics import DiceMetric\nfrom monai.networks.nets import SwinUNETR\nfrom monai.transforms import Activations, AsDiscrete, Compose\nfrom monai.utils.enums import MetricReduction\nfrom monai.networks.blocks import PatchEmbed, UnetOutBlock, UnetrBasicBlock, UnetrUpBlock\nfrom monai.networks.nets.swin_unetr import SwinTransformer as SwinViT\nfrom monai.utils import ensure_tuple_rep\n\nimport warnings\n\nwarnings.filterwarnings('ignore')\n\n# os.environ['CUDA_VISIBLE_DEVICES'] = \"7\"\nos.environ['MASTER_ADDR'] = 'localhost'\nos.environ['MASTER_PORT'] = '28890'\n\nimport resource\n\nrlimit = resource.getrlimit(resource.RLIMIT_NOFILE)\nresource.setrlimit(resource.RLIMIT_NOFILE, (8192, rlimit[1]))\nprint('Setting resource limit:', str(resource.getrlimit(resource.RLIMIT_NOFILE)))\n\nparser = argparse.ArgumentParser(description=\"Swin UNETR segmentation 
pipeline\")\nparser.add_argument(\"--checkpoint\", default=None, help=\"start training from saved checkpoint\")\nparser.add_argument(\"--logdir\", default=\"logs\", type=str, help=\"directory to save the tensorboard logs\")\nparser.add_argument(\n    \"--pretrained_dir\", default=\"./pretrained_models/\", type=str, help=\"pretrained checkpoint directory\"\n)\nparser.add_argument(\"--out_channels\", default=10, type=int, help=\"number of output channels\")\n\nparser.add_argument(\n    \"--pretrained_model_name\",\n    default=\"model.pt\",\n    type=str,\n    help=\"pretrained model name\",\n)\nroi = 96\nparser.add_argument(\"--data_dir\", default=\"/project/medimgfmod/CT/AbdomenAtlasMini1.0/\", type=str,\n                    help=\"dataset directory\")\nparser.add_argument(\"--data_txt_path\", default='./dataset/dataset_list', help=\"dataset json file\")\nparser.add_argument(\"--dataset_list\", default=['AbdomenAtlas1.0'], help=\"dataset json file\")\nparser.add_argument(\"--cache_dataset\", default=True, help=\"use monai CACHE Dataset class\")\nparser.add_argument(\"--cache_dir\", default='./cache', help=\"CACHE dir\")\n\nparser.add_argument(\"--save_checkpoint\", default=True, help=\"save checkpoint during training\")\nparser.add_argument(\"--max_epochs\", default=100, type=int, help=\"max number of training epochs\")\nparser.add_argument(\"--warmup_epochs\", default=5, type=int, help=\"number of warmup epochs\")\nparser.add_argument(\"--val_every\", default=1, type=int, help=\"validation frequency\")\n\nparser.add_argument(\"--batch_size\", default=1, type=int, help=\"number of batch size\")\nparser.add_argument(\"--sw_batch_size\", default=4, type=int, help=\"number of sliding window batch size\")\nparser.add_argument(\"--optim_lr\", default=1e-3, type=float, help=\"optimization learning rate\")\nparser.add_argument(\"--optim_name\", default=\"adamw\", type=str, help=\"optimization algorithm\")\nparser.add_argument(\"--reg_weight\", default=1e-5, type=float, 
help=\"regularization weight\")\nparser.add_argument(\"--momentum\", default=0.99, type=float, help=\"momentum\")\nparser.add_argument(\"--noamp\", default=False, help=\"do NOT use amp for training\")\n\nparser.add_argument(\"--distributed\", action=\"store_true\", help=\"start distributed training\")\nparser.add_argument(\"--world_size\", default=1, type=int, help=\"number of nodes for distributed training\")\nparser.add_argument(\"--rank\", default=0, type=int, help=\"node rank for distributed training\")\nparser.add_argument(\"--dist-url\", default=\"tcp://127.0.0.1:23456\", type=str, help=\"distributed url\")\nparser.add_argument(\"--dist-backend\", default=\"nccl\", type=str, help=\"distributed backend\")\nparser.add_argument(\"--norm_name\", default=\"instance\", type=str, help=\"normalization name\")\nparser.add_argument(\"--workers\", default=8, type=int, help=\"number of workers\")\nparser.add_argument(\"--feature_size\", default=48, type=int, help=\"feature size\")\nparser.add_argument(\"--in_channels\", default=1, type=int, help=\"number of input channels\")\n\nparser.add_argument(\"--use_normal_dataset\", default=True, help=\"use monai Dataset class\")\nparser.add_argument(\"--a_min\", default=-175.0, type=float, help=\"a_min in ScaleIntensityRanged\")\nparser.add_argument(\"--a_max\", default=250.0, type=float, help=\"a_max in ScaleIntensityRanged\")\nparser.add_argument(\"--b_min\", default=0.0, type=float, help=\"b_min in ScaleIntensityRanged\")\nparser.add_argument(\"--b_max\", default=1.0, type=float, help=\"b_max in ScaleIntensityRanged\")\nparser.add_argument(\"--space_x\", default=1.5, type=float, help=\"spacing in x direction\")\nparser.add_argument(\"--space_y\", default=1.5, type=float, help=\"spacing in y direction\")\nparser.add_argument(\"--space_z\", default=2.0, type=float, help=\"spacing in z direction\")\nparser.add_argument(\"--roi_x\", default=roi, type=int, help=\"roi size in x direction\")\nparser.add_argument(\"--roi_y\", 
default=roi, type=int, help=\"roi size in y direction\")\nparser.add_argument(\"--roi_z\", default=roi, type=int, help=\"roi size in z direction\")\nparser.add_argument(\"--dropout_rate\", default=0.0, type=float, help=\"dropout rate\")\nparser.add_argument(\"--dropout_path_rate\", default=0.0, type=float, help=\"drop path rate\")\nparser.add_argument(\"--RandFlipd_prob\", default=0.2, type=float, help=\"RandFlipd aug probability\")\nparser.add_argument(\"--RandRotate90d_prob\", default=0.2, type=float, help=\"RandRotate90d aug probability\")\nparser.add_argument(\"--RandScaleIntensityd_prob\", default=0.1, type=float, help=\"RandScaleIntensityd aug probability\")\nparser.add_argument(\"--RandShiftIntensityd_prob\", default=0.1, type=float, help=\"RandShiftIntensityd aug probability\")\nparser.add_argument(\"--infer_overlap\", default=0.75, type=float, help=\"sliding window inference overlap\")\nparser.add_argument(\"--lrschedule\", default=\"warmup_cosine\", type=str, help=\"type of learning rate scheduler\")\nparser.add_argument(\"--resume_ckpt\", action=\"store_true\", help=\"resume training from pretrained checkpoint\")\nparser.add_argument(\"--smooth_dr\", default=1e-6, type=float, help=\"constant added to dice denominator to avoid nan\")\nparser.add_argument(\"--smooth_nr\", default=0.0, type=float, help=\"constant added to dice numerator to avoid zero\")\nparser.add_argument(\"--use_checkpoint\", default=True, help=\"use gradient checkpointing to save memory\")\nparser.add_argument(\"--use_ssl_pretrained\", default=True, help=\"use self-supervised pretrained weights\")\nparser.add_argument(\"--spatial_dims\", default=3, type=int, help=\"spatial dimension of input data\")\nparser.add_argument(\"--squared_dice\", action=\"store_true\", help=\"use squared Dice\")\n\n\ndef main():\n    args = parser.parse_args()\n    args.amp = not args.noamp\n    if args.distributed:\n        args.ngpus_per_node = torch.cuda.device_count()\n        print(\"Found total gpus\", 
args.ngpus_per_node)\n        args.world_size = args.ngpus_per_node * args.world_size\n        mp.spawn(main_worker, nprocs=args.ngpus_per_node, args=(args,))\n    else:\n        main_worker(gpu=0, args=args)\n\n\ndef main_worker(gpu, args):\n    if args.distributed:\n        torch.multiprocessing.set_start_method(\"fork\", force=True)\n    np.set_printoptions(formatter={\"float\": \"{: 0.3f}\".format}, suppress=True)\n    args.gpu = gpu\n    if args.distributed:\n        args.rank = args.rank * args.ngpus_per_node + gpu\n        dist.init_process_group(\n            backend=args.dist_backend, init_method=args.dist_url, world_size=args.world_size, rank=args.rank\n        )\n\n    torch.backends.cudnn.enabled = True\n    torch.backends.cudnn.benchmark = True\n    args.test_mode = False\n    loader = get_loader_Atlas(args)\n    print(args.rank, \" gpu\", args.gpu)\n    if args.rank == 0:\n        print(\"Batch size is:\", args.batch_size, \"epochs\", args.max_epochs)\n    inf_size = [args.roi_x, args.roi_y, args.roi_z]\n\n    if args.rank == 0:\n        os.makedirs(args.logdir, exist_ok=True)\n    logger = init_log('global', logging.INFO)\n    logger.propagate = 0\n\n    pretrained_dir = args.pretrained_dir\n    model = SwinUNETR(\n        img_size=(args.roi_x, args.roi_y, args.roi_z),\n        in_channels=args.in_channels,\n        out_channels=args.out_channels,\n        feature_size=args.feature_size,\n        drop_rate=0.0,\n        attn_drop_rate=0.0,\n        dropout_path_rate=args.dropout_path_rate,\n        use_checkpoint=args.use_checkpoint,\n        use_v2=True\n    )\n\n    if args.resume_ckpt:\n        model_dict = torch.load(os.path.join(pretrained_dir, args.pretrained_model_name))[\"state_dict\"]\n        model.load_state_dict(model_dict)\n        print(\"Use resume weights\")\n\n    if args.use_ssl_pretrained:\n        try:\n            model_dict = torch.load(\"./VoCo_10k.pt\", map_location=torch.device('cpu'))\n            state_dict = model_dict\n\n 
           # state_dict = model_dict['net']\n            # fix potential differences in state dict keys from pre-training to\n            # fine-tuning\n            if \"module.\" in list(state_dict.keys())[0]:\n                print(\"Tag 'module.' found in state dict - fixing!\")\n                for key in list(state_dict.keys()):\n                    state_dict[key.replace(\"module.\", \"\")] = state_dict.pop(key)\n            if \"swin_vit\" in list(state_dict.keys())[0]:\n                print(\"Tag 'swin_vit' found in state dict - fixing!\")\n                for key in list(state_dict.keys()):\n                    state_dict[key.replace(\"swin_vit\", \"swinViT\")] = state_dict.pop(key)\n            # We now load model weights, setting param `strict` to False, i.e.:\n            # this load the encoder weights (Swin-ViT, SSL pre-trained), but leaves\n            # the decoder weights untouched (CNN UNet decoder).\n            model.load_state_dict(state_dict, strict=False)\n            print(\"Using pretrained voco ema self-supervised Swin UNETR backbone weights !\")\n        except ValueError:\n            raise ValueError(\"Self-supervised pre-trained weights not available for\" + str(args.model_name))\n\n    if args.squared_dice:\n        dice_loss = DiceCELoss(\n            to_onehot_y=True, softmax=True, squared_pred=True, smooth_nr=args.smooth_nr, smooth_dr=args.smooth_dr\n        )\n    else:\n        dice_loss = DiceCELoss(include_background=False, to_onehot_y=True, softmax=True)\n\n    post_label = AsDiscrete(to_onehot=args.out_channels)\n    post_pred = AsDiscrete(argmax=True, to_onehot=args.out_channels)\n    dice_acc = DiceMetric(include_background=False, reduction=MetricReduction.MEAN, get_not_nans=True)\n    model_inferer = partial(\n        sliding_window_inference,\n        roi_size=inf_size,\n        sw_batch_size=args.sw_batch_size,\n        predictor=model,\n        overlap=args.infer_overlap,\n    )\n\n    pytorch_total_params = 
sum(p.numel() for p in model.parameters() if p.requires_grad)\n    print(\"Total parameters count\", pytorch_total_params)\n\n    best_acc = 0\n    start_epoch = 0\n\n    if args.checkpoint is not None:\n        checkpoint = torch.load(args.checkpoint, map_location=\"cpu\")\n        from collections import OrderedDict\n\n        new_state_dict = OrderedDict()\n        for k, v in checkpoint[\"state_dict\"].items():\n            new_state_dict[k.replace(\"backbone.\", \"\")] = v\n        model.load_state_dict(new_state_dict, strict=False)\n        if \"epoch\" in checkpoint:\n            start_epoch = checkpoint[\"epoch\"]\n        if \"best_acc\" in checkpoint:\n            best_acc = checkpoint[\"best_acc\"]\n        print(\"=> loaded checkpoint '{}' (epoch {}) (bestacc {})\".format(args.checkpoint, start_epoch, best_acc))\n\n    model.cuda()\n\n    if args.distributed:\n        torch.cuda.set_device(args.gpu)\n        if args.norm_name == \"batch\":\n            model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model)\n        model.cuda(args.gpu)\n        model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu], output_device=args.gpu)\n    if args.optim_name == \"adam\":\n        optimizer = torch.optim.Adam(model.parameters(), lr=args.optim_lr, weight_decay=args.reg_weight)\n\n    elif args.optim_name == \"adamw\":\n        optimizer = torch.optim.AdamW(model.parameters(), lr=args.optim_lr, weight_decay=args.reg_weight)\n        # optimizer = torch.optim.AdamW(model.parameters(), lr=args.optim_lr, amsgrad=True)\n\n    elif args.optim_name == \"sgd\":\n        optimizer = torch.optim.SGD(\n            model.parameters(), lr=args.optim_lr, momentum=args.momentum, nesterov=True, weight_decay=args.reg_weight\n        )\n    else:\n        raise ValueError(\"Unsupported Optimization Procedure: \" + str(args.optim_name))\n\n    if args.lrschedule == \"warmup_cosine\":\n        print(len(loader[0]))\n        max_steps = args.max_epochs * 
len(loader[0])\n        warmup_steps = args.warmup_epochs * len(loader[0])\n        scheduler = LinearWarmupCosineAnnealingLR(\n            optimizer, warmup_epochs=warmup_steps, max_epochs=max_steps\n        )\n    elif args.lrschedule == \"cosine_anneal\":\n        scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=args.max_epochs)\n        if args.checkpoint is not None:\n            scheduler.step(epoch=start_epoch)\n    else:\n        scheduler = None\n    accuracy = run_training(\n        model=model,\n        train_loader=loader[0],\n        val_loader=loader[1],\n        optimizer=optimizer,\n        loss_func=dice_loss,\n        acc_func=dice_acc,\n        args=args,\n        model_inferer=model_inferer,\n        scheduler=scheduler,\n        start_epoch=start_epoch,\n        post_label=post_label,\n        post_pred=post_pred,\n    )\n    return accuracy\n\n\nlogs = set()\n\n\ndef init_log(name, level=logging.INFO):\n    if (name, level) in logs:\n        return\n    logs.add((name, level))\n    logger = logging.getLogger(name)\n    logger.setLevel(level)\n    ch = logging.StreamHandler()\n    ch.setLevel(level)\n    if \"SLURM_PROCID\" in os.environ:\n        rank = int(os.environ[\"SLURM_PROCID\"])\n        logger.addFilter(lambda record: rank == 0)\n    else:\n        rank = 0\n    format_str = \"[%(asctime)s][%(levelname)8s] %(message)s\"\n    formatter = logging.Formatter(format_str)\n    ch.setFormatter(formatter)\n    logger.addHandler(ch)\n    return logger\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },
  {
    "path": "Finetune/AbdomenAtlas/optimizers/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/AbdomenAtlas/optimizers/lr_scheduler.py",
    "content": "# Copyright 2020 - 2021 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nimport warnings\nfrom typing import List\n\nfrom torch import nn as nn\nfrom torch.optim import Adam, Optimizer\nfrom torch.optim.lr_scheduler import LambdaLR, _LRScheduler\n\n__all__ = [\"LinearLR\", \"ExponentialLR\"]\n\n\nclass _LRSchedulerMONAI(_LRScheduler):\n    \"\"\"Base class for increasing the learning rate between two boundaries over a number\n    of iterations\"\"\"\n\n    def __init__(self, optimizer: Optimizer, end_lr: float, num_iter: int, last_epoch: int = -1) -> None:\n        \"\"\"\n        Args:\n            optimizer: wrapped optimizer.\n            end_lr: the final learning rate.\n            num_iter: the number of iterations over which the test occurs.\n            last_epoch: the index of last epoch.\n        Returns:\n            None\n        \"\"\"\n        self.end_lr = end_lr\n        self.num_iter = num_iter\n        super(_LRSchedulerMONAI, self).__init__(optimizer, last_epoch)\n\n\nclass LinearLR(_LRSchedulerMONAI):\n    \"\"\"Linearly increases the learning rate between two boundaries over a number of\n    iterations.\n    \"\"\"\n\n    def get_lr(self):\n        r = self.last_epoch / (self.num_iter - 1)\n        return [base_lr + r * (self.end_lr - base_lr) for base_lr in self.base_lrs]\n\n\nclass ExponentialLR(_LRSchedulerMONAI):\n    \"\"\"Exponentially increases the learning rate between two boundaries over a 
number of\n    iterations.\n    \"\"\"\n\n    def get_lr(self):\n        r = self.last_epoch / (self.num_iter - 1)\n        return [base_lr * (self.end_lr / base_lr) ** r for base_lr in self.base_lrs]\n\n\nclass WarmupCosineSchedule(LambdaLR):\n    \"\"\"Linear warmup and then cosine decay.\n    Based on https://huggingface.co/ implementation.\n    \"\"\"\n\n    def __init__(\n        self, optimizer: Optimizer, warmup_steps: int, t_total: int, cycles: float = 0.5, last_epoch: int = -1\n    ) -> None:\n        \"\"\"\n        Args:\n            optimizer: wrapped optimizer.\n            warmup_steps: number of warmup iterations.\n            t_total: total number of training iterations.\n            cycles: cosine cycles parameter.\n            last_epoch: the index of last epoch.\n        Returns:\n            None\n        \"\"\"\n        self.warmup_steps = warmup_steps\n        self.t_total = t_total\n        self.cycles = cycles\n        super(WarmupCosineSchedule, self).__init__(optimizer, self.lr_lambda, last_epoch)\n\n    def lr_lambda(self, step):\n        if step < self.warmup_steps:\n            return float(step) / float(max(1.0, self.warmup_steps))\n        progress = float(step - self.warmup_steps) / float(max(1, self.t_total - self.warmup_steps))\n        return max(0.0, 0.5 * (1.0 + math.cos(math.pi * float(self.cycles) * 2.0 * progress)))\n\n\nclass LinearWarmupCosineAnnealingLR(_LRScheduler):\n    def __init__(\n        self,\n        optimizer: Optimizer,\n        warmup_epochs: int,\n        max_epochs: int,\n        warmup_start_lr: float = 0.0,\n        eta_min: float = 0.0,\n        last_epoch: int = -1,\n    ) -> None:\n        \"\"\"\n        Args:\n            optimizer (Optimizer): Wrapped optimizer.\n            warmup_epochs (int): Maximum number of iterations for linear warmup\n            max_epochs (int): Maximum number of iterations\n            warmup_start_lr (float): Learning rate to start the linear warmup. 
Default: 0.\n            eta_min (float): Minimum learning rate. Default: 0.\n            last_epoch (int): The index of last epoch. Default: -1.\n        \"\"\"\n        self.warmup_epochs = warmup_epochs\n        self.max_epochs = max_epochs\n        self.warmup_start_lr = warmup_start_lr\n        self.eta_min = eta_min\n\n        super(LinearWarmupCosineAnnealingLR, self).__init__(optimizer, last_epoch)\n\n    def get_lr(self) -> List[float]:\n        \"\"\"\n        Compute learning rate using chainable form of the scheduler\n        \"\"\"\n        if not self._get_lr_called_within_step:\n            warnings.warn(\n                \"To get the last learning rate computed by the scheduler, \" \"please use `get_last_lr()`.\", UserWarning\n            )\n\n        if self.last_epoch == 0:\n            return [self.warmup_start_lr] * len(self.base_lrs)\n        elif self.last_epoch < self.warmup_epochs:\n            return [\n                group[\"lr\"] + (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)\n                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)\n            ]\n        elif self.last_epoch == self.warmup_epochs:\n            return self.base_lrs\n        elif (self.last_epoch - 1 - self.max_epochs) % (2 * (self.max_epochs - self.warmup_epochs)) == 0:\n            return [\n                group[\"lr\"]\n                + (base_lr - self.eta_min) * (1 - math.cos(math.pi / (self.max_epochs - self.warmup_epochs))) / 2\n                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)\n            ]\n\n        return [\n            (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))\n            / (\n                1\n                + math.cos(\n                    math.pi * (self.last_epoch - self.warmup_epochs - 1) / (self.max_epochs - self.warmup_epochs)\n                )\n            )\n            * (group[\"lr\"] - 
self.eta_min)\n            + self.eta_min\n            for group in self.optimizer.param_groups\n        ]\n\n    def _get_closed_form_lr(self) -> List[float]:\n        \"\"\"\n        Called when epoch is passed as a param to the `step` function of the scheduler.\n        \"\"\"\n        if self.last_epoch < self.warmup_epochs:\n            return [\n                self.warmup_start_lr + self.last_epoch * (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)\n                for base_lr in self.base_lrs\n            ]\n\n        return [\n            self.eta_min\n            + 0.5\n            * (base_lr - self.eta_min)\n            * (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))\n            for base_lr in self.base_lrs\n        ]\n"
  },
  {
    "path": "Finetune/AbdomenAtlas/preprocess/try_load.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\r\n# Licensed under the Apache License, Version 2.0 (the \"License\");\r\n# you may not use this file except in compliance with the License.\r\n# You may obtain a copy of the License at\r\n#     http://www.apache.org/licenses/LICENSE-2.0\r\n# Unless required by applicable law or agreed to in writing, software\r\n# distributed under the License is distributed on an \"AS IS\" BASIS,\r\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n# See the License for the specific language governing permissions and\r\n# limitations under the License.\r\n\r\nimport argparse\r\nimport os\r\nfrom functools import partial\r\nimport nibabel as nib\r\nimport numpy as np\r\nimport torch\r\nimport torch.nn.functional as F\r\nfrom torch.cuda.amp import GradScaler, autocast\r\nfrom dataset.dataloader_bdmap import get_loader_Atlas\r\nfrom utils.utils import dice, resample_3d\r\nfrom utils.utils import AverageMeter, distributed_all_gather\r\nfrom tqdm import tqdm\r\nfrom monai.inferers import sliding_window_inference\r\nfrom monai.data import decollate_batch\r\nfrom monai.losses import DiceCELoss\r\nfrom monai.metrics import DiceMetric\r\nfrom monai.networks.nets import SwinUNETR\r\nfrom monai.transforms import Activations, AsDiscrete, Compose\r\nfrom monai.utils.enums import MetricReduction\r\nfrom utils.utils import *\r\nimport cv2\r\nfrom PIL import Image\r\n\r\n# os.environ['CUDA_VISIBLE_DEVICES'] = \"0\"\r\nos.environ['MASTER_ADDR'] = 'localhost'\r\nos.environ['MASTER_PORT'] = '28890'\r\n\r\nimport resource\r\nrlimit = resource.getrlimit(resource.RLIMIT_NOFILE)\r\nresource.setrlimit(resource.RLIMIT_NOFILE, (8192, rlimit[1]))\r\nprint('Setting resource limit:', str(resource.getrlimit(resource.RLIMIT_NOFILE)))\r\n\r\nparser = argparse.ArgumentParser(description=\"Swin UNETR segmentation pipeline\")\r\nparser.add_argument(\r\n    \"--pretrained_dir\", default=\"./runs/logs_scratch_v2/\", type=str, 
help=\"pretrained checkpoint directory\"\r\n)\r\nparser.add_argument(\"--data_dir\", default=\"/project/medimgfmod/CT/AbdomenAtlasMini1.0/\", type=str, help=\"dataset directory\")\r\nparser.add_argument(\"--data_txt_path\", default='./dataset/dataset_list', help=\"dataset json file\")\r\nparser.add_argument(\"--dataset_list\", default=['AbdomenAtlas1.0'], help=\"dataset json file\")\r\n\r\nparser.add_argument(\"--pos\", default=1, type=int, help=\"number of positive sample\")\r\nparser.add_argument(\"--neg\", default=0, type=int, help=\"number of negative sample\")\r\n\r\nroi=96\r\nparser.add_argument(\"--cache_dataset\", default=False, help=\"use monai CACHE Dataset class\")\r\nparser.add_argument(\"--feature_size\", default=48, type=int, help=\"feature size\")\r\nparser.add_argument(\"--batch_size\", default=8, type=int, help=\"number of batch size\")\r\nparser.add_argument(\"--sw_batch_size\", default=1, type=int, help=\"number of sliding window batch size\")\r\nparser.add_argument(\"--infer_overlap\", default=0.75, type=float, help=\"sliding window inference overlap\")\r\nparser.add_argument(\"--in_channels\", default=1, type=int, help=\"number of input channels\")\r\nparser.add_argument(\"--out_channels\", default=14, type=int, help=\"number of output channels\")\r\nparser.add_argument(\"--a_min\", default=-175.0, type=float, help=\"a_min in ScaleIntensityRanged\")\r\nparser.add_argument(\"--a_max\", default=250.0, type=float, help=\"a_max in ScaleIntensityRanged\")\r\nparser.add_argument(\"--b_min\", default=0.0, type=float, help=\"b_min in ScaleIntensityRanged\")\r\nparser.add_argument(\"--b_max\", default=1.0, type=float, help=\"b_max in ScaleIntensityRanged\")\r\nparser.add_argument(\"--space_x\", default=1.5, type=float, help=\"spacing in x direction\")\r\nparser.add_argument(\"--space_y\", default=1.5, type=float, help=\"spacing in y direction\")\r\nparser.add_argument(\"--space_z\", default=2.0, type=float, help=\"spacing in z 
direction\")\r\nparser.add_argument(\"--roi_x\", default=roi, type=int, help=\"roi size in x direction\")\r\nparser.add_argument(\"--roi_y\", default=roi, type=int, help=\"roi size in y direction\")\r\nparser.add_argument(\"--roi_z\", default=roi, type=int, help=\"roi size in z direction\")\r\nparser.add_argument(\"--dropout_rate\", default=0.0, type=float, help=\"dropout rate\")\r\nparser.add_argument(\"--distributed\", action=\"store_true\", help=\"start distributed training\")\r\nparser.add_argument(\"--workers\", default=16, type=int, help=\"number of workers\")\r\nparser.add_argument(\"--RandFlipd_prob\", default=0.2, type=float, help=\"RandFlipd aug probability\")\r\nparser.add_argument(\"--RandRotate90d_prob\", default=0.2, type=float, help=\"RandRotate90d aug probability\")\r\nparser.add_argument(\"--RandScaleIntensityd_prob\", default=0.1, type=float, help=\"RandScaleIntensityd aug probability\")\r\nparser.add_argument(\"--RandShiftIntensityd_prob\", default=0.1, type=float, help=\"RandShiftIntensityd aug probability\")\r\nparser.add_argument(\"--spatial_dims\", default=3, type=int, help=\"spatial dimension of input data\")\r\nparser.add_argument(\"--use_checkpoint\", default=True, help=\"use gradient checkpointing to save memory\")\r\n\r\nimport warnings\r\nwarnings.filterwarnings('ignore')\r\n\r\n\r\ndef main():\r\n    args = parser.parse_args()\r\n    args.test_mode = True\r\n    loader = get_loader_Atlas(args)\r\n\r\n    # num = 0\r\n    # vis_path = './vis/'\r\n    # check_dir(vis_path)\r\n\r\n    with torch.no_grad():\r\n        for batch_data in tqdm(loader[0]):\r\n            image, label = batch_data[\"image\"], batch_data[\"label\"]\r\n\r\n            print(image.shape, label.shape, torch.unique(label))\r\n\r\n            # img = image[0][0].data.cpu().numpy()\r\n            # label = label[0][0].data.cpu().numpy()\r\n            #\r\n            # h, w, c = img.shape\r\n            # cmap = color_map()\r\n            #\r\n            # for j in 
range(c):\r\n            #     im = img[:, :, j]\r\n            #     la = label[:, :, j]\r\n            #\r\n            #     if len(list(np.unique(la))) > 1:\r\n            #         im = (255 * im).astype(np.uint8)\r\n            #         la = Image.fromarray(la.astype(np.uint8), mode='P')\r\n            #         la.putpalette(cmap)\r\n            #         num += 1\r\n            #\r\n            #         cv2.imwrite(vis_path+str(num)+'_im.png', im)\r\n            #         la.save(vis_path+str(num)+'_lab.png')\r\n\r\n\r\nif __name__ == \"__main__\":\r\n    main()\r\n"
  },
  {
    "path": "Finetune/AbdomenAtlas/readme.md",
    "content": "# VoCo for AbdomenAtlas\r\n\r\n<a href=\"https://arxiv.org/abs/2402.17300\"><img src='https://img.shields.io/badge/arXiv-VoCo-red' alt='Paper PDF'></a>\r\n<a href='https://huggingface.co/datasets/Luffy503/VoCo-10k/tree/main'><img src='https://img.shields.io/badge/%F0%9F%A4%97%20Hugging%20Face-Spaces-blue'></a>\r\n\r\nCVPR 2024 paper, [**\"VoCo: A Simple-yet-Effective Volume Contrastive Learning Framework for 3D Medical Image Analysis\"**](https://arxiv.org/abs/2402.17300)\r\n\r\nAuthors: Linshan Wu, <a href=\"https://scholar.google.com/citations?user=PfM5gucAAAAJ&hl=en\">Jiaxin Zhuang</a>, and <a href=\"https://scholar.google.com/citations?hl=en&user=Z_t5DjwAAAAJ\">Hao Chen</a>\r\n\r\nCode for AbdomenAtlasMini1.0 Training and Inference.\r\n\r\n\r\n## Usage\r\n### Pre-training\r\nPlease refer to the official [VoCo repo](https://github.com/Luffy03/VoCo)\r\n\r\n### Requirement\r\nI have stored all the required checkpoints and running logs in the project. \r\nOur Segmentation Training codes are based on [MONAI](https://github.com/Project-MONAI/research-contributions). \r\nPlease also refer to the requirements.txt.\r\n\r\n### Training\r\n\r\nFirst edit the data_path of AbdomenAtlasMini1.0 in 'train.sh'\r\n```\r\ndata_dir=YOUR AbdomenAtlasMini1.0 PATH\r\n```\r\nReading 9 label files is not efficient in training and we also find that there are some bugs in \r\nthe originial [data_loader](https://github.com/MrGiovanni/SuPreM/blob/d8a948c96e56f2050109c3ce418bc4caa09420a5/supervised_pretraining/dataset/dataloader_bdmap.py#L147)\r\n(the data of label is loaded but the meta_keys of labels are not loaded, thus the following transform will result in not corresponding image and labels. We provide '/preprocess/try_load.py' for visualization). 
Thus, we first merge all 9 label files in to one.\r\n```\r\n# preprocess, in exe function of check.py , path=YOUR AbdomenAtlasMini1.0 PATH\r\npython check.py\r\n# merge all 9 organ label files to one label.nii.gz\r\n```\r\n\r\nAfter pre-processing, Training implementation\r\n```\r\n# bash\r\nsh train.sh\r\n# Or using slurm\r\nsbatch train.slurm\r\n```\r\n\r\nTo accelerate training, we use 'PersistentDataset' to pre-cache data.\r\n```\r\n# in train.sh\r\ncache_dataset=False\r\n# Or with adequate space\r\ncache_dataset=True\r\ncache_dir=Your path to save cache\r\n```\r\n\r\n### Inference\r\nFirst edit the test and prediction path of AbdomenAtlasMini1.0 in 'Atlas_test.sh'\r\n```\r\ntest_data_path=Your path to AbdomenAtlasTest\r\nsave_prediction_path=Your path to save the prediction AbdomenAtlasTest\r\n```\r\n\r\nInference implementation\r\n```\r\n# bash\r\nsh Atlas_test.sh\r\n```\r\n\r\nInference Visualization\r\n```\r\n# We provide check_pred_vis() function in check.py for you to visualize the predictions\r\npython check.py\r\n```\r\n\r\n## Acknowledgement\r\nWe thank [MONAI](https://github.com/Project-MONAI/research-contributions) and [SuPreM](https://github.com/MrGiovanni/SuPreM) for part of their codes.\r\n## Citation ✏️ 📄\r\nIf you find this repo useful for your research, please consider citing the paper as follows:\r\n\r\n```\r\n@inproceedings{VoCo,\r\n  title={VoCo: A Simple-yet-Effective Volume Contrastive Learning Framework for 3D Medical Image Analysis},\r\n  author={Wu, Linshan and Zhuang, Jiaxin and Chen, Hao},\r\n  booktitle={IEEE Conf. Comput. Vis. Pattern Recog.},\r\n  year={2024}\r\n  }\r\n```\r\n"
  },
  {
    "path": "Finetune/AbdomenAtlas/requirements.txt",
    "content": "# packages in environment at /home/lwubf/anaconda3/envs/nnunet:\n#\n# Name                    Version                   Build  Channel\n_libgcc_mutex             0.1                        main  \nabsl-py                   2.1.0                     <pip>\nca-certificates           2023.12.12           h06a4308_0  \ncertifi                   2022.12.7                 <pip>\ncharset-normalizer        2.1.1                     <pip>\ncmake                     3.25.0                    <pip>\ncontourpy                 1.2.0                     <pip>\ncycler                    0.12.1                    <pip>\neinops                    0.7.0                     <pip>\nelasticdeform             0.5.0                     <pip>\nfilelock                  3.9.0                     <pip>\nfonttools                 4.50.0                    <pip>\nfsspec                    2024.2.0                  <pip>\ngrpcio                    1.62.0                    <pip>\nhuggingface-hub           0.21.4                    <pip>\nidna                      3.4                       <pip>\nimportlib-metadata        7.0.1                     <pip>\nimportlib_resources       6.4.0                     <pip>\ninquirerpy                0.3.4                     <pip>\nJinja2                    3.1.2                     <pip>\nkiwisolver                1.4.5                     <pip>\nld_impl_linux-64          2.38                 h1181459_1  \nlibffi                    3.3                  he6710b0_2  \nlibgcc-ng                 9.1.0                hdf63c60_0  \nlibstdcxx-ng              9.1.0                hdf63c60_0  \nlit                       15.0.7                    <pip>\nMarkdown                  3.5.2                     <pip>\nMarkupSafe                2.1.5                     <pip>\nmatplotlib                3.8.3                     <pip>\nmonai                     1.3.0                     <pip>\nmpmath                    1.3.0                     
<pip>\nncurses                   6.3                  h7f8727e_2  \nnetworkx                  3.2.1                     <pip>\nnibabel                   5.2.0                     <pip>\nnumpy                     1.26.4                    <pip>\nopencv-python             4.9.0.80                  <pip>\nopenssl                   1.1.1w               h7f8727e_0  \npackaging                 23.2                      <pip>\npfzy                      0.3.4                     <pip>\npillow                    10.2.0                    <pip>\npip                       23.3.1           py39h06a4308_0  \nprompt-toolkit            3.0.43                    <pip>\nprotobuf                  4.25.3                    <pip>\npyparsing                 3.1.2                     <pip>\npython                    3.9.12               h12debd9_1  \npython-dateutil           2.9.0.post0               <pip>\nPyYAML                    6.0.1                     <pip>\nreadline                  8.1.2                h7f8727e_1  \nrequests                  2.28.1                    <pip>\nscipy                     1.12.0                    <pip>\nsetuptools                68.2.2           py39h06a4308_0  \nSimpleITK                 2.0.2                     <pip>\nsix                       1.16.0                    <pip>\nsqlite                    3.38.5               hc218d9a_0  \nsympy                     1.12                      <pip>\ntensorboard               2.16.2                    <pip>\ntensorboard-data-server   0.7.2                     <pip>\ntensorboardX              2.6.2.2                   <pip>\ntk                        8.6.12               h1ccaba5_0  \ntorch                     2.0.1+cu118               <pip>\ntorchaudio                2.0.2+cu118               <pip>\ntorchvision               0.15.2+cu118              <pip>\ntqdm                      4.66.2                    <pip>\ntriton                    2.0.0                     <pip>\ntyping_extensions         
4.8.0                     <pip>\ntzdata                    2024a                h04d1e81_0  \nurllib3                   1.26.13                   <pip>\nwcwidth                   0.2.13                    <pip>\nWerkzeug                  3.0.1                     <pip>\nwheel                     0.41.2           py39h06a4308_0  \nxz                        5.2.5                h7f8727e_1  \nzipp                      3.17.0                    <pip>\nzlib                      1.2.12               h7f8727e_2  \n"
  },
  {
    "path": "Finetune/AbdomenAtlas/train.sh",
    "content": "now=$(date +\"%Y%m%d_%H%M%S\")\nlogdir=runs/logs\nmkdir -p $logdir\n\ndata_dir=/project/medimgfmod/CT/AbdomenAtlasMini1.0/\ncache_dataset=False\ncache_dir=/scratch/medimgfmod/CT/cache/Atlas\n\ntorchrun --master_port=21472 main.py \\\n    --data_dir $data_dir --cache_dataset $cache_dataset --cache_dir $cache_dir --logdir $logdir | tee $logdir/$now.txt"
  },
  {
    "path": "Finetune/AbdomenAtlas/train.slurm",
    "content": "#!/bin/bash\n\n# NOTE: Lines starting with \"#SBATCH\" are valid SLURM commands or statements,\n#       while those starting with \"#\" and \"##SBATCH\" are comments.\n\n#SBATCH -J Atlas\n\n#SBATCH -t 72:00:00 #Maximum runtime of 48 hours\n\n# Enable email notificaitons when job begins and ends\n#SBATCH --mail-user=lwubf@connect.ust.hk #Update your email address\n#SBATCH --mail-type=begin\n#SBATCH --mail-type=end\n\n# Choose partition (queue) with \"gpu\"\n#SBATCH -p project\n\n# To use 24 cpu core and 1 gpu devices in a node\n#SBATCH -N 1 -n 16 --gres=gpu:1\n\n# Setup runtime environment if necessary\nsource ~/.bashrc\nsource activate nnunet\n\n# Go to the job submission directory and run your application\ncd /home/lwubf/AbdomenAtlas/\nsh train.sh"
  },
  {
    "path": "Finetune/AbdomenAtlas/trainer.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport os\nimport shutil\nimport time\n\nimport numpy as np\nimport torch\nimport torch.nn.parallel\nimport torch.utils.data.distributed\nfrom tensorboardX import SummaryWriter\nfrom torch.cuda.amp import GradScaler, autocast\nfrom utils.utils import AverageMeter, distributed_all_gather\n\nfrom monai.data import decollate_batch\nfrom utils.mixup import mixup\n\n\ndef train_epoch(model, loader, optimizer, scheduler, scaler, epoch, loss_func, args):\n    model.train()\n    start_time = time.time()\n    run_loss = AverageMeter()\n    for idx, batch_data in enumerate(loader):\n        data, target = batch_data[\"image\"], batch_data[\"label\"]\n        data, target = data.cuda(), target.cuda()\n\n        data, target = mixup([data, target])\n\n        for param in model.parameters():\n            param.grad = None\n        with autocast(enabled=args.amp):\n            logits = model(data)\n            loss = loss_func(logits, target)\n            #\n        if args.amp:\n            scaler.scale(loss).backward()\n            scaler.step(optimizer)\n            scaler.update()\n        else:\n            loss.backward()\n            optimizer.step()\n        if args.distributed:\n            loss_list = distributed_all_gather([loss], out_numpy=True, is_valid=idx < loader.sampler.valid_length)\n            run_loss.update(\n                
np.mean(np.mean(np.stack(loss_list, axis=0), axis=0), axis=0), n=args.batch_size * args.world_size\n            )\n        else:\n            run_loss.update(loss.item(), n=args.batch_size)\n\n        lr = optimizer.param_groups[0][\"lr\"]\n        if scheduler is not None:\n            scheduler.step()\n        if args.rank == 0 and (idx + 1) % 200 == 0:\n            print(\n                \"Epoch {}/{} {}/{}\".format(epoch, args.max_epochs, idx, len(loader)),\n                \"loss: {:.4f}\".format(run_loss.avg),\n                \"lr: {:.8f}\".format(lr),\n                \"time {:.2f}s\".format(time.time() - start_time),\n            )\n        start_time = time.time()\n    for param in model.parameters():\n        param.grad = None\n\n    return run_loss.avg\n\n\ndef val_epoch(model, loader, epoch, acc_func, args, model_inferer=None, post_label=None, post_pred=None):\n    model.eval()\n    run_acc = AverageMeter()\n    start_time = time.time()\n    with torch.no_grad():\n        for idx, batch_data in enumerate(loader):\n            data, target = batch_data[\"image\"], batch_data[\"label\"]\n            data, target = data.cuda(), target.cuda()\n\n            with autocast(enabled=args.amp):\n                if model_inferer is not None:\n                    logits = model_inferer(data)\n                else:\n                    logits = model(data)\n\n            val_labels_list = [target[0]]\n            val_labels_convert = [post_label(val_label_tensor) for val_label_tensor in val_labels_list]\n            val_outputs_list = [logits[0]]\n            val_output_convert = [post_pred(val_pred_tensor) for val_pred_tensor in val_outputs_list]\n            acc_func.reset()\n            acc_func(y_pred=val_output_convert, y=val_labels_convert)\n            acc, not_nans = acc_func.aggregate()\n            acc = acc.cuda(args.rank)\n\n            if args.distributed:\n                acc_list, not_nans_list = distributed_all_gather(\n                    [acc, 
not_nans], out_numpy=True, is_valid=idx < loader.sampler.valid_length\n                )\n                for al, nl in zip(acc_list, not_nans_list):\n                    run_acc.update(al, n=nl)\n\n            else:\n                run_acc.update(acc.cpu().numpy(), n=not_nans.cpu().numpy())\n\n    if args.rank == 0:\n        avg_acc = np.mean(run_acc.avg)\n        print(\n            \"Val {}/{} {}/{}\".format(epoch, args.max_epochs, idx, len(loader)),\n            \"acc\",\n            avg_acc,\n            \"time {:.2f}s\".format(time.time() - start_time),\n        )\n    start_time = time.time()\n    torch.cuda.empty_cache()\n    return run_acc.avg\n\n\ndef save_checkpoint(model, epoch, args, filename=\"model.pt\", best_acc=0, optimizer=None, scheduler=None):\n    state_dict = model.state_dict() if not args.distributed else model.module.state_dict()\n    save_dict = {\"epoch\": epoch, \"best_acc\": best_acc, \"state_dict\": state_dict}\n    if optimizer is not None:\n        save_dict[\"optimizer\"] = optimizer.state_dict()\n    if scheduler is not None:\n        save_dict[\"scheduler\"] = scheduler.state_dict()\n    filename = os.path.join(args.logdir, filename)\n    torch.save(save_dict, filename)\n    print(\"Saving checkpoint\", filename)\n\n\ndef run_training(\n        model,\n        train_loader,\n        val_loader,\n        optimizer,\n        loss_func,\n        acc_func,\n        args,\n        model_inferer=None,\n        scheduler=None,\n        start_epoch=0,\n        post_label=None,\n        post_pred=None,\n):\n\n    scaler = None\n    if args.amp:\n        scaler = GradScaler()\n    val_acc_max = 0.0\n\n    for epoch in range(start_epoch, args.max_epochs):\n        if args.distributed:\n            train_loader.sampler.set_epoch(epoch)\n            torch.distributed.barrier()\n        print(args.rank, time.ctime(), \"Epoch:\", epoch)\n        epoch_time = time.time()\n        train_loss = train_epoch(\n            model, train_loader, 
optimizer, scheduler, scaler=scaler, epoch=epoch, loss_func=loss_func, args=args\n        )\n        if args.rank == 0:\n            print(\n                \"Final training  {}/{}\".format(epoch, args.max_epochs - 1),\n                \"loss: {:.4f}\".format(train_loss),\n                \"time {:.2f}s\".format(time.time() - epoch_time),\n            )\n\n        b_new_best = False\n        if (epoch + 1) % args.val_every == 0:\n            if args.distributed:\n                torch.distributed.barrier()\n            epoch_time = time.time()\n            val_avg_acc = val_epoch(\n                model,\n                val_loader,\n                epoch=epoch,\n                acc_func=acc_func,\n                model_inferer=model_inferer,\n                args=args,\n                post_label=post_label,\n                post_pred=post_pred,\n            )\n\n            val_avg_acc = np.mean(val_avg_acc)\n\n            if args.rank == 0:\n                print(\n                    \"Final validation  {}/{}\".format(epoch, args.max_epochs - 1),\n                    \"acc\",\n                    val_avg_acc,\n                    \"time {:.2f}s\".format(time.time() - epoch_time),\n                )\n\n                if val_avg_acc > val_acc_max:\n                    print(\"new best ({:.6f} --> {:.6f}). 
\".format(val_acc_max, val_avg_acc))\n                    val_acc_max = val_avg_acc\n                    b_new_best = True\n                    if args.rank == 0 and args.logdir is not None and args.save_checkpoint:\n                        save_checkpoint(\n                            model, epoch, args, best_acc=val_acc_max, optimizer=optimizer, scheduler=scheduler\n                        )\n            if args.rank == 0 and args.logdir is not None and args.save_checkpoint:\n                save_checkpoint(model, epoch, args, best_acc=val_acc_max, filename=\"model_final.pt\")\n                if b_new_best:\n                    print(\"Copying to model.pt new best model!!!!\")\n                    shutil.copyfile(os.path.join(args.logdir, \"model_final.pt\"), os.path.join(args.logdir, \"model.pt\"))\n\n    print(\"Training Finished !, Best Accuracy: \", val_acc_max)\n\n    return val_acc_max\n"
  },
  {
    "path": "Finetune/AbdomenAtlas/utils/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/AbdomenAtlas/utils/data_trans.py",
    "content": "import math\r\nimport os\r\nfrom copy import deepcopy\r\nimport numpy as np\r\nimport torch\r\nimport pickle\r\nfrom monai import data, transforms\r\nfrom monai.data import *\r\nfrom monai.transforms import *\r\nfrom torch.utils.data import DataLoader, ConcatDataset\r\n\r\n\r\nclass Sampler(torch.utils.data.Sampler):\r\n    def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, make_even=True):\r\n        if num_replicas is None:\r\n            if not torch.distributed.is_available():\r\n                raise RuntimeError(\"Requires distributed package to be available\")\r\n            num_replicas = torch.distributed.get_world_size()\r\n        if rank is None:\r\n            if not torch.distributed.is_available():\r\n                raise RuntimeError(\"Requires distributed package to be available\")\r\n            rank = torch.distributed.get_rank()\r\n        self.shuffle = shuffle\r\n        self.make_even = make_even\r\n        self.dataset = dataset\r\n        self.num_replicas = num_replicas\r\n        self.rank = rank\r\n        self.epoch = 0\r\n        self.num_samples = int(math.ceil(len(self.dataset) * 1.0 / self.num_replicas))\r\n        self.total_size = self.num_samples * self.num_replicas\r\n        indices = list(range(len(self.dataset)))\r\n        self.valid_length = len(indices[self.rank: self.total_size: self.num_replicas])\r\n\r\n    def __iter__(self):\r\n        if self.shuffle:\r\n            g = torch.Generator()\r\n            g.manual_seed(self.epoch)\r\n            indices = torch.randperm(len(self.dataset), generator=g).tolist()\r\n        else:\r\n            indices = list(range(len(self.dataset)))\r\n        if self.make_even:\r\n            if len(indices) < self.total_size:\r\n                if self.total_size - len(indices) < len(indices):\r\n                    indices += indices[: (self.total_size - len(indices))]\r\n                else:\r\n                    extra_ids = 
np.random.randint(low=0, high=len(indices), size=self.total_size - len(indices))\r\n                    indices += [indices[ids] for ids in extra_ids]\r\n            assert len(indices) == self.total_size\r\n        indices = indices[self.rank: self.total_size: self.num_replicas]\r\n        self.num_samples = len(indices)\r\n        return iter(indices)\r\n\r\n    def __len__(self):\r\n        return self.num_samples\r\n\r\n    def set_epoch(self, epoch):\r\n        self.epoch = epoch\r\n\r\n\r\ndef get_trans(args):\r\n    base_trans = [\r\n        LoadImaged(keys=[\"image\", \"label\"]),\r\n        EnsureChannelFirstd(keys=[\"image\", \"label\"]),\r\n        Orientationd(keys=[\"image\", \"label\"], axcodes=\"RAS\"),\r\n        Spacingd(keys=[\"image\", \"label\"], pixdim=(args.space_x, args.space_y, args.space_z),\r\n                 mode=(\"bilinear\", \"nearest\")),\r\n        ScaleIntensityRanged(\r\n            keys=[\"image\"],\r\n            a_min=args.a_min,\r\n            a_max=args.a_max,\r\n            b_min=0.0,\r\n            b_max=1.0,\r\n            clip=True,\r\n        ),\r\n        CropForegroundd(keys=[\"image\", \"label\"], source_key=\"image\"),\r\n        SpatialPadd(keys=[\"image\", \"label\"], spatial_size=(args.roi_x, args.roi_y, args.roi_z),\r\n                    mode='constant'),\r\n        transforms.RandShiftIntensityd(keys=\"image\", offsets=0.1, prob=0),\r\n    ]\r\n\r\n    random_trans = [\r\n        RandCropByPosNegLabeld(\r\n            keys=[\"image\", \"label\"],\r\n            label_key=\"label\",\r\n            spatial_size=(args.roi_x, args.roi_y, args.roi_z),\r\n            pos=args.pos,\r\n            neg=args.neg,\r\n            num_samples=args.sw_batch_size,\r\n            image_key=\"image\",\r\n            image_threshold=0,\r\n        ),\r\n        transforms.RandFlipd(keys=[\"image\", \"label\"], prob=args.RandFlipd_prob, spatial_axis=0),\r\n        transforms.RandFlipd(keys=[\"image\", \"label\"], 
prob=args.RandFlipd_prob, spatial_axis=1),\r\n        transforms.RandFlipd(keys=[\"image\", \"label\"], prob=args.RandFlipd_prob, spatial_axis=2),\r\n        transforms.RandRotate90d(keys=[\"image\", \"label\"], prob=args.RandRotate90d_prob, max_k=3),\r\n        transforms.RandScaleIntensityd(keys=\"image\", factors=0.1, prob=args.RandScaleIntensityd_prob),\r\n        transforms.RandShiftIntensityd(keys=\"image\", offsets=0.1, prob=args.RandShiftIntensityd_prob),\r\n\r\n        Delete_keys(keys=[\"image\", \"label\"]),\r\n    ]\r\n    return base_trans, random_trans\r\n\r\n\r\nclass Delete_keys(MapTransform):\r\n    \"\"\"Filter unsed label.\r\n    \"\"\"\r\n\r\n    def __call__(self, data):\r\n        d = dict(data)\r\n        if 'name' in d.keys():\r\n            del d['name']\r\n        if 'image_meta_dict' in d.keys():\r\n            del d['image_meta_dict']\r\n\r\n        if 'label_meta_dict' in d.keys():\r\n            del d['label_meta_dict']\r\n\r\n        return d"
  },
  {
    "path": "Finetune/AbdomenAtlas/utils/mixup.py",
    "content": "import torch\r\nimport numpy as np\r\n\r\n\r\ndef mixup(inputs):\r\n    batch_size = inputs[0].size(0)\r\n    rand = torch.randperm(batch_size)\r\n    rand = [ra.tolist() for ra in rand]\r\n\r\n    lam = int(np.random.beta(0.2, 0.2) * inputs[0].size(2))\r\n    new_inputs = []\r\n\r\n    for input in inputs:\r\n        rand_input = input[rand]\r\n        if np.random.rand() < 0.5:\r\n            new_input = torch.cat([input[:, :, :, 0:lam, :],\r\n                                   rand_input[:, :, :, lam:input.size(3), :]], dim=3)\r\n        else:\r\n            new_input = torch.cat([input[:, :, 0:lam, :, :],\r\n                                   rand_input[:, :, lam:input.size(2), :, :]], dim=2)\r\n\r\n        new_inputs.append(new_input)\r\n\r\n    return new_inputs"
  },
  {
    "path": "Finetune/AbdomenAtlas/utils/utils.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport numpy as np\nimport scipy.ndimage as ndimage\nimport torch\nimport os\nimport SimpleITK as sitk\n\n\ndef resample_3d(img, target_size):\n    imx, imy, imz = img.shape\n    tx, ty, tz = target_size\n    zoom_ratio = (float(tx) / float(imx), float(ty) / float(imy), float(tz) / float(imz))\n    img_resampled = ndimage.zoom(img, zoom_ratio, order=0, prefilter=False)\n    return img_resampled\n\n\ndef dice(x, y):\n    intersect = np.sum(np.sum(np.sum(x * y)))\n    y_sum = np.sum(np.sum(np.sum(y)))\n    if y_sum == 0:\n        return 0.0\n    x_sum = np.sum(np.sum(np.sum(x)))\n    return 2 * intersect / (x_sum + y_sum)\n\n\nclass AverageMeter(object):\n    def __init__(self):\n        self.reset()\n\n    def reset(self):\n        self.val = 0\n        self.avg = 0\n        self.sum = 0\n        self.count = 0\n\n    def update(self, val, n=1):\n        self.val = val\n        self.sum += val * n\n        self.count += n\n        self.avg = np.where(self.count > 0, self.sum / self.count, self.sum)\n\n\ndef distributed_all_gather(\n    tensor_list, valid_batch_size=None, out_numpy=False, world_size=None, no_barrier=False, is_valid=None\n):\n    if world_size is None:\n        world_size = torch.distributed.get_world_size()\n    if valid_batch_size is not None:\n        valid_batch_size = min(valid_batch_size, world_size)\n    elif is_valid is not None:\n        
is_valid = torch.tensor(bool(is_valid), dtype=torch.bool, device=tensor_list[0].device)\n    if not no_barrier:\n        torch.distributed.barrier()\n    tensor_list_out = []\n    with torch.no_grad():\n        if is_valid is not None:\n            is_valid_list = [torch.zeros_like(is_valid) for _ in range(world_size)]\n            torch.distributed.all_gather(is_valid_list, is_valid)\n            is_valid = [x.item() for x in is_valid_list]\n        for tensor in tensor_list:\n            gather_list = [torch.zeros_like(tensor) for _ in range(world_size)]\n            torch.distributed.all_gather(gather_list, tensor)\n            if valid_batch_size is not None:\n                gather_list = gather_list[:valid_batch_size]\n            elif is_valid is not None:\n                gather_list = [g for g, v in zip(gather_list, is_valid_list) if v]\n            if out_numpy:\n                gather_list = [t.cpu().numpy() for t in gather_list]\n            tensor_list_out.append(gather_list)\n    return tensor_list_out\n\n\ndef color_map(dataset='pascal'):\n    cmap = np.zeros((256, 3), dtype='uint8')\n\n    if dataset == 'pascal' or dataset == 'coco':\n        def bitget(byteval, idx):\n            return (byteval & (1 << idx)) != 0\n\n        for i in range(256):\n            r = g = b = 0\n            c = i\n            for j in range(8):\n                r = r | (bitget(c, 0) << 7-j)\n                g = g | (bitget(c, 1) << 7-j)\n                b = b | (bitget(c, 2) << 7-j)\n                c = c >> 3\n\n            cmap[i] = np.array([r, g, b])\n\n    elif dataset == 'cityscapes':\n        cmap[0] = np.array([128, 64, 128])\n        cmap[1] = np.array([244, 35, 232])\n        cmap[2] = np.array([70, 70, 70])\n        cmap[3] = np.array([102, 102, 156])\n        cmap[4] = np.array([190, 153, 153])\n        cmap[5] = np.array([153, 153, 153])\n        cmap[6] = np.array([250, 170, 30])\n        cmap[7] = np.array([220, 220, 0])\n        cmap[8] = np.array([107, 
142, 35])\n        cmap[9] = np.array([152, 251, 152])\n        cmap[10] = np.array([70, 130, 180])\n        cmap[11] = np.array([220, 20, 60])\n        cmap[12] = np.array([255,  0,  0])\n        cmap[13] = np.array([0,  0, 142])\n        cmap[14] = np.array([0,  0, 70])\n        cmap[15] = np.array([0, 60, 100])\n        cmap[16] = np.array([0, 80, 100])\n        cmap[17] = np.array([0,  0, 230])\n        cmap[18] = np.array([119, 11, 32])\n\n        cmap[19] = np.array([0, 0, 0])\n        cmap[255] = np.array([0, 0, 0])\n\n    return cmap\n\n\ndef check_dir(dir):\n    if not os.path.exists(dir):\n        os.makedirs(dir)\n\n\ndef read(img):\n    img = sitk.ReadImage(img)\n    img = sitk.GetArrayFromImage(img)\n    img = img.transpose(1, 2, 0)\n    return img"
  },
  {
    "path": "Finetune/Amos/check_test.py",
    "content": "\r\nimport argparse\r\nimport os\r\nfrom functools import partial\r\nimport nibabel as nib\r\nimport numpy as np\r\nimport torch\r\nimport torch.nn.functional as F\r\nfrom torch.cuda.amp import GradScaler, autocast\r\nfrom utils.data_test import get_loader\r\nfrom utils.utils import dice, resample_3d\r\nfrom utils.utils import AverageMeter, distributed_all_gather\r\n\r\nfrom monai.inferers import sliding_window_inference\r\nfrom monai.data import decollate_batch\r\nfrom monai.losses import DiceCELoss\r\nfrom monai.metrics import DiceMetric\r\nfrom monai.networks.nets import SwinUNETR\r\nfrom monai.transforms import Activations, AsDiscrete, Compose\r\nfrom monai.utils.enums import MetricReduction\r\nimport zipfile\r\nimport shutil\r\nimport SimpleITK as sitk\r\nfrom tqdm import tqdm\r\n\r\nfrom utils.utils import *\r\nfrom PIL import Image\r\n\r\n\r\ndef norm(img):\r\n    new_img = img.copy()\r\n    new_img[img<-175] = 0\r\n    new_img[img>250] = 250\r\n\r\n    out = new_img/250\r\n    out = (255*out).astype(np.uint8)\r\n    return out\r\n\r\n\r\ndef check_size():\r\n    data_path = \"D:\\data/amos22\\imagesTr\"\r\n    pred_path = 'D:\\data/amos22\\labelsTr'\r\n    view_path = './pred/view_tr'\r\n\r\n    # data_path = \"D:\\data/amos22/imagesTs\"\r\n    # pred_path = './pred/test'\r\n    # view_path = './pred/view_ts'\r\n\r\n    check_dir(view_path)\r\n    cmap = color_map()\r\n\r\n    ls = os.listdir(pred_path)\r\n    num = 0\r\n\r\n    # for i in tqdm(ls):\r\n    i = ls[0]\r\n    # i = 'FLARETs_0031_0000.nii'\r\n\r\n    img_path = os.path.join(data_path, i) # i[:-7]+'_0000.nii.gz'\r\n    img_itk = sitk.ReadImage(img_path)\r\n    img = sitk.GetArrayFromImage(img_itk)\r\n    print(img_itk.GetSpacing(), img_itk.GetDirection())\r\n    # img = np.flip(img, 1)\r\n    # img = np.flip(img, 2)\r\n\r\n    pred = os.path.join(pred_path, i)\r\n    pred_itk = sitk.ReadImage(pred)\r\n    pred = sitk.GetArrayFromImage(pred_itk)\r\n    print(pred_itk.GetSpacing(), 
pred_itk.GetDirection())\r\n    # pred = pred.transpose()\r\n\r\n    print(img.shape, pred.shape)\r\n\r\n    c, h, w = img.shape\r\n    for j in range(c):\r\n        im = img[j, :, :]\r\n        pre = pred[j, :, :].astype(np.uint8)\r\n\r\n        pre = Image.fromarray(pre, mode='P')\r\n        pre.putpalette(cmap)\r\n\r\n        im = norm(im)\r\n\r\n        import cv2\r\n        cv2.imwrite(view_path + '/' + str(j) + '_raw.png', im)\r\n        pre.save(view_path + '/' + str(j) + '_pred.png')\r\n\r\n\r\ndef rename():\r\n    pred_path = './pred/test'\r\n\r\n    ls = os.listdir(pred_path)\r\n\r\n    for i in ls:\r\n        old_name = os.path.join(pred_path, i)\r\n        new_name = os.path.join(pred_path, i[:-13] + '.nii.gz')\r\n        os.rename(old_name, new_name)\r\n\r\n\r\ndef check_direction():\r\n    data_path = \"D:\\data\\FLARE22\\imagesTr\" # (-1, -1, 1)\r\n\r\n    data_path = \"D:\\data/amos22\\imagesTr\" # (1, -1, 1)\r\n\r\n    data_path = 'D:\\data\\BTCV\\imagesTr'  # (1, 1, 1)\r\n\r\n    ls = os.listdir(data_path)\r\n\r\n    for i in tqdm(ls):\r\n\r\n        img_path = os.path.join(data_path, i) # i[:-7]+'_0000.nii.gz'\r\n        img_itk = sitk.ReadImage(img_path)\r\n        print(img_itk.GetSpacing(), img_itk.GetDirection())\r\n\r\n\r\nif __name__ == \"__main__\":\r\n    check_direction()\r\n\r\n"
  },
  {
    "path": "Finetune/Amos/dataset/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/Amos/dataset/dataset.json",
    "content": "{\r\n    \"description\": \"0\",\r\n    \"labels\": {\r\n        \"0\": \"background\",\r\n        \"1\": \"Liver\",\r\n        \"10\": \"Esophagus\",\r\n        \"11\": \"Stomach\",\r\n        \"12\": \"Duodenum\",\r\n        \"13\": \"Left Kidney\",\r\n        \"2\": \"Right kidney\",\r\n        \"3\": \"Spleen\",\r\n        \"4\": \"Pancreas\",\r\n        \"5\": \"Aorta\",\r\n        \"6\": \"Inferior vena cava\",\r\n        \"7\": \"Right adrenal gland\",\r\n        \"8\": \"Left adrenal gland\",\r\n        \"9\": \"Gallbladder\"\r\n    },\r\n    \"licence\": \"hands off!\",\r\n    \"modality\": {\r\n        \"0\": \"CT\"\r\n    },\r\n    \"name\": \"FLARE22\",\r\n    \"numTest\": 200,\r\n    \"numTraining\": 50,\r\n    \"reference\": \"0\",\r\n    \"release\": \"0.0\",\r\n    \"tensorImageSize\": \"4D\",\r\n    \"test\": [\r\n        \"./imagesTs/FLARETs_0001_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0002_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0003_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0004_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0005_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0006_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0007_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0008_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0009_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0010_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0011_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0012_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0013_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0014_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0015_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0016_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0017_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0018_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0019_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0020_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0021_0000.nii.gz\",\r\n        
\"./imagesTs/FLARETs_0022_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0023_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0024_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0025_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0026_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0027_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0028_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0029_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0030_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0031_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0032_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0033_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0034_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0035_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0036_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0037_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0038_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0039_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0040_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0041_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0042_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0043_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0044_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0045_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0046_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0047_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0048_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0049_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0050_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0051_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0052_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0053_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0054_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0055_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0056_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0057_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0058_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0059_0000.nii.gz\",\r\n        
\"./imagesTs/FLARETs_0060_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0061_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0062_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0063_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0064_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0065_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0066_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0067_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0068_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0069_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0070_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0071_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0072_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0073_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0074_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0075_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0076_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0077_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0078_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0079_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0080_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0081_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0082_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0083_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0084_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0085_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0086_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0087_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0088_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0089_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0090_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0091_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0092_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0093_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0094_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0095_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0096_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0097_0000.nii.gz\",\r\n        
\"./imagesTs/FLARETs_0098_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0099_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0100_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0101_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0102_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0103_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0104_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0105_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0106_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0107_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0108_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0109_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0110_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0111_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0112_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0113_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0114_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0115_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0116_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0117_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0118_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0119_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0120_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0121_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0122_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0123_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0124_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0125_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0126_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0127_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0128_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0129_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0130_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0131_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0132_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0133_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0134_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0135_0000.nii.gz\",\r\n        
\"./imagesTs/FLARETs_0136_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0137_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0138_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0139_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0140_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0141_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0142_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0143_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0144_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0145_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0146_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0147_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0148_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0149_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0150_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0151_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0152_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0153_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0154_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0155_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0156_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0157_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0158_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0159_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0160_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0161_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0162_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0163_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0164_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0165_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0166_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0167_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0168_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0169_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0170_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0171_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0172_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0173_0000.nii.gz\",\r\n        
\"./imagesTs/FLARETs_0174_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0175_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0176_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0177_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0178_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0179_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0180_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0181_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0182_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0183_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0184_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0185_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0186_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0187_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0188_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0189_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0190_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0191_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0192_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0193_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0194_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0195_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0196_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0197_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0198_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0199_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0200_0000.nii.gz\"\r\n    ],\r\n    \"validation\": [{\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0001_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0001.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0002_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0002.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0003_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0003.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0004_0000.nii.gz\",\r\n            
\"label\": \"./labelsTr/FLARE22_Tr_0004.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0005_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0005.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0006_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0006.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0007_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0007.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0008_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0008.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0009_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0009.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0010_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0010.nii.gz\"\r\n        }\r\n    ],\r\n    \"training\": [\r\n\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0011_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0011.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0012_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0012.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0013_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0013.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0014_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0014.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0015_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0015.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0016_0000.nii.gz\",\r\n            \"label\": 
\"./labelsTr/FLARE22_Tr_0016.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0017_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0017.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0018_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0018.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0019_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0019.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0020_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0020.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0021_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0021.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0022_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0022.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0023_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0023.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0024_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0024.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0025_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0025.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0026_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0026.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0027_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0027.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0028_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0028.nii.gz\"\r\n        },\r\n        
{\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0029_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0029.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0030_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0030.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0031_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0031.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0032_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0032.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0033_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0033.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0034_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0034.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0035_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0035.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0036_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0036.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0037_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0037.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0038_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0038.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0039_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0039.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0040_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0040.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": 
\"./imagesTr/FLARE22_Tr_0041_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0041.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0042_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0042.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0043_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0043.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0044_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0044.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0045_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0045.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0046_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0046.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0047_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0047.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0048_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0048.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0049_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0049.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0050_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0050.nii.gz\"\r\n        }\r\n    ]\r\n}"
  },
  {
    "path": "Finetune/Amos/dataset/dataset_test50.json",
    "content": "{\r\n    \"description\": \"0\",\r\n    \"labels\": {\r\n        \"0\": \"background\",\r\n        \"1\": \"Liver\",\r\n        \"10\": \"Esophagus\",\r\n        \"11\": \"Stomach\",\r\n        \"12\": \"Duodenum\",\r\n        \"13\": \"Left Kidney\",\r\n        \"2\": \"Right kidney\",\r\n        \"3\": \"Spleen\",\r\n        \"4\": \"Pancreas\",\r\n        \"5\": \"Aorta\",\r\n        \"6\": \"Inferior vena cava\",\r\n        \"7\": \"Right adrenal gland\",\r\n        \"8\": \"Left adrenal gland\",\r\n        \"9\": \"Gallbladder\"\r\n    },\r\n    \"licence\": \"hands off!\",\r\n    \"modality\": {\r\n        \"0\": \"CT\"\r\n    },\r\n    \"name\": \"FLARE22\",\r\n    \"numTest\": 200,\r\n    \"numTraining\": 50,\r\n    \"reference\": \"0\",\r\n    \"release\": \"0.0\",\r\n    \"tensorImageSize\": \"4D\",\r\n    \"test\": [\r\n        \"./imagesTs/FLARETs_0001_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0002_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0003_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0004_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0005_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0006_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0007_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0008_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0009_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0010_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0011_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0012_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0013_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0014_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0015_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0016_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0017_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0018_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0019_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0020_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0021_0000.nii.gz\",\r\n        
\"./imagesTs/FLARETs_0022_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0023_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0024_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0025_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0026_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0027_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0028_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0029_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0030_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0031_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0032_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0033_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0034_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0035_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0036_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0037_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0038_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0039_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0040_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0041_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0042_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0043_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0044_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0045_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0046_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0047_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0048_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0049_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0050_0000.nii.gz\"\r\n    ],\r\n    \"validation\": [{\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0001_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0001.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0002_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0002.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0003_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0003.nii.gz\"\r\n        
},\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0004_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0004.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0005_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0005.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0006_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0006.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0007_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0007.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0008_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0008.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0009_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0009.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0010_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0010.nii.gz\"\r\n        }\r\n    ],\r\n    \"training\": [\r\n\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0011_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0011.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0012_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0012.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0013_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0013.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0014_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0014.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0015_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0015.nii.gz\"\r\n        },\r\n        {\r\n            
\"image\": \"./imagesTr/FLARE22_Tr_0016_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0016.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0017_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0017.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0018_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0018.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0019_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0019.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0020_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0020.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0021_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0021.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0022_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0022.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0023_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0023.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0024_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0024.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0025_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0025.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0026_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0026.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0027_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0027.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0028_0000.nii.gz\",\r\n          
  \"label\": \"./labelsTr/FLARE22_Tr_0028.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0029_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0029.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0030_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0030.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0031_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0031.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0032_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0032.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0033_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0033.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0034_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0034.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0035_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0035.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0036_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0036.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0037_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0037.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0038_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0038.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0039_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0039.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0040_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0040.nii.gz\"\r\n        
},\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0041_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0041.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0042_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0042.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0043_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0043.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0044_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0044.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0045_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0045.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0046_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0046.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0047_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0047.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0048_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0048.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0049_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0049.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0050_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0050.nii.gz\"\r\n        }\r\n    ]\r\n}"
  },
  {
    "path": "Finetune/Amos/dataset_CT.json",
    "content": "{\"name\": \"AMOS\", \"description\": \"Amos: A large-scale abdominal multi-organ benchmark for versatile medical image segmentation\", \"author\": \"Yuanfeng Ji\", \"reference\": \"SRIDB x CUHKSZ x HKU x LGCHSZ x LGPHSZ\", \"licence\": \"CC-BY-SA 4.0\", \"release\": \"1.0 01/05/2022\", \"contact\": \"u3008013@connect.hku.hk\", \"tensorImageSize\": \"3D\", \"modality\": {\"0\": \"CT\"},\r\n  \"labels\": {\"0\": \"background\",\r\n    \"1\": \"spleen\",\r\n    \"2\": \"right kidney\",\r\n    \"3\": \"left kidney\",\r\n    \"4\": \"gall bladder\",\r\n    \"5\": \"esophagus\", \"6\": \"liver\",\r\n    \"7\": \"stomach\", \"8\": \"arota\",\r\n    \"9\": \"postcava\", \"10\": \"pancreas\",\r\n    \"11\": \"right adrenal gland\", \"12\": \"left adrenal gland\",\r\n    \"13\": \"duodenum\", \"14\": \"bladder\", \"15\": \"prostate/uterus\"},\r\n  \"numTraining\": 240, \"numValidation\": 120, \"numTest\": 240,\r\n  \"training\": [{\"image\": \"./imagesTr/amos_0001.nii.gz\", \"label\": \"./labelsTr/amos_0001.nii.gz\"}, {\"image\": \"./imagesTr/amos_0004.nii.gz\", \"label\": \"./labelsTr/amos_0004.nii.gz\"}, {\"image\": \"./imagesTr/amos_0005.nii.gz\", \"label\": \"./labelsTr/amos_0005.nii.gz\"}, {\"image\": \"./imagesTr/amos_0006.nii.gz\", \"label\": \"./labelsTr/amos_0006.nii.gz\"}, {\"image\": \"./imagesTr/amos_0007.nii.gz\", \"label\": \"./labelsTr/amos_0007.nii.gz\"}, {\"image\": \"./imagesTr/amos_0009.nii.gz\", \"label\": \"./labelsTr/amos_0009.nii.gz\"}, {\"image\": \"./imagesTr/amos_0010.nii.gz\", \"label\": \"./labelsTr/amos_0010.nii.gz\"}, {\"image\": \"./imagesTr/amos_0011.nii.gz\", \"label\": \"./labelsTr/amos_0011.nii.gz\"}, {\"image\": \"./imagesTr/amos_0014.nii.gz\", \"label\": \"./labelsTr/amos_0014.nii.gz\"}, {\"image\": \"./imagesTr/amos_0015.nii.gz\", \"label\": \"./labelsTr/amos_0015.nii.gz\"}, {\"image\": \"./imagesTr/amos_0016.nii.gz\", \"label\": \"./labelsTr/amos_0016.nii.gz\"}, {\"image\": \"./imagesTr/amos_0017.nii.gz\", \"label\": 
\"./labelsTr/amos_0017.nii.gz\"}, {\"image\": \"./imagesTr/amos_0019.nii.gz\", \"label\": \"./labelsTr/amos_0019.nii.gz\"}, {\"image\": \"./imagesTr/amos_0021.nii.gz\", \"label\": \"./labelsTr/amos_0021.nii.gz\"}, {\"image\": \"./imagesTr/amos_0023.nii.gz\", \"label\": \"./labelsTr/amos_0023.nii.gz\"}, {\"image\": \"./imagesTr/amos_0024.nii.gz\", \"label\": \"./labelsTr/amos_0024.nii.gz\"}, {\"image\": \"./imagesTr/amos_0025.nii.gz\", \"label\": \"./labelsTr/amos_0025.nii.gz\"}, {\"image\": \"./imagesTr/amos_0027.nii.gz\", \"label\": \"./labelsTr/amos_0027.nii.gz\"}, {\"image\": \"./imagesTr/amos_0030.nii.gz\", \"label\": \"./labelsTr/amos_0030.nii.gz\"}, {\"image\": \"./imagesTr/amos_0033.nii.gz\", \"label\": \"./labelsTr/amos_0033.nii.gz\"}, {\"image\": \"./imagesTr/amos_0035.nii.gz\", \"label\": \"./labelsTr/amos_0035.nii.gz\"}, {\"image\": \"./imagesTr/amos_0036.nii.gz\", \"label\": \"./labelsTr/amos_0036.nii.gz\"}, {\"image\": \"./imagesTr/amos_0038.nii.gz\", \"label\": \"./labelsTr/amos_0038.nii.gz\"}, {\"image\": \"./imagesTr/amos_0042.nii.gz\", \"label\": \"./labelsTr/amos_0042.nii.gz\"}, {\"image\": \"./imagesTr/amos_0043.nii.gz\", \"label\": \"./labelsTr/amos_0043.nii.gz\"}, {\"image\": \"./imagesTr/amos_0044.nii.gz\", \"label\": \"./labelsTr/amos_0044.nii.gz\"}, {\"image\": \"./imagesTr/amos_0045.nii.gz\", \"label\": \"./labelsTr/amos_0045.nii.gz\"}, {\"image\": \"./imagesTr/amos_0047.nii.gz\", \"label\": \"./labelsTr/amos_0047.nii.gz\"}, {\"image\": \"./imagesTr/amos_0048.nii.gz\", \"label\": \"./labelsTr/amos_0048.nii.gz\"}, {\"image\": \"./imagesTr/amos_0049.nii.gz\", \"label\": \"./labelsTr/amos_0049.nii.gz\"}, {\"image\": \"./imagesTr/amos_0050.nii.gz\", \"label\": \"./labelsTr/amos_0050.nii.gz\"}, {\"image\": \"./imagesTr/amos_0052.nii.gz\", \"label\": \"./labelsTr/amos_0052.nii.gz\"}, {\"image\": \"./imagesTr/amos_0054.nii.gz\", \"label\": \"./labelsTr/amos_0054.nii.gz\"}, {\"image\": \"./imagesTr/amos_0057.nii.gz\", \"label\": 
\"./labelsTr/amos_0057.nii.gz\"}, {\"image\": \"./imagesTr/amos_0058.nii.gz\", \"label\": \"./labelsTr/amos_0058.nii.gz\"}, {\"image\": \"./imagesTr/amos_0059.nii.gz\", \"label\": \"./labelsTr/amos_0059.nii.gz\"}, {\"image\": \"./imagesTr/amos_0060.nii.gz\", \"label\": \"./labelsTr/amos_0060.nii.gz\"}, {\"image\": \"./imagesTr/amos_0064.nii.gz\", \"label\": \"./labelsTr/amos_0064.nii.gz\"}, {\"image\": \"./imagesTr/amos_0066.nii.gz\", \"label\": \"./labelsTr/amos_0066.nii.gz\"}, {\"image\": \"./imagesTr/amos_0067.nii.gz\", \"label\": \"./labelsTr/amos_0067.nii.gz\"}, {\"image\": \"./imagesTr/amos_0069.nii.gz\", \"label\": \"./labelsTr/amos_0069.nii.gz\"}, {\"image\": \"./imagesTr/amos_0071.nii.gz\", \"label\": \"./labelsTr/amos_0071.nii.gz\"}, {\"image\": \"./imagesTr/amos_0072.nii.gz\", \"label\": \"./labelsTr/amos_0072.nii.gz\"}, {\"image\": \"./imagesTr/amos_0075.nii.gz\", \"label\": \"./labelsTr/amos_0075.nii.gz\"}, {\"image\": \"./imagesTr/amos_0076.nii.gz\", \"label\": \"./labelsTr/amos_0076.nii.gz\"}, {\"image\": \"./imagesTr/amos_0077.nii.gz\", \"label\": \"./labelsTr/amos_0077.nii.gz\"}, {\"image\": \"./imagesTr/amos_0078.nii.gz\", \"label\": \"./labelsTr/amos_0078.nii.gz\"}, {\"image\": \"./imagesTr/amos_0079.nii.gz\", \"label\": \"./labelsTr/amos_0079.nii.gz\"}, {\"image\": \"./imagesTr/amos_0081.nii.gz\", \"label\": \"./labelsTr/amos_0081.nii.gz\"}, {\"image\": \"./imagesTr/amos_0083.nii.gz\", \"label\": \"./labelsTr/amos_0083.nii.gz\"}, {\"image\": \"./imagesTr/amos_0084.nii.gz\", \"label\": \"./labelsTr/amos_0084.nii.gz\"}, {\"image\": \"./imagesTr/amos_0086.nii.gz\", \"label\": \"./labelsTr/amos_0086.nii.gz\"}, {\"image\": \"./imagesTr/amos_0088.nii.gz\", \"label\": \"./labelsTr/amos_0088.nii.gz\"}, {\"image\": \"./imagesTr/amos_0089.nii.gz\", \"label\": \"./labelsTr/amos_0089.nii.gz\"}, {\"image\": \"./imagesTr/amos_0092.nii.gz\", \"label\": \"./labelsTr/amos_0092.nii.gz\"}, {\"image\": \"./imagesTr/amos_0094.nii.gz\", \"label\": 
\"./labelsTr/amos_0094.nii.gz\"}, {\"image\": \"./imagesTr/amos_0097.nii.gz\", \"label\": \"./labelsTr/amos_0097.nii.gz\"}, {\"image\": \"./imagesTr/amos_0098.nii.gz\", \"label\": \"./labelsTr/amos_0098.nii.gz\"}, {\"image\": \"./imagesTr/amos_0099.nii.gz\", \"label\": \"./labelsTr/amos_0099.nii.gz\"}, {\"image\": \"./imagesTr/amos_0102.nii.gz\", \"label\": \"./labelsTr/amos_0102.nii.gz\"}, {\"image\": \"./imagesTr/amos_0103.nii.gz\", \"label\": \"./labelsTr/amos_0103.nii.gz\"}, {\"image\": \"./imagesTr/amos_0104.nii.gz\", \"label\": \"./labelsTr/amos_0104.nii.gz\"}, {\"image\": \"./imagesTr/amos_0105.nii.gz\", \"label\": \"./labelsTr/amos_0105.nii.gz\"}, {\"image\": \"./imagesTr/amos_0109.nii.gz\", \"label\": \"./labelsTr/amos_0109.nii.gz\"}, {\"image\": \"./imagesTr/amos_0110.nii.gz\", \"label\": \"./labelsTr/amos_0110.nii.gz\"}, {\"image\": \"./imagesTr/amos_0111.nii.gz\", \"label\": \"./labelsTr/amos_0111.nii.gz\"}, {\"image\": \"./imagesTr/amos_0113.nii.gz\", \"label\": \"./labelsTr/amos_0113.nii.gz\"}, {\"image\": \"./imagesTr/amos_0115.nii.gz\", \"label\": \"./labelsTr/amos_0115.nii.gz\"}, {\"image\": \"./imagesTr/amos_0116.nii.gz\", \"label\": \"./labelsTr/amos_0116.nii.gz\"}, {\"image\": \"./imagesTr/amos_0118.nii.gz\", \"label\": \"./labelsTr/amos_0118.nii.gz\"}, {\"image\": \"./imagesTr/amos_0119.nii.gz\", \"label\": \"./labelsTr/amos_0119.nii.gz\"}, {\"image\": \"./imagesTr/amos_0121.nii.gz\", \"label\": \"./labelsTr/amos_0121.nii.gz\"}, {\"image\": \"./imagesTr/amos_0124.nii.gz\", \"label\": \"./labelsTr/amos_0124.nii.gz\"}, {\"image\": \"./imagesTr/amos_0125.nii.gz\", \"label\": \"./labelsTr/amos_0125.nii.gz\"}, {\"image\": \"./imagesTr/amos_0126.nii.gz\", \"label\": \"./labelsTr/amos_0126.nii.gz\"}, {\"image\": \"./imagesTr/amos_0127.nii.gz\", \"label\": \"./labelsTr/amos_0127.nii.gz\"}, {\"image\": \"./imagesTr/amos_0129.nii.gz\", \"label\": \"./labelsTr/amos_0129.nii.gz\"}, {\"image\": \"./imagesTr/amos_0131.nii.gz\", \"label\": 
\"./labelsTr/amos_0131.nii.gz\"}, {\"image\": \"./imagesTr/amos_0133.nii.gz\", \"label\": \"./labelsTr/amos_0133.nii.gz\"}, {\"image\": \"./imagesTr/amos_0134.nii.gz\", \"label\": \"./labelsTr/amos_0134.nii.gz\"}, {\"image\": \"./imagesTr/amos_0135.nii.gz\", \"label\": \"./labelsTr/amos_0135.nii.gz\"}, {\"image\": \"./imagesTr/amos_0137.nii.gz\", \"label\": \"./labelsTr/amos_0137.nii.gz\"}, {\"image\": \"./imagesTr/amos_0138.nii.gz\", \"label\": \"./labelsTr/amos_0138.nii.gz\"}, {\"image\": \"./imagesTr/amos_0141.nii.gz\", \"label\": \"./labelsTr/amos_0141.nii.gz\"}, {\"image\": \"./imagesTr/amos_0142.nii.gz\", \"label\": \"./labelsTr/amos_0142.nii.gz\"}, {\"image\": \"./imagesTr/amos_0143.nii.gz\", \"label\": \"./labelsTr/amos_0143.nii.gz\"}, {\"image\": \"./imagesTr/amos_0147.nii.gz\", \"label\": \"./labelsTr/amos_0147.nii.gz\"}, {\"image\": \"./imagesTr/amos_0149.nii.gz\", \"label\": \"./labelsTr/amos_0149.nii.gz\"}, {\"image\": \"./imagesTr/amos_0152.nii.gz\", \"label\": \"./labelsTr/amos_0152.nii.gz\"}, {\"image\": \"./imagesTr/amos_0153.nii.gz\", \"label\": \"./labelsTr/amos_0153.nii.gz\"}, {\"image\": \"./imagesTr/amos_0154.nii.gz\", \"label\": \"./labelsTr/amos_0154.nii.gz\"}, {\"image\": \"./imagesTr/amos_0156.nii.gz\", \"label\": \"./labelsTr/amos_0156.nii.gz\"}, {\"image\": \"./imagesTr/amos_0158.nii.gz\", \"label\": \"./labelsTr/amos_0158.nii.gz\"}, {\"image\": \"./imagesTr/amos_0159.nii.gz\", \"label\": \"./labelsTr/amos_0159.nii.gz\"}, {\"image\": \"./imagesTr/amos_0160.nii.gz\", \"label\": \"./labelsTr/amos_0160.nii.gz\"}, {\"image\": \"./imagesTr/amos_0161.nii.gz\", \"label\": \"./labelsTr/amos_0161.nii.gz\"}, {\"image\": \"./imagesTr/amos_0162.nii.gz\", \"label\": \"./labelsTr/amos_0162.nii.gz\"}, {\"image\": \"./imagesTr/amos_0166.nii.gz\", \"label\": \"./labelsTr/amos_0166.nii.gz\"}, {\"image\": \"./imagesTr/amos_0170.nii.gz\", \"label\": \"./labelsTr/amos_0170.nii.gz\"}, {\"image\": \"./imagesTr/amos_0171.nii.gz\", \"label\": 
\"./labelsTr/amos_0171.nii.gz\"}, {\"image\": \"./imagesTr/amos_0172.nii.gz\", \"label\": \"./labelsTr/amos_0172.nii.gz\"}, {\"image\": \"./imagesTr/amos_0173.nii.gz\", \"label\": \"./labelsTr/amos_0173.nii.gz\"}, {\"image\": \"./imagesTr/amos_0175.nii.gz\", \"label\": \"./labelsTr/amos_0175.nii.gz\"}, {\"image\": \"./imagesTr/amos_0177.nii.gz\", \"label\": \"./labelsTr/amos_0177.nii.gz\"}, {\"image\": \"./imagesTr/amos_0179.nii.gz\", \"label\": \"./labelsTr/amos_0179.nii.gz\"}, {\"image\": \"./imagesTr/amos_0180.nii.gz\", \"label\": \"./labelsTr/amos_0180.nii.gz\"}, {\"image\": \"./imagesTr/amos_0181.nii.gz\", \"label\": \"./labelsTr/amos_0181.nii.gz\"}, {\"image\": \"./imagesTr/amos_0184.nii.gz\", \"label\": \"./labelsTr/amos_0184.nii.gz\"}, {\"image\": \"./imagesTr/amos_0185.nii.gz\", \"label\": \"./labelsTr/amos_0185.nii.gz\"}, {\"image\": \"./imagesTr/amos_0186.nii.gz\", \"label\": \"./labelsTr/amos_0186.nii.gz\"}, {\"image\": \"./imagesTr/amos_0188.nii.gz\", \"label\": \"./labelsTr/amos_0188.nii.gz\"}, {\"image\": \"./imagesTr/amos_0190.nii.gz\", \"label\": \"./labelsTr/amos_0190.nii.gz\"}, {\"image\": \"./imagesTr/amos_0192.nii.gz\", \"label\": \"./labelsTr/amos_0192.nii.gz\"}, {\"image\": \"./imagesTr/amos_0193.nii.gz\", \"label\": \"./labelsTr/amos_0193.nii.gz\"}, {\"image\": \"./imagesTr/amos_0195.nii.gz\", \"label\": \"./labelsTr/amos_0195.nii.gz\"}, {\"image\": \"./imagesTr/amos_0196.nii.gz\", \"label\": \"./labelsTr/amos_0196.nii.gz\"}, {\"image\": \"./imagesTr/amos_0197.nii.gz\", \"label\": \"./labelsTr/amos_0197.nii.gz\"}, {\"image\": \"./imagesTr/amos_0198.nii.gz\", \"label\": \"./labelsTr/amos_0198.nii.gz\"}, {\"image\": \"./imagesTr/amos_0199.nii.gz\", \"label\": \"./labelsTr/amos_0199.nii.gz\"}, {\"image\": \"./imagesTr/amos_0212.nii.gz\", \"label\": \"./labelsTr/amos_0212.nii.gz\"}, {\"image\": \"./imagesTr/amos_0214.nii.gz\", \"label\": \"./labelsTr/amos_0214.nii.gz\"}, {\"image\": \"./imagesTr/amos_0215.nii.gz\", \"label\": 
\"./labelsTr/amos_0215.nii.gz\"}, {\"image\": \"./imagesTr/amos_0217.nii.gz\", \"label\": \"./labelsTr/amos_0217.nii.gz\"}, {\"image\": \"./imagesTr/amos_0224.nii.gz\", \"label\": \"./labelsTr/amos_0224.nii.gz\"}, {\"image\": \"./imagesTr/amos_0225.nii.gz\", \"label\": \"./labelsTr/amos_0225.nii.gz\"}, {\"image\": \"./imagesTr/amos_0226.nii.gz\", \"label\": \"./labelsTr/amos_0226.nii.gz\"}, {\"image\": \"./imagesTr/amos_0230.nii.gz\", \"label\": \"./labelsTr/amos_0230.nii.gz\"}, {\"image\": \"./imagesTr/amos_0231.nii.gz\", \"label\": \"./labelsTr/amos_0231.nii.gz\"}, {\"image\": \"./imagesTr/amos_0235.nii.gz\", \"label\": \"./labelsTr/amos_0235.nii.gz\"}, {\"image\": \"./imagesTr/amos_0237.nii.gz\", \"label\": \"./labelsTr/amos_0237.nii.gz\"}, {\"image\": \"./imagesTr/amos_0239.nii.gz\", \"label\": \"./labelsTr/amos_0239.nii.gz\"}, {\"image\": \"./imagesTr/amos_0242.nii.gz\", \"label\": \"./labelsTr/amos_0242.nii.gz\"}, {\"image\": \"./imagesTr/amos_0245.nii.gz\", \"label\": \"./labelsTr/amos_0245.nii.gz\"}, {\"image\": \"./imagesTr/amos_0248.nii.gz\", \"label\": \"./labelsTr/amos_0248.nii.gz\"}, {\"image\": \"./imagesTr/amos_0249.nii.gz\", \"label\": \"./labelsTr/amos_0249.nii.gz\"}, {\"image\": \"./imagesTr/amos_0254.nii.gz\", \"label\": \"./labelsTr/amos_0254.nii.gz\"}, {\"image\": \"./imagesTr/amos_0259.nii.gz\", \"label\": \"./labelsTr/amos_0259.nii.gz\"}, {\"image\": \"./imagesTr/amos_0263.nii.gz\", \"label\": \"./labelsTr/amos_0263.nii.gz\"}, {\"image\": \"./imagesTr/amos_0264.nii.gz\", \"label\": \"./labelsTr/amos_0264.nii.gz\"}, {\"image\": \"./imagesTr/amos_0268.nii.gz\", \"label\": \"./labelsTr/amos_0268.nii.gz\"}, {\"image\": \"./imagesTr/amos_0272.nii.gz\", \"label\": \"./labelsTr/amos_0272.nii.gz\"}, {\"image\": \"./imagesTr/amos_0273.nii.gz\", \"label\": \"./labelsTr/amos_0273.nii.gz\"}, {\"image\": \"./imagesTr/amos_0274.nii.gz\", \"label\": \"./labelsTr/amos_0274.nii.gz\"}, {\"image\": \"./imagesTr/amos_0276.nii.gz\", \"label\": 
\"./labelsTr/amos_0276.nii.gz\"}, {\"image\": \"./imagesTr/amos_0279.nii.gz\", \"label\": \"./labelsTr/amos_0279.nii.gz\"}, {\"image\": \"./imagesTr/amos_0281.nii.gz\", \"label\": \"./labelsTr/amos_0281.nii.gz\"}, {\"image\": \"./imagesTr/amos_0282.nii.gz\", \"label\": \"./labelsTr/amos_0282.nii.gz\"}, {\"image\": \"./imagesTr/amos_0288.nii.gz\", \"label\": \"./labelsTr/amos_0288.nii.gz\"}, {\"image\": \"./imagesTr/amos_0294.nii.gz\", \"label\": \"./labelsTr/amos_0294.nii.gz\"}, {\"image\": \"./imagesTr/amos_0296.nii.gz\", \"label\": \"./labelsTr/amos_0296.nii.gz\"}, {\"image\": \"./imagesTr/amos_0297.nii.gz\", \"label\": \"./labelsTr/amos_0297.nii.gz\"}, {\"image\": \"./imagesTr/amos_0299.nii.gz\", \"label\": \"./labelsTr/amos_0299.nii.gz\"}, {\"image\": \"./imagesTr/amos_0301.nii.gz\", \"label\": \"./labelsTr/amos_0301.nii.gz\"}, {\"image\": \"./imagesTr/amos_0302.nii.gz\", \"label\": \"./labelsTr/amos_0302.nii.gz\"}, {\"image\": \"./imagesTr/amos_0307.nii.gz\", \"label\": \"./labelsTr/amos_0307.nii.gz\"}, {\"image\": \"./imagesTr/amos_0317.nii.gz\", \"label\": \"./labelsTr/amos_0317.nii.gz\"}, {\"image\": \"./imagesTr/amos_0320.nii.gz\", \"label\": \"./labelsTr/amos_0320.nii.gz\"}, {\"image\": \"./imagesTr/amos_0321.nii.gz\", \"label\": \"./labelsTr/amos_0321.nii.gz\"}, {\"image\": \"./imagesTr/amos_0330.nii.gz\", \"label\": \"./labelsTr/amos_0330.nii.gz\"}, {\"image\": \"./imagesTr/amos_0332.nii.gz\", \"label\": \"./labelsTr/amos_0332.nii.gz\"}, {\"image\": \"./imagesTr/amos_0336.nii.gz\", \"label\": \"./labelsTr/amos_0336.nii.gz\"}, {\"image\": \"./imagesTr/amos_0337.nii.gz\", \"label\": \"./labelsTr/amos_0337.nii.gz\"}, {\"image\": \"./imagesTr/amos_0341.nii.gz\", \"label\": \"./labelsTr/amos_0341.nii.gz\"}, {\"image\": \"./imagesTr/amos_0348.nii.gz\", \"label\": \"./labelsTr/amos_0348.nii.gz\"}, {\"image\": \"./imagesTr/amos_0349.nii.gz\", \"label\": \"./labelsTr/amos_0349.nii.gz\"}, {\"image\": \"./imagesTr/amos_0350.nii.gz\", \"label\": 
\"./labelsTr/amos_0350.nii.gz\"}, {\"image\": \"./imagesTr/amos_0351.nii.gz\", \"label\": \"./labelsTr/amos_0351.nii.gz\"}, {\"image\": \"./imagesTr/amos_0353.nii.gz\", \"label\": \"./labelsTr/amos_0353.nii.gz\"}, {\"image\": \"./imagesTr/amos_0358.nii.gz\", \"label\": \"./labelsTr/amos_0358.nii.gz\"}, {\"image\": \"./imagesTr/amos_0361.nii.gz\", \"label\": \"./labelsTr/amos_0361.nii.gz\"}, {\"image\": \"./imagesTr/amos_0362.nii.gz\", \"label\": \"./labelsTr/amos_0362.nii.gz\"}, {\"image\": \"./imagesTr/amos_0366.nii.gz\", \"label\": \"./labelsTr/amos_0366.nii.gz\"}, {\"image\": \"./imagesTr/amos_0367.nii.gz\", \"label\": \"./labelsTr/amos_0367.nii.gz\"}, {\"image\": \"./imagesTr/amos_0370.nii.gz\", \"label\": \"./labelsTr/amos_0370.nii.gz\"}, {\"image\": \"./imagesTr/amos_0371.nii.gz\", \"label\": \"./labelsTr/amos_0371.nii.gz\"}, {\"image\": \"./imagesTr/amos_0374.nii.gz\", \"label\": \"./labelsTr/amos_0374.nii.gz\"}, {\"image\": \"./imagesTr/amos_0376.nii.gz\", \"label\": \"./labelsTr/amos_0376.nii.gz\"}, {\"image\": \"./imagesTr/amos_0378.nii.gz\", \"label\": \"./labelsTr/amos_0378.nii.gz\"}, {\"image\": \"./imagesTr/amos_0379.nii.gz\", \"label\": \"./labelsTr/amos_0379.nii.gz\"}, {\"image\": \"./imagesTr/amos_0380.nii.gz\", \"label\": \"./labelsTr/amos_0380.nii.gz\"}, {\"image\": \"./imagesTr/amos_0381.nii.gz\", \"label\": \"./labelsTr/amos_0381.nii.gz\"}, {\"image\": \"./imagesTr/amos_0383.nii.gz\", \"label\": \"./labelsTr/amos_0383.nii.gz\"}, {\"image\": \"./imagesTr/amos_0384.nii.gz\", \"label\": \"./labelsTr/amos_0384.nii.gz\"}, {\"image\": \"./imagesTr/amos_0387.nii.gz\", \"label\": \"./labelsTr/amos_0387.nii.gz\"}, {\"image\": \"./imagesTr/amos_0388.nii.gz\", \"label\": \"./labelsTr/amos_0388.nii.gz\"}, {\"image\": \"./imagesTr/amos_0390.nii.gz\", \"label\": \"./labelsTr/amos_0390.nii.gz\"}, {\"image\": \"./imagesTr/amos_0391.nii.gz\", \"label\": \"./labelsTr/amos_0391.nii.gz\"}, {\"image\": \"./imagesTr/amos_0392.nii.gz\", \"label\": 
\"./labelsTr/amos_0392.nii.gz\"}, {\"image\": \"./imagesTr/amos_0395.nii.gz\", \"label\": \"./labelsTr/amos_0395.nii.gz\"}, {\"image\": \"./imagesTr/amos_0396.nii.gz\", \"label\": \"./labelsTr/amos_0396.nii.gz\"}, {\"image\": \"./imagesTr/amos_0398.nii.gz\", \"label\": \"./labelsTr/amos_0398.nii.gz\"}, {\"image\": \"./imagesTr/amos_0400.nii.gz\", \"label\": \"./labelsTr/amos_0400.nii.gz\"}, {\"image\": \"./imagesTr/amos_0401.nii.gz\", \"label\": \"./labelsTr/amos_0401.nii.gz\"}, {\"image\": \"./imagesTr/amos_0402.nii.gz\", \"label\": \"./labelsTr/amos_0402.nii.gz\"}, {\"image\": \"./imagesTr/amos_0403.nii.gz\", \"label\": \"./labelsTr/amos_0403.nii.gz\"}, {\"image\": \"./imagesTr/amos_0404.nii.gz\", \"label\": \"./labelsTr/amos_0404.nii.gz\"}, {\"image\": \"./imagesTr/amos_0405.nii.gz\", \"label\": \"./labelsTr/amos_0405.nii.gz\"}, {\"image\": \"./imagesTr/amos_0406.nii.gz\", \"label\": \"./labelsTr/amos_0406.nii.gz\"}, {\"image\": \"./imagesTr/amos_0408.nii.gz\", \"label\": \"./labelsTr/amos_0408.nii.gz\"}, {\"image\": \"./imagesTr/amos_0410.nii.gz\", \"label\": \"./labelsTr/amos_0410.nii.gz\"}],\r\n  \"validation\": [{\"image\": \"./imagesVa/amos_0008.nii.gz\", \"label\": \"./labelsVa/amos_0008.nii.gz\"}, {\"image\": \"./imagesVa/amos_0013.nii.gz\", \"label\": \"./labelsVa/amos_0013.nii.gz\"}, {\"image\": \"./imagesVa/amos_0018.nii.gz\", \"label\": \"./labelsVa/amos_0018.nii.gz\"}, {\"image\": \"./imagesVa/amos_0022.nii.gz\", \"label\": \"./labelsVa/amos_0022.nii.gz\"}, {\"image\": \"./imagesVa/amos_0029.nii.gz\", \"label\": \"./labelsVa/amos_0029.nii.gz\"}, {\"image\": \"./imagesVa/amos_0032.nii.gz\", \"label\": \"./labelsVa/amos_0032.nii.gz\"}, {\"image\": \"./imagesVa/amos_0034.nii.gz\", \"label\": \"./labelsVa/amos_0034.nii.gz\"}, {\"image\": \"./imagesVa/amos_0040.nii.gz\", \"label\": \"./labelsVa/amos_0040.nii.gz\"}, {\"image\": \"./imagesVa/amos_0041.nii.gz\", \"label\": \"./labelsVa/amos_0041.nii.gz\"}, {\"image\": \"./imagesVa/amos_0051.nii.gz\", 
\"label\": \"./labelsVa/amos_0051.nii.gz\"}, {\"image\": \"./imagesVa/amos_0056.nii.gz\", \"label\": \"./labelsVa/amos_0056.nii.gz\"}, {\"image\": \"./imagesVa/amos_0061.nii.gz\", \"label\": \"./labelsVa/amos_0061.nii.gz\"}, {\"image\": \"./imagesVa/amos_0063.nii.gz\", \"label\": \"./labelsVa/amos_0063.nii.gz\"}, {\"image\": \"./imagesVa/amos_0070.nii.gz\", \"label\": \"./labelsVa/amos_0070.nii.gz\"}, {\"image\": \"./imagesVa/amos_0073.nii.gz\", \"label\": \"./labelsVa/amos_0073.nii.gz\"}, {\"image\": \"./imagesVa/amos_0085.nii.gz\", \"label\": \"./labelsVa/amos_0085.nii.gz\"}, {\"image\": \"./imagesVa/amos_0087.nii.gz\", \"label\": \"./labelsVa/amos_0087.nii.gz\"}, {\"image\": \"./imagesVa/amos_0090.nii.gz\", \"label\": \"./labelsVa/amos_0090.nii.gz\"}, {\"image\": \"./imagesVa/amos_0106.nii.gz\", \"label\": \"./labelsVa/amos_0106.nii.gz\"}, {\"image\": \"./imagesVa/amos_0108.nii.gz\", \"label\": \"./labelsVa/amos_0108.nii.gz\"}, {\"image\": \"./imagesVa/amos_0112.nii.gz\", \"label\": \"./labelsVa/amos_0112.nii.gz\"}, {\"image\": \"./imagesVa/amos_0117.nii.gz\", \"label\": \"./labelsVa/amos_0117.nii.gz\"}, {\"image\": \"./imagesVa/amos_0120.nii.gz\", \"label\": \"./labelsVa/amos_0120.nii.gz\"}, {\"image\": \"./imagesVa/amos_0123.nii.gz\", \"label\": \"./labelsVa/amos_0123.nii.gz\"}, {\"image\": \"./imagesVa/amos_0128.nii.gz\", \"label\": \"./labelsVa/amos_0128.nii.gz\"}, {\"image\": \"./imagesVa/amos_0132.nii.gz\", \"label\": \"./labelsVa/amos_0132.nii.gz\"}, {\"image\": \"./imagesVa/amos_0136.nii.gz\", \"label\": \"./labelsVa/amos_0136.nii.gz\"}, {\"image\": \"./imagesVa/amos_0140.nii.gz\", \"label\": \"./labelsVa/amos_0140.nii.gz\"}, {\"image\": \"./imagesVa/amos_0144.nii.gz\", \"label\": \"./labelsVa/amos_0144.nii.gz\"}, {\"image\": \"./imagesVa/amos_0150.nii.gz\", \"label\": \"./labelsVa/amos_0150.nii.gz\"}, {\"image\": \"./imagesVa/amos_0155.nii.gz\", \"label\": \"./labelsVa/amos_0155.nii.gz\"}, {\"image\": \"./imagesVa/amos_0157.nii.gz\", \"label\": 
\"./labelsVa/amos_0157.nii.gz\"}, {\"image\": \"./imagesVa/amos_0167.nii.gz\", \"label\": \"./labelsVa/amos_0167.nii.gz\"}, {\"image\": \"./imagesVa/amos_0174.nii.gz\", \"label\": \"./labelsVa/amos_0174.nii.gz\"}, {\"image\": \"./imagesVa/amos_0176.nii.gz\", \"label\": \"./labelsVa/amos_0176.nii.gz\"}, {\"image\": \"./imagesVa/amos_0189.nii.gz\", \"label\": \"./labelsVa/amos_0189.nii.gz\"}, {\"image\": \"./imagesVa/amos_0191.nii.gz\", \"label\": \"./labelsVa/amos_0191.nii.gz\"}, {\"image\": \"./imagesVa/amos_0194.nii.gz\", \"label\": \"./labelsVa/amos_0194.nii.gz\"}, {\"image\": \"./imagesVa/amos_0200.nii.gz\", \"label\": \"./labelsVa/amos_0200.nii.gz\"}, {\"image\": \"./imagesVa/amos_0202.nii.gz\", \"label\": \"./labelsVa/amos_0202.nii.gz\"}, {\"image\": \"./imagesVa/amos_0203.nii.gz\", \"label\": \"./labelsVa/amos_0203.nii.gz\"}, {\"image\": \"./imagesVa/amos_0204.nii.gz\", \"label\": \"./labelsVa/amos_0204.nii.gz\"}, {\"image\": \"./imagesVa/amos_0206.nii.gz\", \"label\": \"./labelsVa/amos_0206.nii.gz\"}, {\"image\": \"./imagesVa/amos_0207.nii.gz\", \"label\": \"./labelsVa/amos_0207.nii.gz\"}, {\"image\": \"./imagesVa/amos_0208.nii.gz\", \"label\": \"./labelsVa/amos_0208.nii.gz\"}, {\"image\": \"./imagesVa/amos_0216.nii.gz\", \"label\": \"./labelsVa/amos_0216.nii.gz\"}, {\"image\": \"./imagesVa/amos_0218.nii.gz\", \"label\": \"./labelsVa/amos_0218.nii.gz\"}, {\"image\": \"./imagesVa/amos_0219.nii.gz\", \"label\": \"./labelsVa/amos_0219.nii.gz\"}, {\"image\": \"./imagesVa/amos_0223.nii.gz\", \"label\": \"./labelsVa/amos_0223.nii.gz\"}, {\"image\": \"./imagesVa/amos_0228.nii.gz\", \"label\": \"./labelsVa/amos_0228.nii.gz\"}, {\"image\": \"./imagesVa/amos_0233.nii.gz\", \"label\": \"./labelsVa/amos_0233.nii.gz\"}, {\"image\": \"./imagesVa/amos_0238.nii.gz\", \"label\": \"./labelsVa/amos_0238.nii.gz\"}, {\"image\": \"./imagesVa/amos_0244.nii.gz\", \"label\": \"./labelsVa/amos_0244.nii.gz\"}, {\"image\": \"./imagesVa/amos_0247.nii.gz\", \"label\": 
\"./labelsVa/amos_0247.nii.gz\"}, {\"image\": \"./imagesVa/amos_0250.nii.gz\", \"label\": \"./labelsVa/amos_0250.nii.gz\"}, {\"image\": \"./imagesVa/amos_0255.nii.gz\", \"label\": \"./labelsVa/amos_0255.nii.gz\"}, {\"image\": \"./imagesVa/amos_0257.nii.gz\", \"label\": \"./labelsVa/amos_0257.nii.gz\"}, {\"image\": \"./imagesVa/amos_0258.nii.gz\", \"label\": \"./labelsVa/amos_0258.nii.gz\"}, {\"image\": \"./imagesVa/amos_0278.nii.gz\", \"label\": \"./labelsVa/amos_0278.nii.gz\"}, {\"image\": \"./imagesVa/amos_0280.nii.gz\", \"label\": \"./labelsVa/amos_0280.nii.gz\"}, {\"image\": \"./imagesVa/amos_0283.nii.gz\", \"label\": \"./labelsVa/amos_0283.nii.gz\"}, {\"image\": \"./imagesVa/amos_0284.nii.gz\", \"label\": \"./labelsVa/amos_0284.nii.gz\"}, {\"image\": \"./imagesVa/amos_0286.nii.gz\", \"label\": \"./labelsVa/amos_0286.nii.gz\"}, {\"image\": \"./imagesVa/amos_0287.nii.gz\", \"label\": \"./labelsVa/amos_0287.nii.gz\"}, {\"image\": \"./imagesVa/amos_0289.nii.gz\", \"label\": \"./labelsVa/amos_0289.nii.gz\"}, {\"image\": \"./imagesVa/amos_0290.nii.gz\", \"label\": \"./labelsVa/amos_0290.nii.gz\"}, {\"image\": \"./imagesVa/amos_0292.nii.gz\", \"label\": \"./labelsVa/amos_0292.nii.gz\"}, {\"image\": \"./imagesVa/amos_0293.nii.gz\", \"label\": \"./labelsVa/amos_0293.nii.gz\"}, {\"image\": \"./imagesVa/amos_0304.nii.gz\", \"label\": \"./labelsVa/amos_0304.nii.gz\"}, {\"image\": \"./imagesVa/amos_0308.nii.gz\", \"label\": \"./labelsVa/amos_0308.nii.gz\"}, {\"image\": \"./imagesVa/amos_0309.nii.gz\", \"label\": \"./labelsVa/amos_0309.nii.gz\"}, {\"image\": \"./imagesVa/amos_0310.nii.gz\", \"label\": \"./labelsVa/amos_0310.nii.gz\"}, {\"image\": \"./imagesVa/amos_0311.nii.gz\", \"label\": \"./labelsVa/amos_0311.nii.gz\"}, {\"image\": \"./imagesVa/amos_0313.nii.gz\", \"label\": \"./labelsVa/amos_0313.nii.gz\"}, {\"image\": \"./imagesVa/amos_0316.nii.gz\", \"label\": \"./labelsVa/amos_0316.nii.gz\"}, {\"image\": \"./imagesVa/amos_0318.nii.gz\", \"label\": 
\"./labelsVa/amos_0318.nii.gz\"}, {\"image\": \"./imagesVa/amos_0323.nii.gz\", \"label\": \"./labelsVa/amos_0323.nii.gz\"}, {\"image\": \"./imagesVa/amos_0325.nii.gz\", \"label\": \"./labelsVa/amos_0325.nii.gz\"}, {\"image\": \"./imagesVa/amos_0326.nii.gz\", \"label\": \"./labelsVa/amos_0326.nii.gz\"}, {\"image\": \"./imagesVa/amos_0328.nii.gz\", \"label\": \"./labelsVa/amos_0328.nii.gz\"}, {\"image\": \"./imagesVa/amos_0333.nii.gz\", \"label\": \"./labelsVa/amos_0333.nii.gz\"}, {\"image\": \"./imagesVa/amos_0334.nii.gz\", \"label\": \"./labelsVa/amos_0334.nii.gz\"}, {\"image\": \"./imagesVa/amos_0339.nii.gz\", \"label\": \"./labelsVa/amos_0339.nii.gz\"}, {\"image\": \"./imagesVa/amos_0342.nii.gz\", \"label\": \"./labelsVa/amos_0342.nii.gz\"}, {\"image\": \"./imagesVa/amos_0344.nii.gz\", \"label\": \"./labelsVa/amos_0344.nii.gz\"}, {\"image\": \"./imagesVa/amos_0346.nii.gz\", \"label\": \"./labelsVa/amos_0346.nii.gz\"}, {\"image\": \"./imagesVa/amos_0352.nii.gz\", \"label\": \"./labelsVa/amos_0352.nii.gz\"}, {\"image\": \"./imagesVa/amos_0356.nii.gz\", \"label\": \"./labelsVa/amos_0356.nii.gz\"}, {\"image\": \"./imagesVa/amos_0357.nii.gz\", \"label\": \"./labelsVa/amos_0357.nii.gz\"}, {\"image\": \"./imagesVa/amos_0363.nii.gz\", \"label\": \"./labelsVa/amos_0363.nii.gz\"}, {\"image\": \"./imagesVa/amos_0364.nii.gz\", \"label\": \"./labelsVa/amos_0364.nii.gz\"}, {\"image\": \"./imagesVa/amos_0365.nii.gz\", \"label\": \"./labelsVa/amos_0365.nii.gz\"}, {\"image\": \"./imagesVa/amos_0368.nii.gz\", \"label\": \"./labelsVa/amos_0368.nii.gz\"}, {\"image\": \"./imagesVa/amos_0372.nii.gz\", \"label\": \"./labelsVa/amos_0372.nii.gz\"}, {\"image\": \"./imagesVa/amos_0373.nii.gz\", \"label\": \"./labelsVa/amos_0373.nii.gz\"}, {\"image\": \"./imagesVa/amos_0377.nii.gz\", \"label\": \"./labelsVa/amos_0377.nii.gz\"}, {\"image\": \"./imagesVa/amos_0385.nii.gz\", \"label\": \"./labelsVa/amos_0385.nii.gz\"}, {\"image\": \"./imagesVa/amos_0397.nii.gz\", \"label\": 
\"./labelsVa/amos_0397.nii.gz\"}, {\"image\": \"./imagesVa/amos_0399.nii.gz\", \"label\": \"./labelsVa/amos_0399.nii.gz\"}, {\"image\": \"./imagesVa/amos_0409.nii.gz\", \"label\": \"./labelsVa/amos_0409.nii.gz\"}],\r\n  \"test\": [{\"image\": \"./imagesTs/amos_0002.nii.gz\"}, {\"image\": \"./imagesTs/amos_0003.nii.gz\"}, {\"image\": \"./imagesTs/amos_0012.nii.gz\"}, {\"image\": \"./imagesTs/amos_0020.nii.gz\"}, {\"image\": \"./imagesTs/amos_0026.nii.gz\"}, {\"image\": \"./imagesTs/amos_0028.nii.gz\"}, {\"image\": \"./imagesTs/amos_0031.nii.gz\"}, {\"image\": \"./imagesTs/amos_0037.nii.gz\"}, {\"image\": \"./imagesTs/amos_0039.nii.gz\"}, {\"image\": \"./imagesTs/amos_0046.nii.gz\"}, {\"image\": \"./imagesTs/amos_0053.nii.gz\"}, {\"image\": \"./imagesTs/amos_0055.nii.gz\"}, {\"image\": \"./imagesTs/amos_0062.nii.gz\"}, {\"image\": \"./imagesTs/amos_0065.nii.gz\"}, {\"image\": \"./imagesTs/amos_0068.nii.gz\"}, {\"image\": \"./imagesTs/amos_0074.nii.gz\"}, {\"image\": \"./imagesTs/amos_0080.nii.gz\"}, {\"image\": \"./imagesTs/amos_0082.nii.gz\"}, {\"image\": \"./imagesTs/amos_0091.nii.gz\"}, {\"image\": \"./imagesTs/amos_0093.nii.gz\"}, {\"image\": \"./imagesTs/amos_0095.nii.gz\"}, {\"image\": \"./imagesTs/amos_0096.nii.gz\"}, {\"image\": \"./imagesTs/amos_0100.nii.gz\"}, {\"image\": \"./imagesTs/amos_0101.nii.gz\"}, {\"image\": \"./imagesTs/amos_0107.nii.gz\"}, {\"image\": \"./imagesTs/amos_0114.nii.gz\"}, {\"image\": \"./imagesTs/amos_0122.nii.gz\"}, {\"image\": \"./imagesTs/amos_0130.nii.gz\"}, {\"image\": \"./imagesTs/amos_0139.nii.gz\"}, {\"image\": \"./imagesTs/amos_0145.nii.gz\"}, {\"image\": \"./imagesTs/amos_0146.nii.gz\"}, {\"image\": \"./imagesTs/amos_0148.nii.gz\"}, {\"image\": \"./imagesTs/amos_0151.nii.gz\"}, {\"image\": \"./imagesTs/amos_0163.nii.gz\"}, {\"image\": \"./imagesTs/amos_0164.nii.gz\"}, {\"image\": \"./imagesTs/amos_0165.nii.gz\"}, {\"image\": \"./imagesTs/amos_0168.nii.gz\"}, {\"image\": \"./imagesTs/amos_0169.nii.gz\"}, {\"image\": 
\"./imagesTs/amos_0178.nii.gz\"}, {\"image\": \"./imagesTs/amos_0182.nii.gz\"}, {\"image\": \"./imagesTs/amos_0183.nii.gz\"}, {\"image\": \"./imagesTs/amos_0187.nii.gz\"}, {\"image\": \"./imagesTs/amos_0201.nii.gz\"}, {\"image\": \"./imagesTs/amos_0205.nii.gz\"}, {\"image\": \"./imagesTs/amos_0209.nii.gz\"}, {\"image\": \"./imagesTs/amos_0210.nii.gz\"}, {\"image\": \"./imagesTs/amos_0211.nii.gz\"}, {\"image\": \"./imagesTs/amos_0213.nii.gz\"}, {\"image\": \"./imagesTs/amos_0220.nii.gz\"}, {\"image\": \"./imagesTs/amos_0221.nii.gz\"}, {\"image\": \"./imagesTs/amos_0222.nii.gz\"}, {\"image\": \"./imagesTs/amos_0227.nii.gz\"}, {\"image\": \"./imagesTs/amos_0229.nii.gz\"}, {\"image\": \"./imagesTs/amos_0232.nii.gz\"}, {\"image\": \"./imagesTs/amos_0234.nii.gz\"}, {\"image\": \"./imagesTs/amos_0236.nii.gz\"}, {\"image\": \"./imagesTs/amos_0240.nii.gz\"}, {\"image\": \"./imagesTs/amos_0241.nii.gz\"}, {\"image\": \"./imagesTs/amos_0243.nii.gz\"}, {\"image\": \"./imagesTs/amos_0246.nii.gz\"}, {\"image\": \"./imagesTs/amos_0251.nii.gz\"}, {\"image\": \"./imagesTs/amos_0252.nii.gz\"}, {\"image\": \"./imagesTs/amos_0253.nii.gz\"}, {\"image\": \"./imagesTs/amos_0256.nii.gz\"}, {\"image\": \"./imagesTs/amos_0260.nii.gz\"}, {\"image\": \"./imagesTs/amos_0261.nii.gz\"}, {\"image\": \"./imagesTs/amos_0262.nii.gz\"}, {\"image\": \"./imagesTs/amos_0265.nii.gz\"}, {\"image\": \"./imagesTs/amos_0266.nii.gz\"}, {\"image\": \"./imagesTs/amos_0267.nii.gz\"}, {\"image\": \"./imagesTs/amos_0269.nii.gz\"}, {\"image\": \"./imagesTs/amos_0270.nii.gz\"}, {\"image\": \"./imagesTs/amos_0271.nii.gz\"}, {\"image\": \"./imagesTs/amos_0275.nii.gz\"}, {\"image\": \"./imagesTs/amos_0277.nii.gz\"}, {\"image\": \"./imagesTs/amos_0285.nii.gz\"}, {\"image\": \"./imagesTs/amos_0291.nii.gz\"}, {\"image\": \"./imagesTs/amos_0295.nii.gz\"}, {\"image\": \"./imagesTs/amos_0298.nii.gz\"}, {\"image\": \"./imagesTs/amos_0300.nii.gz\"}, {\"image\": \"./imagesTs/amos_0303.nii.gz\"}, {\"image\": 
\"./imagesTs/amos_0305.nii.gz\"}, {\"image\": \"./imagesTs/amos_0306.nii.gz\"}, {\"image\": \"./imagesTs/amos_0312.nii.gz\"}, {\"image\": \"./imagesTs/amos_0314.nii.gz\"}, {\"image\": \"./imagesTs/amos_0315.nii.gz\"}, {\"image\": \"./imagesTs/amos_0319.nii.gz\"}, {\"image\": \"./imagesTs/amos_0322.nii.gz\"}, {\"image\": \"./imagesTs/amos_0324.nii.gz\"}, {\"image\": \"./imagesTs/amos_0327.nii.gz\"}, {\"image\": \"./imagesTs/amos_0329.nii.gz\"}, {\"image\": \"./imagesTs/amos_0331.nii.gz\"}, {\"image\": \"./imagesTs/amos_0335.nii.gz\"}, {\"image\": \"./imagesTs/amos_0338.nii.gz\"}, {\"image\": \"./imagesTs/amos_0340.nii.gz\"}, {\"image\": \"./imagesTs/amos_0343.nii.gz\"}, {\"image\": \"./imagesTs/amos_0345.nii.gz\"}, {\"image\": \"./imagesTs/amos_0347.nii.gz\"}, {\"image\": \"./imagesTs/amos_0354.nii.gz\"}, {\"image\": \"./imagesTs/amos_0355.nii.gz\"}, {\"image\": \"./imagesTs/amos_0359.nii.gz\"}, {\"image\": \"./imagesTs/amos_0360.nii.gz\"}, {\"image\": \"./imagesTs/amos_0369.nii.gz\"}, {\"image\": \"./imagesTs/amos_0375.nii.gz\"}, {\"image\": \"./imagesTs/amos_0382.nii.gz\"}, {\"image\": \"./imagesTs/amos_0386.nii.gz\"}, {\"image\": \"./imagesTs/amos_0389.nii.gz\"}, {\"image\": \"./imagesTs/amos_0393.nii.gz\"}, {\"image\": \"./imagesTs/amos_0394.nii.gz\"}, {\"image\": \"./imagesTs/amos_0407.nii.gz\"}, {\"image\": \"./imagesTs/amos_0411.nii.gz\"}, {\"image\": \"./imagesTs/amos_0412.nii.gz\"}, {\"image\": \"./imagesTs/amos_0413.nii.gz\"}, {\"image\": \"./imagesTs/amos_0414.nii.gz\"}, {\"image\": \"./imagesTs/amos_0415.nii.gz\"}, {\"image\": \"./imagesTs/amos_0416.nii.gz\"}, {\"image\": \"./imagesTs/amos_0417.nii.gz\"}, {\"image\": \"./imagesTs/amos_0418.nii.gz\"}, {\"image\": \"./imagesTs/amos_0419.nii.gz\"}, {\"image\": \"./imagesTs/amos_0420.nii.gz\"}, {\"image\": \"./imagesTs/amos_0421.nii.gz\"}, {\"image\": \"./imagesTs/amos_0422.nii.gz\"}, {\"image\": \"./imagesTs/amos_0423.nii.gz\"}, {\"image\": \"./imagesTs/amos_0424.nii.gz\"}, {\"image\": 
\"./imagesTs/amos_0425.nii.gz\"}, {\"image\": \"./imagesTs/amos_0426.nii.gz\"}, {\"image\": \"./imagesTs/amos_0427.nii.gz\"}, {\"image\": \"./imagesTs/amos_0428.nii.gz\"}, {\"image\": \"./imagesTs/amos_0429.nii.gz\"}, {\"image\": \"./imagesTs/amos_0430.nii.gz\"}, {\"image\": \"./imagesTs/amos_0431.nii.gz\"}, {\"image\": \"./imagesTs/amos_0432.nii.gz\"}, {\"image\": \"./imagesTs/amos_0433.nii.gz\"}, {\"image\": \"./imagesTs/amos_0434.nii.gz\"}, {\"image\": \"./imagesTs/amos_0435.nii.gz\"}, {\"image\": \"./imagesTs/amos_0436.nii.gz\"}, {\"image\": \"./imagesTs/amos_0437.nii.gz\"}, {\"image\": \"./imagesTs/amos_0438.nii.gz\"}, {\"image\": \"./imagesTs/amos_0439.nii.gz\"}, {\"image\": \"./imagesTs/amos_0440.nii.gz\"}, {\"image\": \"./imagesTs/amos_0441.nii.gz\"}, {\"image\": \"./imagesTs/amos_0442.nii.gz\"}, {\"image\": \"./imagesTs/amos_0443.nii.gz\"}, {\"image\": \"./imagesTs/amos_0444.nii.gz\"}, {\"image\": \"./imagesTs/amos_0445.nii.gz\"}, {\"image\": \"./imagesTs/amos_0446.nii.gz\"}, {\"image\": \"./imagesTs/amos_0447.nii.gz\"}, {\"image\": \"./imagesTs/amos_0448.nii.gz\"}, {\"image\": \"./imagesTs/amos_0449.nii.gz\"}, {\"image\": \"./imagesTs/amos_0450.nii.gz\"}, {\"image\": \"./imagesTs/amos_0451.nii.gz\"}, {\"image\": \"./imagesTs/amos_0452.nii.gz\"}, {\"image\": \"./imagesTs/amos_0453.nii.gz\"}, {\"image\": \"./imagesTs/amos_0454.nii.gz\"}, {\"image\": \"./imagesTs/amos_0455.nii.gz\"}, {\"image\": \"./imagesTs/amos_0456.nii.gz\"}, {\"image\": \"./imagesTs/amos_0457.nii.gz\"}, {\"image\": \"./imagesTs/amos_0458.nii.gz\"}, {\"image\": \"./imagesTs/amos_0459.nii.gz\"}, {\"image\": \"./imagesTs/amos_0460.nii.gz\"}, {\"image\": \"./imagesTs/amos_0461.nii.gz\"}, {\"image\": \"./imagesTs/amos_0462.nii.gz\"}, {\"image\": \"./imagesTs/amos_0463.nii.gz\"}, {\"image\": \"./imagesTs/amos_0464.nii.gz\"}, {\"image\": \"./imagesTs/amos_0465.nii.gz\"}, {\"image\": \"./imagesTs/amos_0466.nii.gz\"}, {\"image\": \"./imagesTs/amos_0467.nii.gz\"}, {\"image\": 
\"./imagesTs/amos_0468.nii.gz\"}, {\"image\": \"./imagesTs/amos_0469.nii.gz\"}, {\"image\": \"./imagesTs/amos_0470.nii.gz\"}, {\"image\": \"./imagesTs/amos_0471.nii.gz\"}, {\"image\": \"./imagesTs/amos_0472.nii.gz\"}, {\"image\": \"./imagesTs/amos_0473.nii.gz\"}, {\"image\": \"./imagesTs/amos_0474.nii.gz\"}, {\"image\": \"./imagesTs/amos_0475.nii.gz\"}, {\"image\": \"./imagesTs/amos_0476.nii.gz\"}, {\"image\": \"./imagesTs/amos_0477.nii.gz\"}, {\"image\": \"./imagesTs/amos_0478.nii.gz\"}, {\"image\": \"./imagesTs/amos_0479.nii.gz\"}, {\"image\": \"./imagesTs/amos_0480.nii.gz\"}, {\"image\": \"./imagesTs/amos_0481.nii.gz\"}, {\"image\": \"./imagesTs/amos_0482.nii.gz\"}, {\"image\": \"./imagesTs/amos_0483.nii.gz\"}, {\"image\": \"./imagesTs/amos_0484.nii.gz\"}, {\"image\": \"./imagesTs/amos_0485.nii.gz\"}, {\"image\": \"./imagesTs/amos_0486.nii.gz\"}, {\"image\": \"./imagesTs/amos_0487.nii.gz\"}, {\"image\": \"./imagesTs/amos_0488.nii.gz\"}, {\"image\": \"./imagesTs/amos_0489.nii.gz\"}, {\"image\": \"./imagesTs/amos_0490.nii.gz\"}, {\"image\": \"./imagesTs/amos_0491.nii.gz\"}, {\"image\": \"./imagesTs/amos_0492.nii.gz\"}, {\"image\": \"./imagesTs/amos_0493.nii.gz\"}, {\"image\": \"./imagesTs/amos_0494.nii.gz\"}, {\"image\": \"./imagesTs/amos_0495.nii.gz\"}, {\"image\": \"./imagesTs/amos_0496.nii.gz\"}, {\"image\": \"./imagesTs/amos_0497.nii.gz\"}, {\"image\": \"./imagesTs/amos_0498.nii.gz\"}, {\"image\": \"./imagesTs/amos_0499.nii.gz\"}, {\"image\": \"./imagesTs/amos_0500.nii.gz\"}]}"
  },
  {
    "path": "Finetune/Amos/gen_json.py",
    "content": "from typing import Tuple\nimport numpy as np\nfrom batchgenerators.utilities.file_and_folder_operations import *\n\n\ndef get_identifiers_from_splitted_files(folder: str):\n    uniques = np.unique([i[:-12] for i in subfiles(folder, suffix='.nii.gz', join=False)])\n    return uniques\n\n\ndef generate_dataset_json(output_file: str, imagesTr_dir: str, imagesTs_dir: str, modalities: dict,\n                          labels: dict, dataset_name: str, sort_keys=True, license: str = \"hands off!\", dataset_description: str = \"\",\n                          dataset_reference=\"\", dataset_release='0.0'):\n    \"\"\"\n    :param output_file: This needs to be the full path to the dataset_CT.json you intend to write, so\n    output_file='DATASET_PATH/dataset_CT.json' where the folder DATASET_PATH points to is the one with the\n    imagesTr and labelsTr subfolders\n    :param imagesTr_dir: path to the imagesTr folder of that dataset\n    :param imagesTs_dir: path to the imagesTs folder of that dataset. Can be None\n    :param modalities: tuple of strings with modality names. must be in the same order as the images (first entry\n    corresponds to _0000.nii.gz, etc). Example: ('T1', 'T2', 'FLAIR').\n    :param labels: dict with int->str (key->value) mapping the label IDs to label names. Note that 0 is always\n    supposed to be background! Example: {0: 'background', 1: 'edema', 2: 'enhancing tumor'}\n    :param dataset_name: The name of the dataset. 
Can be anything you want\n    :param sort_keys: In order to sort or not, the keys in dataset_CT.json\n    :param license:\n    :param dataset_description:\n    :param dataset_reference: website of the dataset, if available\n    :param dataset_release:\n    :return:\n    \"\"\"\n    train_identifiers = get_identifiers_from_splitted_files(imagesTr_dir)\n\n    if imagesTs_dir is not None:\n        test_identifiers = get_identifiers_from_splitted_files(imagesTs_dir)\n    else:\n        test_identifiers = []\n\n    json_dict = {}\n    json_dict['name'] = dataset_name\n    json_dict['description'] = dataset_description\n    json_dict['tensorImageSize'] = \"4D\"\n    json_dict['reference'] = dataset_reference\n    json_dict['licence'] = license\n    json_dict['release'] = dataset_release\n    json_dict['modality'] = {str(i): modalities[i] for i in modalities.keys()}\n    json_dict['labels'] = {str(i): labels[i] for i in labels.keys()}\n\n    json_dict['numTraining'] = len(train_identifiers)\n    json_dict['numTest'] = len(test_identifiers)\n    json_dict['training'] = [\n        {'image': \"./imagesTr/%s_0000.nii.gz\" % i, \"label\": \"./labelsTr/%s.nii.gz\" % i} for i\n        in\n        train_identifiers]\n    json_dict['test'] = [\"./imagesTs/%s_0000.nii.gz\" % i for i in test_identifiers]\n\n    if not output_file.endswith(\"dataset_CT.json\"):\n        print(\"WARNING: output file name is not dataset_CT.json! This may be intentional or not. You decide. 
\"\n              \"Proceeding anyways...\")\n    save_json(json_dict, os.path.join(output_file), sort_keys=sort_keys)\n\n\nif __name__=='__main__':\n    generate_dataset_json(output_file='dataset/dataset.json',\n                          imagesTr_dir='D:\\data\\FLARE22\\imagesTr',\n                          imagesTs_dir='D:\\data\\FLARE22\\imagesTs',\n                          modalities={\"0\": \"CT\"},\n                          labels={\"0\": \"background\",\n                                  \"1\": \"Liver\",\n                                  \"2\": \"Right kidney\",\n                                  \"3\": \"Spleen\",\n                                  \"4\": \"Pancreas\",\n                                  \"5\": \"Aorta\",\n                                  \"6\": \"Inferior vena cava\",\n                                  \"7\": \"Right adrenal gland\",\n                                  \"8\": \"Left adrenal gland\",\n                                  \"9\": \"Gallbladder\",\n                                  \"10\": \"Esophagus\",\n                                  \"11\": \"Stomach\",\n                                  \"12\": \"Duodenum\",\n                                  \"13\": \"Left Kidney\"\n                                  },\n                          dataset_name=\"FLARE22\",\n                          dataset_description='0',\n                          dataset_reference='0')\n\n# nnUNet_predict -i nnUNet_raw_data_base/nnUNet_raw_data/Task022_FLARE22/imagesTs -o eval -t 22 -tr nnUNetTrainerV2_FLARE_Big -m 3d_fullres -p nnUNetPlansFLARE22Big --all_in_gpu True"
  },
  {
    "path": "Finetune/Amos/inferers.py",
    "content": "\"\"\"Multiview inferer.\"\"\"\r\n\r\nimport warnings\r\nfrom typing import Any, Callable, Dict, List, Mapping, Sequence, Tuple, Union\r\n\r\nimport torch\r\nimport torch.nn.functional as F\r\n\r\nfrom monai.data.utils import compute_importance_map, dense_patch_slices, get_valid_patch_size\r\nfrom monai.transforms import Resize\r\nfrom monai.utils import (\r\n    BlendMode,\r\n    PytorchPadMode,\r\n    convert_data_type,\r\n    ensure_tuple,\r\n    fall_back_tuple,\r\n    look_up_option,\r\n    optional_import,\r\n)\r\nfrom monai.inferers.utils import _get_scan_interval\r\n\r\n# from utils import view_ops\r\n# from utils import view_transforms\r\n\r\ntqdm, _ = optional_import(\"tqdm\", name=\"tqdm\")\r\n\r\n\r\ndef double_sliding_window_inference(\r\n    inputs: torch.Tensor,\r\n    view: int,\r\n    roi_size: Union[Sequence[int], int],\r\n    sw_batch_size: int,\r\n    predictor: Callable[..., Union[torch.Tensor, Sequence[torch.Tensor], Dict[Any, torch.Tensor]]],\r\n    overlap: float = 0.25,\r\n    mode: Union[BlendMode, str] = BlendMode.CONSTANT,\r\n    sigma_scale: Union[Sequence[float], float] = 0.125,\r\n    padding_mode: Union[PytorchPadMode, str] = PytorchPadMode.CONSTANT,\r\n    cval: float = 0.0,\r\n    sw_device: Union[torch.device, str, None] = None,\r\n    device: Union[torch.device, str, None] = None,\r\n    progress: bool = False,\r\n    roi_weight_map: Union[torch.Tensor, None] = None,\r\n    *args: Any,\r\n    **kwargs: Any,\r\n) -> Union[torch.Tensor, Tuple[torch.Tensor, ...], Dict[Any, torch.Tensor]]:\r\n    \"\"\"\r\n    Sliding window inference on two `inputs` with `predictor`.\r\n\r\n    The outputs of `predictor` could be a tensor, a tuple, or a dictionary of tensors.\r\n    Each output in the tuple or dict value is allowed to have different resolutions with respect to the input.\r\n    e.g., the input patch spatial size is [128,128,128], the output (a tuple of two patches) patch sizes\r\n    could be ([128,64,256], 
[64,32,128]).\r\n    In this case, the parameter `overlap` and `roi_size` need to be carefully chosen to ensure the output ROI is still\r\n    an integer. If the predictor's input and output spatial sizes are not equal, we recommend choosing the parameters\r\n    so that `overlap*roi_size*output_size/input_size` is an integer (for each spatial dimension).\r\n\r\n    When roi_size is larger than the inputs' spatial size, the input image are padded during inference.\r\n    To maintain the same spatial sizes, the output image will be cropped to the original input size.\r\n\r\n    Args:\r\n        inputs: input image to be processed (assuming NCHW[D])\r\n        roi_size: the spatial window size for inferences.\r\n            When its components have None or non-positives, the corresponding inputs dimension will be used.\r\n            if the components of the `roi_size` are non-positive values, the transform will use the\r\n            corresponding components of img size. For example, `roi_size=(32, -1)` will be adapted\r\n            to `(32, 64)` if the second spatial dimension size of img is `64`.\r\n        sw_batch_size: the batch size to run window slices.\r\n        predictor: given input tensor ``patch_data`` in shape NCHW[D],\r\n            The outputs of the function call ``predictor(patch_data)`` should be a tensor, a tuple, or a dictionary\r\n            with Tensor values. Each output in the tuple or dict value should have the same batch_size, i.e. 
NM'H'W'[D'];\r\n            where H'W'[D'] represents the output patch's spatial size, M is the number of output channels,\r\n            N is `sw_batch_size`, e.g., the input shape is (7, 1, 128,128,128),\r\n            the output could be a tuple of two tensors, with shapes: ((7, 5, 128, 64, 256), (7, 4, 64, 32, 128)).\r\n            In this case, the parameter `overlap` and `roi_size` need to be carefully chosen\r\n            to ensure the scaled output ROI sizes are still integers.\r\n            If the `predictor`'s input and output spatial sizes are different,\r\n            we recommend choosing the parameters so that ``overlap*roi_size*zoom_scale`` is an integer for each dimension.\r\n        overlap: Amount of overlap between scans.\r\n        mode: {``\"constant\"``, ``\"gaussian\"``}\r\n            How to blend output of overlapping windows. Defaults to ``\"constant\"``.\r\n\r\n            - ``\"constant``\": gives equal weight to all predictions.\r\n            - ``\"gaussian``\": gives less weight to predictions on edges of windows.\r\n\r\n        sigma_scale: the standard deviation coefficient of the Gaussian window when `mode` is ``\"gaussian\"``.\r\n            Default: 0.125. Actual window sigma is ``sigma_scale`` * ``dim_size``.\r\n            When sigma_scale is a sequence of floats, the values denote sigma_scale at the corresponding\r\n            spatial dimensions.\r\n        padding_mode: {``\"constant\"``, ``\"reflect\"``, ``\"replicate\"``, ``\"circular\"``}\r\n            Padding mode for ``inputs``, when ``roi_size`` is larger than inputs. Defaults to ``\"constant\"``\r\n            See also: https://pytorch.org/docs/stable/generated/torch.nn.functional.pad.html\r\n        cval: fill value for 'constant' padding mode. 
Default: 0\r\n        sw_device: device for the window data.\r\n            By default the device (and accordingly the memory) of the `inputs` is used.\r\n            Normally `sw_device` should be consistent with the device where `predictor` is defined.\r\n        device: device for the stitched output prediction.\r\n            By default the device (and accordingly the memory) of the `inputs` is used. If for example\r\n            set to device=torch.device('cpu') the gpu memory consumption is less and independent of the\r\n            `inputs` and `roi_size`. Output is on the `device`.\r\n        progress: whether to print a `tqdm` progress bar.\r\n        roi_weight_map: pre-computed (non-negative) weight map for each ROI.\r\n            If not given, and ``mode`` is not `constant`, this map will be computed on the fly.\r\n        args: optional args to be passed to ``predictor``.\r\n        kwargs: optional keyword args to be passed to ``predictor``.\r\n\r\n    Note:\r\n        - input must be channel-first and have a batch dim, supports N-D sliding window.\r\n\r\n    \"\"\"\r\n    compute_dtype = inputs.dtype\r\n    num_spatial_dims = len(inputs.shape) - 2\r\n    if overlap < 0 or overlap >= 1:\r\n        raise ValueError(\"overlap must be >= 0 and < 1.\")\r\n\r\n    # determine image spatial size and batch size\r\n    # Note: all input images must have the same image size and batch size\r\n    batch_size, _, *image_size_ = inputs.shape\r\n\r\n    if device is None:\r\n        device = inputs.device\r\n    if sw_device is None:\r\n        sw_device = inputs.device\r\n\r\n    roi_size = fall_back_tuple(roi_size, image_size_)\r\n    # in case that image size is smaller than roi size\r\n    image_size = tuple(max(image_size_[i], roi_size[i]) for i in range(num_spatial_dims))\r\n    pad_size = []\r\n    for k in range(len(inputs.shape) - 1, 1, -1):\r\n        diff = max(roi_size[k - 2] - inputs.shape[k], 0)\r\n        half = diff // 2\r\n        
pad_size.extend([half, diff - half])\r\n    inputs = F.pad(inputs, pad=pad_size, mode=look_up_option(padding_mode, PytorchPadMode).value, value=cval)\r\n    # inputs2 = F.pad(inputs2, pad=pad_size, mode=look_up_option(padding_mode, PytorchPadMode).value, value=cval)\r\n\r\n    scan_interval = _get_scan_interval(image_size, roi_size, num_spatial_dims, overlap)\r\n\r\n    # Store all slices in list\r\n    slices = dense_patch_slices(image_size, roi_size, scan_interval)\r\n    num_win = len(slices)  # number of windows per image\r\n    total_slices = num_win * batch_size  # total number of windows\r\n\r\n    # Create window-level importance map\r\n    valid_patch_size = get_valid_patch_size(image_size, roi_size)\r\n    if valid_patch_size == roi_size and (roi_weight_map is not None):\r\n        importance_map = roi_weight_map\r\n    else:\r\n        try:\r\n            importance_map = compute_importance_map(valid_patch_size, mode=mode, sigma_scale=sigma_scale, device=device)\r\n        except BaseException as e:\r\n            raise RuntimeError(\r\n                \"Seems to be OOM. 
Please try smaller patch size or mode='constant' instead of mode='gaussian'.\"\r\n            ) from e\r\n    importance_map = convert_data_type(importance_map, torch.Tensor, device, compute_dtype)[0]  # type: ignore\r\n    # handle non-positive weights\r\n    min_non_zero = max(importance_map[importance_map != 0].min().item(), 1e-3)\r\n    importance_map = torch.clamp(importance_map.to(torch.float32), min=min_non_zero).to(compute_dtype)\r\n\r\n    # Perform predictions\r\n    dict_key, output_image_list_1, output_image_list_2, count_map_list = None, [], [], []\r\n    _initialized_ss = -1\r\n    is_tensor_output = True  # whether the predictor's output is a tensor (instead of dict/tuple)\r\n\r\n    # for each patch\r\n    for slice_g in tqdm(range(0, total_slices, sw_batch_size)) if progress else range(0, total_slices, sw_batch_size):\r\n        slice_range = range(slice_g, min(slice_g + sw_batch_size, total_slices))\r\n        unravel_slice = [\r\n            [slice(int(idx / num_win), int(idx / num_win) + 1), slice(None)] + list(slices[idx % num_win])\r\n            for idx in slice_range\r\n        ]\r\n        window_data = torch.cat([inputs[win_slice] for win_slice in unravel_slice]).to(sw_device)\r\n        view_list = [view, (view + 1) % len(view_transforms.permutation_transforms)]\r\n        window_data_list = [view_ops.get_permute_transform(0, dst)(window_data) for dst in view_list]\r\n        # window_data_2 = torch.cat([inputs2[win_slice] for win_slice in unravel_slice]).to(sw_device)\r\n        seg_prob_out_1, seg_prob_out_2 = predictor(window_data_list[0], window_data_list[1], view_list, *args, **kwargs)  # batched patch segmentation\r\n        seg_prob_out_1, seg_prob_out_2 = view_ops.permute_inverse([seg_prob_out_1, seg_prob_out_2], view_list)\r\n\r\n        # convert seg_prob_out to tuple seg_prob_tuple, this does not allocate new memory.\r\n        seg_prob_tuple_1: Tuple[torch.Tensor, ...]\r\n        seg_prob_tuple_2: Tuple[torch.Tensor, ...]\r\n  
      if isinstance(seg_prob_out_1, torch.Tensor):\r\n            seg_prob_tuple_1 = (seg_prob_out_1,)\r\n            seg_prob_tuple_2 = (seg_prob_out_2,)\r\n        elif isinstance(seg_prob_out_1, Mapping):\r\n            if dict_key is None:\r\n                dict_key = sorted(seg_prob_out_1.keys())  # track predictor's output keys\r\n            seg_prob_tuple_1 = tuple(seg_prob_out_1[k] for k in dict_key)\r\n            seg_prob_tuple_2 = tuple(seg_prob_out_2[k] for k in dict_key)\r\n            is_tensor_output = False\r\n        else:\r\n            seg_prob_tuple_1 = ensure_tuple(seg_prob_out_1)\r\n            seg_prob_tuple_2 = ensure_tuple(seg_prob_out_2)\r\n            is_tensor_output = False\r\n\r\n        # for each output in multi-output list\r\n        for ss in range(len(seg_prob_tuple_1)):\r\n            seg_prob_1 = seg_prob_tuple_1[ss].to(device)  # BxCxMxNxP or BxCxMxN\r\n            seg_prob_2 = seg_prob_tuple_2[ss].to(device)\r\n\r\n            # compute zoom scale: out_roi_size/in_roi_size\r\n            zoom_scale = []\r\n            for axis, (img_s_i, out_w_i, in_w_i) in enumerate(\r\n                zip(image_size, seg_prob_1.shape[2:], window_data.shape[2:])\r\n            ):\r\n                _scale = out_w_i / float(in_w_i)\r\n                if not (img_s_i * _scale).is_integer():\r\n                    warnings.warn(\r\n                        f\"For spatial axis: {axis}, output[{ss}] will have non-integer shape. Spatial \"\r\n                        f\"zoom_scale between output[{ss}] and input is {_scale}. Please pad inputs.\"\r\n                    )\r\n                zoom_scale.append(_scale)\r\n\r\n            if _initialized_ss < ss:  # init. 
the ss-th buffer at the first iteration\r\n                # construct multi-resolution outputs\r\n                output_classes = seg_prob_1.shape[1]\r\n                output_shape = [batch_size, output_classes] + [\r\n                    int(image_size_d * zoom_scale_d) for image_size_d, zoom_scale_d in zip(image_size, zoom_scale)\r\n                ]\r\n                # allocate memory to store the full output and the count for overlapping parts\r\n                output_image_list_1.append(torch.zeros(output_shape, dtype=compute_dtype, device=device))\r\n                output_image_list_2.append(torch.zeros(output_shape, dtype=compute_dtype, device=device))\r\n                count_map_list.append(torch.zeros([1, 1] + output_shape[2:], dtype=compute_dtype, device=device))\r\n                _initialized_ss += 1\r\n\r\n            # resizing the importance_map\r\n            resizer = Resize(spatial_size=seg_prob_1.shape[2:], mode=\"nearest\", anti_aliasing=False)\r\n\r\n            # store the result in the proper location of the full output. Apply weights from importance map.\r\n            for idx, original_idx in zip(slice_range, unravel_slice):\r\n                # zoom roi\r\n                original_idx_zoom = list(original_idx)  # 4D for 2D image, 5D for 3D image\r\n                for axis in range(2, len(original_idx_zoom)):\r\n                    zoomed_start = original_idx[axis].start * zoom_scale[axis - 2]\r\n                    zoomed_end = original_idx[axis].stop * zoom_scale[axis - 2]\r\n                    if not zoomed_start.is_integer() or (not zoomed_end.is_integer()):\r\n                        warnings.warn(\r\n                            f\"For axis-{axis-2} of output[{ss}], the output roi range is not int. \"\r\n                            f\"Input roi range is ({original_idx[axis].start}, {original_idx[axis].stop}). \"\r\n                            f\"Spatial zoom_scale between output[{ss}] and input is {zoom_scale[axis - 2]}. 
\"\r\n                            f\"Corresponding output roi range is ({zoomed_start}, {zoomed_end}).\\n\"\r\n                            f\"Please change overlap ({overlap}) or roi_size ({roi_size[axis-2]}) for axis-{axis-2}. \"\r\n                            \"Tips: if overlap*roi_size*zoom_scale is an integer, it usually works.\"\r\n                        )\r\n                    original_idx_zoom[axis] = slice(int(zoomed_start), int(zoomed_end), None)\r\n                importance_map_zoom = resizer(importance_map.unsqueeze(0))[0].to(compute_dtype)\r\n                # store results and weights\r\n                output_image_list_1[ss][original_idx_zoom] += importance_map_zoom * seg_prob_1[idx - slice_g]\r\n                output_image_list_2[ss][original_idx_zoom] += importance_map_zoom * seg_prob_2[idx - slice_g]\r\n                count_map_list[ss][original_idx_zoom] += (\r\n                    importance_map_zoom.unsqueeze(0).unsqueeze(0).expand(count_map_list[ss][original_idx_zoom].shape)\r\n                )\r\n\r\n    # account for any overlapping sections\r\n    for ss in range(len(output_image_list_1)):\r\n        count_map_pop = count_map_list.pop(0)\r\n        output_image_list_1[ss] = (output_image_list_1[ss] / count_map_pop).to(compute_dtype)\r\n        output_image_list_2[ss] = (output_image_list_2[ss] / count_map_pop).to(compute_dtype)\r\n\r\n    # remove padding if image_size smaller than roi_size\r\n    for ss in range(len(output_image_list_1)):\r\n        output_i_1, output_i_2 = output_image_list_1[ss], output_image_list_2[ss]\r\n        if torch.isnan(output_i_1).any() or torch.isinf(output_i_1).any():\r\n            warnings.warn(\"Sliding window inference results contain NaN or Inf.\")\r\n        if torch.isnan(output_i_2).any() or torch.isinf(output_i_2).any():\r\n            warnings.warn(\"Sliding window inference results contain NaN or Inf.\")\r\n\r\n        zoom_scale = [\r\n            seg_prob_map_shape_d / roi_size_d for 
seg_prob_map_shape_d, roi_size_d in zip(output_i_1.shape[2:], roi_size)\r\n        ]\r\n\r\n        final_slicing: List[slice] = []\r\n        for sp in range(num_spatial_dims):\r\n            slice_dim = slice(pad_size[sp * 2], image_size_[num_spatial_dims - sp - 1] + pad_size[sp * 2])\r\n            slice_dim = slice(\r\n                int(round(slice_dim.start * zoom_scale[num_spatial_dims - sp - 1])),\r\n                int(round(slice_dim.stop * zoom_scale[num_spatial_dims - sp - 1])),\r\n            )\r\n            final_slicing.insert(0, slice_dim)\r\n        while len(final_slicing) < len(output_i_1.shape):\r\n            final_slicing.insert(0, slice(None))\r\n        output_image_list_1[ss] = output_i_1[final_slicing]\r\n        output_image_list_2[ss] = output_i_2[final_slicing]\r\n\r\n    if dict_key is not None:  # if output of predictor is a dict\r\n        final_output_1 = dict(zip(dict_key, output_image_list_1))\r\n        final_output_2 = dict(zip(dict_key, output_image_list_2))\r\n    else:\r\n        final_output_1 = tuple(output_image_list_1)  # type: ignore\r\n        final_output_2 = tuple(output_image_list_2)  # type: ignore\r\n    final_output_1 = final_output_1[0] if is_tensor_output else final_output_1  # type: ignore\r\n    final_output_2 = final_output_2[0] if is_tensor_output else final_output_2  # type: ignore\r\n    return final_output_1, final_output_2\r\n\r\n\r\ndef one_hot(labels: torch.Tensor, num_classes: int, dtype: torch.dtype = torch.float, dim: int = 1) -> torch.Tensor:\r\n    \"\"\"\r\n    For every value v in `labels`, the value in the output will be either 1 or 0. 
Each vector along the `dim`-th\r\n    dimension has the \"one-hot\" format, i.e., it has a total length of `num_classes`,\r\n    with a one and `num_class-1` zeros.\r\n    Note that this will include the background label, thus a binary mask should be treated as having two classes.\r\n\r\n    Args:\r\n        labels: input tensor of integers to be converted into the 'one-hot' format. Internally `labels` will be\r\n            converted into integers `labels.long()`.\r\n        num_classes: number of output channels, the corresponding length of `labels[dim]` will be converted to\r\n            `num_classes` from `1`.\r\n        dtype: the data type of the output one_hot label.\r\n        dim: the dimension to be converted to `num_classes` channels from `1` channel, should be non-negative number.\r\n\r\n    Example:\r\n\r\n    For a tensor `labels` of dimensions [B]1[spatial_dims], return a tensor of dimensions `[B]N[spatial_dims]`\r\n    when `num_classes=N` number of classes and `dim=1`.\r\n\r\n    .. 
code-block:: python\r\n\r\n        from monai.networks.utils import one_hot\r\n        import torch\r\n\r\n        a = torch.randint(0, 2, size=(1, 2, 2, 2))\r\n        out = one_hot(a, num_classes=2, dim=0)\r\n        print(out.shape)  # torch.Size([2, 2, 2, 2])\r\n\r\n        a = torch.randint(0, 2, size=(2, 1, 2, 2, 2))\r\n        out = one_hot(a, num_classes=2, dim=1)\r\n        print(out.shape)  # torch.Size([2, 2, 2, 2, 2])\r\n\r\n    \"\"\"\r\n\r\n    # if `dim` is bigger, add singleton dim at the end\r\n    if labels.ndim < dim + 1:\r\n        shape = list(labels.shape) + [1] * (dim + 1 - len(labels.shape))\r\n        labels = torch.reshape(labels, shape)\r\n\r\n    sh = list(labels.shape)\r\n\r\n    if sh[dim] != 1:\r\n        raise AssertionError(\"labels should have a channel with length equal to one.\")\r\n\r\n    sh[dim] = num_classes\r\n\r\n    o = torch.zeros(size=sh, dtype=dtype, device=labels.device)\r\n    labels = o.scatter_(dim=dim, index=labels.long(), value=1)\r\n\r\n    return labels\r\n\r\n\r\n\"\"\"View operations.\"\"\"\r\n\r\nfrom typing import Sequence, Tuple\r\n\r\n\r\n\"\"\"View operations.\r\n\r\nInput format: [B, C, X, Y, Z, ...]\r\n\r\nNOTE(meijieru): 0 is reserved for identify transform.\r\n\"\"\"\r\n\r\nfrom typing import Callable, Sequence, Union\r\n\r\nimport enum\r\n\r\nimport torch\r\n\r\nRotateType = int\r\nPermuteType = int\r\nTransformFuncType = Callable[[torch.Tensor], torch.Tensor]\r\n# A composition of multiple view transoforms.\r\nTransformsType = Sequence[Union[PermuteType, RotateType]]\r\n\r\n\r\nclass GroupName(enum.Enum):\r\n\r\n    ROTATE = 1\r\n    PERMUTE = 2\r\n\r\n\r\nDEFAULT_ORDER = (GroupName.ROTATE, GroupName.PERMUTE)\r\n\r\nrotation_transforms = {\r\n    0: lambda x: x,\r\n    1: lambda x: x.rot90(1, (3, 4)),\r\n    2: lambda x: x.rot90(2, (3, 4)),\r\n    3: lambda x: x.rot90(3, (3, 4)),\r\n}\r\nrotation_inverse_transforms = {\r\n    0: lambda x: x,\r\n    1: lambda x: x.rot90(3, (3, 4)),\r\n    2: lambda 
x: x.rot90(2, (3, 4)),\r\n    3: lambda x: x.rot90(1, (3, 4)),\r\n}\r\npermutation_transforms = {\r\n    0: lambda x: x,\r\n    1: lambda x: x.permute(0, 1, 3, 2, 4),\r\n    2: lambda x: x.permute(0, 1, 4, 3, 2),\r\n}\r\npermutation_inverse_transforms = {\r\n    0: lambda x: x,\r\n    1: lambda x: x.permute(0, 1, 3, 2, 4),\r\n    2: lambda x: x.permute(0, 1, 4, 3, 2),\r\n}\r\n\r\nall_forward_transforms = {\r\n    GroupName.ROTATE: rotation_transforms,\r\n    GroupName.PERMUTE: permutation_transforms,\r\n}\r\nall_backward_transforms = {\r\n    GroupName.ROTATE: rotation_inverse_transforms,\r\n    GroupName.PERMUTE: permutation_inverse_transforms,\r\n}\r\n\r\n\r\ndef get_transforms_func(views: TransformsType,\r\n                        orders: Sequence[GroupName] = DEFAULT_ORDER,\r\n                        inverse: bool = False) -> TransformFuncType:\r\n    \"\"\"Gets sequential transform functions.\"\"\"\r\n    if len(views) != len(orders):\r\n        raise ValueError()\r\n\r\n    all_transforms = (all_forward_transforms\r\n                      if not inverse else all_backward_transforms)\r\n    funcs = [\r\n        all_transforms[group_name][view]\r\n        for view, group_name in zip(views, orders)\r\n    ]\r\n    funcs = funcs if not inverse else funcs[::-1]\r\n\r\n    def aux(val):\r\n        for func in funcs:\r\n            val = func(val)\r\n        return val\r\n\r\n    return aux\r\n\r\n\r\nimport torch\r\nimport numpy as np\r\n\r\n\r\ndef get_permute_transform(view_src: PermuteType,\r\n                          view_dst: PermuteType) -> TransformFuncType:\r\n    \"\"\"Gets transform function from view src to view dst.\"\"\"\r\n\r\n    def transform(x: torch.Tensor) -> torch.Tensor:\r\n        x_view_0 = view_transforms.permutation_inverse_transforms[view_src](x)\r\n        return view_transforms.permutation_transforms[view_dst](\r\n            x_view_0).contiguous()\r\n\r\n    return transform\r\n\r\n\r\ndef permute_inverse(xs: 
Sequence[torch.Tensor],\r\n                    views: Sequence[PermuteType]) -> Sequence[torch.Tensor]:\r\n    \"\"\"Transforms data back to origin view.\"\"\"\r\n    return [get_permute_transform(view, 0)(x) for x, view in zip(xs, views)]\r\n\r\n\r\ndef permute_rand(\r\n    x: torch.Tensor,\r\n    num_samples: int = 2\r\n) -> Tuple[Sequence[torch.Tensor], Sequence[PermuteType]]:\r\n    \"\"\"Samples different transforms of data.\"\"\"\r\n    num_permutes = len(view_transforms.permutation_transforms)\r\n    if num_samples > num_permutes:\r\n        raise ValueError('Duplicate samples.')\r\n    view_dsts = np.random.permutation(num_permutes)[:num_samples].tolist()\r\n    return [get_permute_transform(0, view)(x) for view in view_dsts], view_dsts"
  },
  {
    "path": "Finetune/Amos/main.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport argparse\nimport os\nfrom functools import partial\nimport logging\nimport numpy as np\nimport torch\nimport torch.distributed as dist\nimport torch.multiprocessing as mp\nimport torch.nn.parallel\nimport torch.utils.data.distributed\nfrom optimizers.lr_scheduler import LinearWarmupCosineAnnealingLR\nfrom trainer import run_training\nfrom utils.data_utils import get_loader\nimport torch.nn as nn\nfrom monai.inferers import sliding_window_inference\nfrom monai.losses import DiceCELoss\nfrom monai.metrics import DiceMetric\nfrom monai.networks.nets import SwinUNETR\nfrom monai.transforms import Activations, AsDiscrete, Compose\nfrom monai.utils.enums import MetricReduction\nfrom monai.networks.blocks import PatchEmbed, UnetOutBlock, UnetrBasicBlock, UnetrUpBlock\nfrom monai.networks.nets.swin_unetr import SwinTransformer as SwinViT\nfrom monai.utils import ensure_tuple_rep\n\nos.environ['CUDA_VISIBLE_DEVICES'] = \"4\"\nos.environ['MASTER_ADDR'] = 'localhost'\nos.environ['MASTER_PORT'] = '28890'\n\nimport resource\n\nrlimit = resource.getrlimit(resource.RLIMIT_NOFILE)\nresource.setrlimit(resource.RLIMIT_NOFILE, (8192, rlimit[1]))\nprint('Setting resource limit:', str(resource.getrlimit(resource.RLIMIT_NOFILE)))\n\nparser = argparse.ArgumentParser(description=\"Swin UNETR segmentation pipeline\")\nparser.add_argument(\"--checkpoint\", default=None, help=\"start 
training from saved checkpoint\")\nparser.add_argument(\"--logdir\", default=\"logs\", type=str, help=\"directory to save the tensorboard logs\")\nparser.add_argument(\n    \"--pretrained_dir\", default=\"./pretrained_models/\", type=str, help=\"pretrained checkpoint directory\"\n)\nparser.add_argument(\"--data_dir\", default=\"/data/linshan/CTs/Amos2022/\", type=str, help=\"dataset directory\")\nparser.add_argument(\"--json_list\", default=\"dataset_CT.json\", type=str, help=\"dataset json file\")\nparser.add_argument(\n    \"--pretrained_checkpoint\",default=\"VoCo_10k.pt\", type=str, help=\"VoCo_10k pretrained model\")\nparser.add_argument(\n    \"--pretrained_model_name\",\n    default=\"model_bestVal.pt\",\n    type=str,\n    help=\"pretrained model name\",\n)\nroi = 96\nparser.add_argument(\"--save_checkpoint\", default=True, help=\"save checkpoint during training\")\nparser.add_argument(\"--max_epochs\", default=1000, type=int, help=\"max number of training epochs\")\nparser.add_argument(\"--batch_size\", default=1, type=int, help=\"number of batch size\")\nparser.add_argument(\"--sw_batch_size\", default=16, type=int, help=\"number of sliding window batch size\")\nparser.add_argument(\"--optim_lr\", default=3e-4, type=float, help=\"optimization learning rate\")\nparser.add_argument(\"--optim_name\", default=\"adamw\", type=str, help=\"optimization algorithm\")\nparser.add_argument(\"--reg_weight\", default=0.005, type=float, help=\"regularization weight\")\nparser.add_argument(\"--momentum\", default=0.99, type=float, help=\"momentum\")\nparser.add_argument(\"--noamp\", default=False, help=\"do NOT use amp for training\")\nparser.add_argument(\"--val_every\", default=50, type=int, help=\"validation frequency\")\nparser.add_argument(\"--distributed\", action=\"store_true\", help=\"start distributed training\")\nparser.add_argument(\"--world_size\", default=1, type=int, help=\"number of nodes for distributed training\")\nparser.add_argument(\"--rank\", 
default=0, type=int, help=\"node rank for distributed training\")\nparser.add_argument(\"--dist-url\", default=\"tcp://127.0.0.1:23456\", type=str, help=\"distributed url\")\nparser.add_argument(\"--dist-backend\", default=\"nccl\", type=str, help=\"distributed backend\")\nparser.add_argument(\"--norm_name\", default=\"instance\", type=str, help=\"normalization name\")\nparser.add_argument(\"--workers\", default=8, type=int, help=\"number of workers\")\nparser.add_argument(\"--feature_size\", default=48, type=int, help=\"feature size\")\nparser.add_argument(\"--in_channels\", default=1, type=int, help=\"number of input channels\")\nparser.add_argument(\"--out_channels\", default=16, type=int, help=\"number of output channels\")\nparser.add_argument(\"--use_normal_dataset\", default=True, help=\"use monai Dataset class\")\nparser.add_argument(\"--a_min\", default=-175.0, type=float, help=\"a_min in ScaleIntensityRanged\")\nparser.add_argument(\"--a_max\", default=250.0, type=float, help=\"a_max in ScaleIntensityRanged\")\nparser.add_argument(\"--b_min\", default=0.0, type=float, help=\"b_min in ScaleIntensityRanged\")\nparser.add_argument(\"--b_max\", default=1.0, type=float, help=\"b_max in ScaleIntensityRanged\")\nparser.add_argument(\"--space_x\", default=1.5, type=float, help=\"spacing in x direction\")\nparser.add_argument(\"--space_y\", default=1.5, type=float, help=\"spacing in y direction\")\nparser.add_argument(\"--space_z\", default=1.5, type=float, help=\"spacing in z direction\")\nparser.add_argument(\"--roi_x\", default=roi, type=int, help=\"roi size in x direction\")\nparser.add_argument(\"--roi_y\", default=roi, type=int, help=\"roi size in y direction\")\nparser.add_argument(\"--roi_z\", default=roi, type=int, help=\"roi size in z direction\")\nparser.add_argument(\"--dropout_rate\", default=0.0, type=float, help=\"dropout rate\")\nparser.add_argument(\"--dropout_path_rate\", default=0.0, type=float, help=\"drop path 
rate\")\nparser.add_argument(\"--RandFlipd_prob\", default=0.2, type=float, help=\"RandFlipd aug probability\")\nparser.add_argument(\"--RandRotate90d_prob\", default=0.2, type=float, help=\"RandRotate90d aug probability\")\nparser.add_argument(\"--RandScaleIntensityd_prob\", default=0.1, type=float, help=\"RandScaleIntensityd aug probability\")\nparser.add_argument(\"--RandShiftIntensityd_prob\", default=0.1, type=float, help=\"RandShiftIntensityd aug probability\")\nparser.add_argument(\"--infer_overlap\", default=0.75, type=float, help=\"sliding window inference overlap\")\nparser.add_argument(\"--lrschedule\", default=\"warmup_cosine\", type=str, help=\"type of learning rate scheduler\")\nparser.add_argument(\"--warmup_epochs\", default=50, type=int, help=\"number of warmup epochs\")\nparser.add_argument(\"--resume_ckpt\", action=\"store_true\", help=\"resume training from pretrained checkpoint\")\nparser.add_argument(\"--smooth_dr\", default=1e-6, type=float, help=\"constant added to dice denominator to avoid nan\")\nparser.add_argument(\"--smooth_nr\", default=0.0, type=float, help=\"constant added to dice numerator to avoid zero\")\nparser.add_argument(\"--use_checkpoint\", default=True, help=\"use gradient checkpointing to save memory\")\nparser.add_argument(\"--use_ssl_pretrained\", default=True, help=\"use self-supervised pretrained weights\")\nparser.add_argument(\"--spatial_dims\", default=3, type=int, help=\"spatial dimension of input data\")\nparser.add_argument(\"--squared_dice\", action=\"store_true\", help=\"use squared Dice\")\n\n\ndef main():\n    args = parser.parse_args()\n    args.amp = not args.noamp\n    if args.distributed:\n        args.ngpus_per_node = torch.cuda.device_count()\n        print(\"Found total gpus\", args.ngpus_per_node)\n        args.world_size = args.ngpus_per_node * args.world_size\n        mp.spawn(main_worker, nprocs=args.ngpus_per_node, args=(args,))\n    else:\n        main_worker(gpu=0, args=args)\n\n\ndef 
main_worker(gpu, args):\n    if args.distributed:\n        torch.multiprocessing.set_start_method(\"fork\", force=True)\n    np.set_printoptions(formatter={\"float\": \"{: 0.3f}\".format}, suppress=True)\n    args.gpu = gpu\n    if args.distributed:\n        args.rank = args.rank * args.ngpus_per_node + gpu\n        dist.init_process_group(\n            backend=args.dist_backend, init_method=args.dist_url, world_size=args.world_size, rank=args.rank\n        )\n    torch.cuda.set_device(0)\n\n    torch.backends.cudnn.enabled = True\n    torch.backends.cudnn.benchmark = True\n    args.test_mode = False\n    loader = get_loader(args)\n    print(args.rank, \" gpu\", args.gpu)\n    if args.rank == 0:\n        print(\"Batch size is:\", args.batch_size, \"epochs\", args.max_epochs)\n    inf_size = [args.roi_x, args.roi_y, args.roi_z]\n\n    if args.rank == 0:\n        os.makedirs(args.logdir, exist_ok=True)\n    logger = init_log('global', logging.INFO)\n    logger.propagate = 0\n\n    pretrained_dir = args.pretrained_dir\n    model = SwinUNETR(\n        img_size=(args.roi_x, args.roi_y, args.roi_z),\n        in_channels=args.in_channels,\n        out_channels=args.out_channels,\n        feature_size=args.feature_size,\n        drop_rate=0.0,\n        attn_drop_rate=0.0,\n        dropout_path_rate=args.dropout_path_rate,\n        use_checkpoint=args.use_checkpoint,\n        use_v2=True\n    )\n\n    if args.resume_ckpt:\n        model_dict = torch.load(os.path.join(pretrained_dir, args.pretrained_model_name))[\"state_dict\"]\n        model.load_state_dict(model_dict)\n        print(\"Use pretrained weights\")\n\n    if args.use_ssl_pretrained:\n        try:\n            # model_VoCoEMA.pt\n            # model_dict = torch.load(\"./pretrained_models/supervised_suprem_swinunetr_2100.pth\", map_location=torch.device('cpu'))\n            # model_dict = torch.load(\"./pretrained_models/model_VoCoEMA.pt\", map_location=torch.device('cpu'))\n            model_dict = 
torch.load(args.pretrained_checkpoint,\n                                    map_location=torch.device('cpu'))\n\n            state_dict = model_dict\n            # state_dict = model_dict['net']\n            # fix potential differences in state dict keys from pre-training to\n            # fine-tuning\n            if \"module.\" in list(state_dict.keys())[0]:\n                print(\"Tag 'module.' found in state dict - fixing!\")\n                for key in list(state_dict.keys()):\n                    state_dict[key.replace(\"module.\", \"\")] = state_dict.pop(key)\n            if \"swin_vit\" in list(state_dict.keys())[0]:\n                print(\"Tag 'swin_vit' found in state dict - fixing!\")\n                for key in list(state_dict.keys()):\n                    state_dict[key.replace(\"swin_vit\", \"swinViT\")] = state_dict.pop(key)\n            # We now load model weights, setting param `strict` to False, i.e.:\n            # this load the encoder weights (Swin-ViT, SSL pre-trained), but leaves\n            # the decoder weights untouched (CNN UNet decoder).\n            model.load_state_dict(state_dict, strict=False)\n            print(\"Using pretrained voco ema self-supervised Swin UNETR backbone weights !\")\n        except ValueError:\n            raise ValueError(\"Self-supervised pre-trained weights not available for\" + str(args.model_name))\n\n    if args.squared_dice:\n        dice_loss = DiceCELoss(\n            to_onehot_y=True, softmax=True, squared_pred=True, smooth_nr=args.smooth_nr, smooth_dr=args.smooth_dr\n        )\n    else:\n        dice_loss = DiceCELoss(include_background=False, to_onehot_y=True, softmax=True)\n\n    post_label = AsDiscrete(to_onehot=args.out_channels)\n    post_pred = AsDiscrete(argmax=True, to_onehot=args.out_channels)\n    dice_acc = DiceMetric(include_background=False, reduction=MetricReduction.MEAN, get_not_nans=True)\n    model_inferer = partial(\n        sliding_window_inference,\n        roi_size=inf_size,\n  
      sw_batch_size=args.sw_batch_size,\n        predictor=model,\n        overlap=args.infer_overlap,\n    )\n\n    pytorch_total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)\n    print(\"Total parameters count\", pytorch_total_params)\n\n    best_acc = 0\n    start_epoch = 0\n\n    if args.checkpoint is not None:\n        checkpoint = torch.load(args.checkpoint, map_location=\"cpu\")\n        from collections import OrderedDict\n\n        new_state_dict = OrderedDict()\n        for k, v in checkpoint[\"state_dict\"].items():\n            new_state_dict[k.replace(\"backbone.\", \"\")] = v\n        model.load_state_dict(new_state_dict, strict=False)\n        if \"epoch\" in checkpoint:\n            start_epoch = checkpoint[\"epoch\"]\n        if \"best_acc\" in checkpoint:\n            best_acc = checkpoint[\"best_acc\"]\n        print(\"=> loaded checkpoint '{}' (epoch {}) (bestacc {})\".format(args.checkpoint, start_epoch, best_acc))\n\n    model.cuda(args.gpu)\n\n    if args.distributed:\n        torch.cuda.set_device(args.gpu)\n        if args.norm_name == \"batch\":\n            model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model)\n        model.cuda(args.gpu)\n        model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu], output_device=args.gpu)\n    if args.optim_name == \"adam\":\n        optimizer = torch.optim.Adam(model.parameters(), lr=args.optim_lr, weight_decay=args.reg_weight)\n\n    elif args.optim_name == \"adamw\":\n        optimizer = torch.optim.AdamW(model.parameters(), lr=args.optim_lr, weight_decay=args.reg_weight)\n        # optimizer = torch.optim.AdamW(model.parameters(), lr=args.optim_lr, amsgrad=True)\n\n    elif args.optim_name == \"sgd\":\n        optimizer = torch.optim.SGD(\n            model.parameters(), lr=args.optim_lr, momentum=args.momentum, nesterov=True, weight_decay=args.reg_weight\n        )\n    else:\n        raise ValueError(\"Unsupported Optimization Procedure: 
\" + str(args.optim_name))\n\n    if args.lrschedule == \"warmup_cosine\":\n        scheduler = LinearWarmupCosineAnnealingLR(\n            optimizer, warmup_epochs=args.warmup_epochs, max_epochs=args.max_epochs\n        )\n    elif args.lrschedule == \"cosine_anneal\":\n        scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=args.max_epochs)\n        if args.checkpoint is not None:\n            scheduler.step(epoch=start_epoch)\n    else:\n        scheduler = None\n    accuracy = run_training(\n        model=model,\n        train_loader=loader[0],\n        val_loader=loader[1],\n        optimizer=optimizer,\n        loss_func=dice_loss,\n        acc_func=dice_acc,\n        args=args,\n        model_inferer=model_inferer,\n        scheduler=scheduler,\n        start_epoch=start_epoch,\n        post_label=post_label,\n        post_pred=post_pred,\n    )\n    return accuracy\n\n\nlogs = set()\n\n\ndef init_log(name, level=logging.INFO):\n    if (name, level) in logs:\n        return\n    logs.add((name, level))\n    logger = logging.getLogger(name)\n    logger.setLevel(level)\n    ch = logging.StreamHandler()\n    ch.setLevel(level)\n    if \"SLURM_PROCID\" in os.environ:\n        rank = int(os.environ[\"SLURM_PROCID\"])\n        logger.addFilter(lambda record: rank == 0)\n    else:\n        rank = 0\n    format_str = \"[%(asctime)s][%(levelname)8s] %(message)s\"\n    formatter = logging.Formatter(format_str)\n    ch.setFormatter(formatter)\n    logger.addHandler(ch)\n    return logger\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },
  {
    "path": "Finetune/Amos/optimizers/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/Amos/optimizers/lr_scheduler.py",
    "content": "# Copyright 2020 - 2021 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nimport warnings\nfrom typing import List\n\nfrom torch import nn as nn\nfrom torch.optim import Adam, Optimizer\nfrom torch.optim.lr_scheduler import LambdaLR, _LRScheduler\n\n__all__ = [\"LinearLR\", \"ExponentialLR\"]\n\n\nclass _LRSchedulerMONAI(_LRScheduler):\n    \"\"\"Base class for increasing the learning rate between two boundaries over a number\n    of iterations\"\"\"\n\n    def __init__(self, optimizer: Optimizer, end_lr: float, num_iter: int, last_epoch: int = -1) -> None:\n        \"\"\"\n        Args:\n            optimizer: wrapped optimizer.\n            end_lr: the final learning rate.\n            num_iter: the number of iterations over which the test occurs.\n            last_epoch: the index of last epoch.\n        Returns:\n            None\n        \"\"\"\n        self.end_lr = end_lr\n        self.num_iter = num_iter\n        super(_LRSchedulerMONAI, self).__init__(optimizer, last_epoch)\n\n\nclass LinearLR(_LRSchedulerMONAI):\n    \"\"\"Linearly increases the learning rate between two boundaries over a number of\n    iterations.\n    \"\"\"\n\n    def get_lr(self):\n        r = self.last_epoch / (self.num_iter - 1)\n        return [base_lr + r * (self.end_lr - base_lr) for base_lr in self.base_lrs]\n\n\nclass ExponentialLR(_LRSchedulerMONAI):\n    \"\"\"Exponentially increases the learning rate between two boundaries over a 
number of\n    iterations.\n    \"\"\"\n\n    def get_lr(self):\n        r = self.last_epoch / (self.num_iter - 1)\n        return [base_lr * (self.end_lr / base_lr) ** r for base_lr in self.base_lrs]\n\n\nclass WarmupCosineSchedule(LambdaLR):\n    \"\"\"Linear warmup and then cosine decay.\n    Based on https://huggingface.co/ implementation.\n    \"\"\"\n\n    def __init__(\n        self, optimizer: Optimizer, warmup_steps: int, t_total: int, cycles: float = 0.5, last_epoch: int = -1\n    ) -> None:\n        \"\"\"\n        Args:\n            optimizer: wrapped optimizer.\n            warmup_steps: number of warmup iterations.\n            t_total: total number of training iterations.\n            cycles: cosine cycles parameter.\n            last_epoch: the index of last epoch.\n        Returns:\n            None\n        \"\"\"\n        self.warmup_steps = warmup_steps\n        self.t_total = t_total\n        self.cycles = cycles\n        super(WarmupCosineSchedule, self).__init__(optimizer, self.lr_lambda, last_epoch)\n\n    def lr_lambda(self, step):\n        if step < self.warmup_steps:\n            return float(step) / float(max(1.0, self.warmup_steps))\n        progress = float(step - self.warmup_steps) / float(max(1, self.t_total - self.warmup_steps))\n        return max(0.0, 0.5 * (1.0 + math.cos(math.pi * float(self.cycles) * 2.0 * progress)))\n\n\nclass LinearWarmupCosineAnnealingLR(_LRScheduler):\n    def __init__(\n        self,\n        optimizer: Optimizer,\n        warmup_epochs: int,\n        max_epochs: int,\n        warmup_start_lr: float = 0.0,\n        eta_min: float = 0.0,\n        last_epoch: int = -1,\n    ) -> None:\n        \"\"\"\n        Args:\n            optimizer (Optimizer): Wrapped optimizer.\n            warmup_epochs (int): Maximum number of iterations for linear warmup\n            max_epochs (int): Maximum number of iterations\n            warmup_start_lr (float): Learning rate to start the linear warmup. 
Default: 0.\n            eta_min (float): Minimum learning rate. Default: 0.\n            last_epoch (int): The index of last epoch. Default: -1.\n        \"\"\"\n        self.warmup_epochs = warmup_epochs\n        self.max_epochs = max_epochs\n        self.warmup_start_lr = warmup_start_lr\n        self.eta_min = eta_min\n\n        super(LinearWarmupCosineAnnealingLR, self).__init__(optimizer, last_epoch)\n\n    def get_lr(self) -> List[float]:\n        \"\"\"\n        Compute learning rate using chainable form of the scheduler\n        \"\"\"\n        if not self._get_lr_called_within_step:\n            warnings.warn(\n                \"To get the last learning rate computed by the scheduler, \" \"please use `get_last_lr()`.\", UserWarning\n            )\n\n        if self.last_epoch == 0:\n            return [self.warmup_start_lr] * len(self.base_lrs)\n        elif self.last_epoch < self.warmup_epochs:\n            return [\n                group[\"lr\"] + (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)\n                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)\n            ]\n        elif self.last_epoch == self.warmup_epochs:\n            return self.base_lrs\n        elif (self.last_epoch - 1 - self.max_epochs) % (2 * (self.max_epochs - self.warmup_epochs)) == 0:\n            return [\n                group[\"lr\"]\n                + (base_lr - self.eta_min) * (1 - math.cos(math.pi / (self.max_epochs - self.warmup_epochs))) / 2\n                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)\n            ]\n\n        return [\n            (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))\n            / (\n                1\n                + math.cos(\n                    math.pi * (self.last_epoch - self.warmup_epochs - 1) / (self.max_epochs - self.warmup_epochs)\n                )\n            )\n            * (group[\"lr\"] - 
self.eta_min)\n            + self.eta_min\n            for group in self.optimizer.param_groups\n        ]\n\n    def _get_closed_form_lr(self) -> List[float]:\n        \"\"\"\n        Called when epoch is passed as a param to the `step` function of the scheduler.\n        \"\"\"\n        if self.last_epoch < self.warmup_epochs:\n            return [\n                self.warmup_start_lr + self.last_epoch * (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)\n                for base_lr in self.base_lrs\n            ]\n\n        return [\n            self.eta_min\n            + 0.5\n            * (base_lr - self.eta_min)\n            * (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))\n            for base_lr in self.base_lrs\n        ]\n"
  },
  {
    "path": "Finetune/Amos/pre_cache.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport argparse\nimport os\nfrom functools import partial\nimport nibabel as nib\nimport numpy as np\nimport torch\nimport torch.nn.functional as F\nfrom torch.cuda.amp import GradScaler, autocast\nfrom utils.data_test import get_loader\nfrom utils.utils import dice, resample_3d\nfrom utils.utils import AverageMeter, distributed_all_gather\n\nfrom monai.inferers import sliding_window_inference\nfrom monai.data import decollate_batch\nfrom monai.losses import DiceCELoss\nfrom monai.metrics import DiceMetric\nfrom monai.networks.nets import SwinUNETR\nfrom monai.transforms import *\nfrom monai.utils.enums import MetricReduction\nfrom monai.handlers import StatsHandler, from_engine\nfrom monai import data, transforms\nfrom monai.data import *\n\n# import resource\n#\n# rlimit = resource.getrlimit(resource.RLIMIT_NOFILE)\n# resource.setrlimit(resource.RLIMIT_NOFILE, (8192, rlimit[1]))\n# print('Setting resource limit:', str(resource.getrlimit(resource.RLIMIT_NOFILE)))\n\n# os.environ['CUDA_VISIBLE_DEVICES'] = \"2\"\n# os.environ['MASTER_ADDR'] = 'localhost'\n# os.environ['MASTER_PORT'] = '28890'\n\nparser = argparse.ArgumentParser(description=\"Swin UNETR segmentation pipeline\")\nparser.add_argument(\n    \"--pretrained_dir\", default=\"./runs/logs_0.9129/\", type=str, help=\"pretrained checkpoint directory\"\n)\nparser.add_argument(\"--data_dir\", 
default=\"D:\\data/amos22\", type=str, help=\"dataset directory\")\nparser.add_argument(\"--exp_name\", default=\"logs_0.9129\", type=str, help=\"experiment name\")\nparser.add_argument(\"--json_list\", default=\"dataset_CT.json\", type=str, help=\"dataset json file\")\nparser.add_argument(\n    \"--pretrained_model_name\",\n    default=\"model_best.pt\",\n    type=str,\n    help=\"pretrained model name\",\n)\nroi = 96\nparser.add_argument(\"--use_normal_dataset\", default=True, help=\"use monai Dataset class\")\nparser.add_argument(\"--feature_size\", default=48, type=int, help=\"feature size\")\nparser.add_argument(\"--batch_size\", default=1, type=int, help=\"number of batch size\")\nparser.add_argument(\"--sw_batch_size\", default=4, type=int, help=\"number of sliding window batch size\")\nparser.add_argument(\"--infer_overlap\", default=0.75, type=float, help=\"sliding window inference overlap\")\nparser.add_argument(\"--in_channels\", default=1, type=int, help=\"number of input channels\")\nparser.add_argument(\"--out_channels\", default=16, type=int, help=\"number of output channels\")\nparser.add_argument(\"--a_min\", default=-175.0, type=float, help=\"a_min in ScaleIntensityRanged\")\nparser.add_argument(\"--a_max\", default=250.0, type=float, help=\"a_max in ScaleIntensityRanged\")\nparser.add_argument(\"--b_min\", default=0.0, type=float, help=\"b_min in ScaleIntensityRanged\")\nparser.add_argument(\"--b_max\", default=1.0, type=float, help=\"b_max in ScaleIntensityRanged\")\nparser.add_argument(\"--space_x\", default=1.5, type=float, help=\"spacing in x direction\")\nparser.add_argument(\"--space_y\", default=1.5, type=float, help=\"spacing in y direction\")\nparser.add_argument(\"--space_z\", default=1.5, type=float, help=\"spacing in z direction\")\nparser.add_argument(\"--roi_x\", default=roi, type=int, help=\"roi size in x direction\")\nparser.add_argument(\"--roi_y\", default=roi, type=int, help=\"roi size in y 
direction\")\nparser.add_argument(\"--roi_z\", default=roi, type=int, help=\"roi size in z direction\")\nparser.add_argument(\"--dropout_rate\", default=0.0, type=float, help=\"dropout rate\")\nparser.add_argument(\"--distributed\", action=\"store_true\", help=\"start distributed training\")\nparser.add_argument(\"--workers\", default=8, type=int, help=\"number of workers\")\nparser.add_argument(\"--RandFlipd_prob\", default=0.2, type=float, help=\"RandFlipd aug probability\")\nparser.add_argument(\"--RandRotate90d_prob\", default=0.2, type=float, help=\"RandRotate90d aug probability\")\nparser.add_argument(\"--RandScaleIntensityd_prob\", default=0.1, type=float, help=\"RandScaleIntensityd aug probability\")\nparser.add_argument(\"--RandShiftIntensityd_prob\", default=0.1, type=float, help=\"RandShiftIntensityd aug probability\")\nparser.add_argument(\"--spatial_dims\", default=3, type=int, help=\"spatial dimension of input data\")\nparser.add_argument(\"--use_checkpoint\", default=True, help=\"use gradient checkpointing to save memory\")\n\n\ndef main():\n    args = parser.parse_args()\n    args.test_mode = True\n    val_loader, test_transforms = get_loader(args)\n\n    with torch.no_grad():\n        for idx, batch_data in enumerate(val_loader):\n            print(idx)\n            # print(batch_data.keys())\n\n            # img_name = batch_data[\"image_meta_dict\"][\"filename_or_obj\"][0].split(\"/\")[-1]\n\n            # raw_data = np.load('./raw_data.npy', allow_pickle=True)\n            # raw_data = raw_data.item()\n            #\n            # shapes, affines = raw_data['shape'], raw_data['affine']\n            # shape, affine = shapes[img_name], affines[img_name]\n            # h, w, d = shape\n            # target_shape = (h, w, d)\n\n            data = batch_data[\"image\"]\n\n\nif __name__ == \"__main__\":\n    main()"
  },
  {
    "path": "Finetune/Amos/test.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport argparse\nimport os\nfrom functools import partial\nimport nibabel as nib\nimport numpy as np\nimport torch\nimport torch.nn.functional as F\nfrom torch.cuda.amp import GradScaler, autocast\nfrom utils.data_test import get_loader\nimport SimpleITK as sitk\nfrom monai.inferers import sliding_window_inference\nfrom monai.data import decollate_batch\nfrom monai.losses import DiceCELoss\nfrom monai.metrics import DiceMetric\nfrom monai.networks.nets import SwinUNETR\nfrom monai.transforms import *\nfrom monai.utils.enums import MetricReduction\nfrom monai.handlers import StatsHandler, from_engine\nimport matplotlib.pyplot as plt\nfrom utils.utils import *\nfrom PIL import Image\n\nimport resource\n\nrlimit = resource.getrlimit(resource.RLIMIT_NOFILE)\nresource.setrlimit(resource.RLIMIT_NOFILE, (8192, rlimit[1]))\nprint('Setting resource limit:', str(resource.getrlimit(resource.RLIMIT_NOFILE)))\nfrom monai import data, transforms\nfrom monai.data import *\n\nos.environ['CUDA_VISIBLE_DEVICES'] = \"5\"\nos.environ['MASTER_ADDR'] = 'localhost'\nos.environ['MASTER_PORT'] = '28890'\n\nparser = argparse.ArgumentParser(description=\"Swin UNETR segmentation pipeline\")\nparser.add_argument(\n    \"--pretrained_dir\", default=\"./runs/logs/\", type=str, help=\"pretrained checkpoint directory\"\n)\nparser.add_argument(\"--data_dir\", 
default=\"/data/linshan/CTs/Amos2022/\", type=str, help=\"dataset directory\")\nparser.add_argument(\"--exp_name\", default=\"test\", type=str, help=\"experiment name\")\nparser.add_argument(\"--json_list\", default=\"dataset_CT.json\", type=str, help=\"dataset json file\")\nparser.add_argument(\n    \"--pretrained_model_name\",\n    default=\"model.pt\",\n    type=str,\n    help=\"pretrained model name\",\n)\nroi = 96\nparser.add_argument(\"--use_normal_dataset\", default=True, help=\"use monai Dataset class\")\nparser.add_argument(\"--feature_size\", default=48, type=int, help=\"feature size\")\nparser.add_argument(\"--batch_size\", default=1, type=int, help=\"number of batch size\")\nparser.add_argument(\"--sw_batch_size\", default=2, type=int, help=\"number of sliding window batch size\")\nparser.add_argument(\"--infer_overlap\", default=0.75, type=float, help=\"sliding window inference overlap\")\nparser.add_argument(\"--in_channels\", default=1, type=int, help=\"number of input channels\")\nparser.add_argument(\"--out_channels\", default=16, type=int, help=\"number of output channels\")\nparser.add_argument(\"--a_min\", default=-175.0, type=float, help=\"a_min in ScaleIntensityRanged\")\nparser.add_argument(\"--a_max\", default=250.0, type=float, help=\"a_max in ScaleIntensityRanged\")\nparser.add_argument(\"--b_min\", default=0.0, type=float, help=\"b_min in ScaleIntensityRanged\")\nparser.add_argument(\"--b_max\", default=1.0, type=float, help=\"b_max in ScaleIntensityRanged\")\nparser.add_argument(\"--space_x\", default=1.5, type=float, help=\"spacing in x direction\")\nparser.add_argument(\"--space_y\", default=1.5, type=float, help=\"spacing in y direction\")\nparser.add_argument(\"--space_z\", default=1.5, type=float, help=\"spacing in z direction\")\nparser.add_argument(\"--roi_x\", default=roi, type=int, help=\"roi size in x direction\")\nparser.add_argument(\"--roi_y\", default=roi, type=int, help=\"roi size in y 
direction\")\nparser.add_argument(\"--roi_z\", default=roi, type=int, help=\"roi size in z direction\")\nparser.add_argument(\"--dropout_rate\", default=0.0, type=float, help=\"dropout rate\")\nparser.add_argument(\"--distributed\", action=\"store_true\", help=\"start distributed training\")\nparser.add_argument(\"--workers\", default=8, type=int, help=\"number of workers\")\nparser.add_argument(\"--RandFlipd_prob\", default=0.2, type=float, help=\"RandFlipd aug probability\")\nparser.add_argument(\"--RandRotate90d_prob\", default=0.2, type=float, help=\"RandRotate90d aug probability\")\nparser.add_argument(\"--RandScaleIntensityd_prob\", default=0.1, type=float, help=\"RandScaleIntensityd aug probability\")\nparser.add_argument(\"--RandShiftIntensityd_prob\", default=0.1, type=float, help=\"RandShiftIntensityd aug probability\")\nparser.add_argument(\"--spatial_dims\", default=3, type=int, help=\"spatial dimension of input data\")\nparser.add_argument(\"--use_checkpoint\", default=True, help=\"use gradient checkpointing to save memory\")\n\n\ndef main():\n    args = parser.parse_args()\n    args.test_mode = True\n    output_directory = \"./pred/\" + args.exp_name\n    if not os.path.exists(output_directory):\n        os.makedirs(output_directory)\n    val_loader, test_transforms = get_loader(args)\n\n    pretrained_dir = args.pretrained_dir\n    model_name = args.pretrained_model_name\n    device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n    pretrained_pth = os.path.join(pretrained_dir, model_name)\n    model = SwinUNETR(\n        img_size=(args.roi_x, args.roi_y, args.roi_z),\n        in_channels=args.in_channels,\n        out_channels=args.out_channels,\n        feature_size=args.feature_size,\n        drop_rate=0.0,\n        attn_drop_rate=0.0,\n        dropout_path_rate=0.0,\n        use_checkpoint=args.use_checkpoint,\n        use_v2=True\n    )\n    inf_size = [args.roi_x, args.roi_y, args.roi_z]\n    model_inferer = partial(\n      
  sliding_window_inference,\n        roi_size=inf_size,\n        sw_batch_size=args.sw_batch_size,\n        predictor=model,\n        overlap=args.infer_overlap,\n    )\n\n    model_dict = torch.load(pretrained_pth)[\"state_dict\"]\n    model.load_state_dict(model_dict, strict=True)\n    model.eval()\n    model.to(device)\n\n    # enable cuDNN benchmark\n    torch.backends.cudnn.benchmark = True\n\n    post_transforms = Compose([EnsureTyped(keys=[\"pred\"]),\n                                Invertd(keys=[\"pred\"],\n                                     transform=test_transforms,\n                                     orig_keys=\"image\",\n                                     meta_keys=\"pred_meta_dict\",\n                                     orig_meta_keys=\"image_meta_dict\",\n                                     meta_key_postfix=\"meta_dict\",\n                                     nearest_interp=False,\n                                     to_tensor=True),\n                               # Invertd(keys=[\"image\"],\n                               #         transform=test_transforms,\n                               #         orig_keys=\"image\",\n                               #         meta_keys=\"pred_meta_dict\",\n                               #         orig_meta_keys=\"image_meta_dict\",\n                               #         meta_key_postfix=\"meta_dict\",\n                               #         nearest_interp=False,\n                               #         to_tensor=True),\n\n                               AsDiscreted(keys=\"pred\", argmax=True, to_onehot=None),\n                               SaveImaged(keys=\"pred\", meta_keys=\"pred_meta_dict\", output_dir=output_directory,\n                                          separate_folder=False, folder_layout=None,\n                                          resample=False),\n                               ])\n\n    cmap = color_map()\n\n    num = 0\n\n    with torch.no_grad():\n        for idx, batch_data 
in enumerate(val_loader):\n            torch.cuda.empty_cache()\n\n            img_name = batch_data[\"image_meta_dict\"][\"filename_or_obj\"][0].split(\"/\")[-1]\n            print('img_name:', img_name, num)\n            num += 1\n\n            if isinstance(batch_data, list):\n                data = batch_data\n            else:\n                data = batch_data[\"image\"].cuda()\n\n            with autocast(enabled=True):\n                if model_inferer is not None:\n                    logits = model_inferer(data)\n                else:\n                    logits = model(data)\n\n            batch_data['pred'] = logits\n\n            # ori = torch.argmax(logits, 1).cpu().numpy().astype(np.uint8)[0]\n            # ori = Image.fromarray(ori[:, :, 50].astype(np.uint8), mode='P')\n            # ori.putpalette(cmap)\n\n            batch_data = [post_transforms(i) for i in\n                         decollate_batch(batch_data)]  # apply post-processing to output tensors\n\n            # test_img, val_outputs = from_engine([\"image\", \"pred\"])(batch_data)\n\n            # test_img = test_img[0][0].data.cpu().numpy()\n            # print(test_img.shape)\n\n            # c = val_outputs[0].shape[-1]\n            # val_outputs = val_outputs[0].argmax(0).cpu().numpy().astype(np.uint8)\n\n            # # # vis\n            # print(np.unique(val_outputs[:, :, c//3].astype(np.uint8)))\n            # val = Image.fromarray(val_outputs[:, :, c//3].astype(np.uint8), mode='P')\n            # val.putpalette(cmap)\n\n            # # # show\n            # plt.figure(\"check\", (18, 6))\n            # plt.subplot(1, 2, 1)\n            # plt.imshow(test_img[:, :, c//3], cmap=\"gray\")\n            # # plt.imshow(ori)\n            #\n            # plt.subplot(1, 2, 2)\n            # plt.imshow(val)\n            # plt.show()\n\n            # # # save predict\n            # seg = sitk.GetImageFromArray(val_outputs)\n            # seg.SetSpacing(img_itk.GetSpacing())\n            # 
seg.SetDirection(img_itk.GetDirection())\n            # sitk.WriteImage(seg, os.path.join(output_directory, img_name[:-12] + '.nii.gz'))\n\n\nif __name__ == \"__main__\":\n    main()"
  },
  {
    "path": "Finetune/Amos/train.sh",
    "content": "now=$(date +\"%Y%m%d_%H%M%S\")\nlogdir=runs/logs\nmkdir -p $logdir\n\ntorchrun --master_port=21198 main.py \\\n    --logdir $logdir | tee $logdir/$now.txt"
  },
  {
    "path": "Finetune/Amos/trainer.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport os\nimport shutil\nimport time\n\nimport numpy as np\nimport torch\nimport torch.nn.parallel\nimport torch.utils.data.distributed\nfrom tensorboardX import SummaryWriter\nfrom torch.cuda.amp import GradScaler, autocast\nfrom utils.utils import AverageMeter, distributed_all_gather\n\nfrom monai.data import decollate_batch\n\n\ndef train_epoch(model, loader, optimizer, scaler, epoch, loss_func, args):\n    model.train()\n    start_time = time.time()\n    run_loss = AverageMeter()\n    for idx, batch_data in enumerate(loader):\n        if isinstance(batch_data, list):\n            data, target = batch_data\n        else:\n            data, target = batch_data[\"image\"], batch_data[\"label\"]\n        data, target = data.cuda(), target.cuda()\n        for param in model.parameters():\n            param.grad = None\n        with autocast(enabled=args.amp):\n            logits = model(data)\n            loss = loss_func(logits, target)\n            #\n        if args.amp:\n            scaler.scale(loss).backward()\n            scaler.step(optimizer)\n            scaler.update()\n        else:\n            loss.backward()\n            optimizer.step()\n        if args.distributed:\n            loss_list = distributed_all_gather([loss], out_numpy=True, is_valid=idx < loader.sampler.valid_length)\n            run_loss.update(\n                
np.mean(np.mean(np.stack(loss_list, axis=0), axis=0), axis=0), n=args.batch_size * args.world_size\n            )\n        else:\n            run_loss.update(loss.item(), n=args.batch_size)\n\n        lr = optimizer.param_groups[0][\"lr\"]\n        if args.rank == 0:\n            print(\n                \"Epoch {}/{} {}/{}\".format(epoch, args.max_epochs, idx, len(loader)),\n                \"loss: {:.4f}\".format(run_loss.avg),\n                \"lr: {:.8f}\".format(lr),\n                \"time {:.2f}s\".format(time.time() - start_time),\n            )\n        start_time = time.time()\n    for param in model.parameters():\n        param.grad = None\n    return run_loss.avg\n\n\ndef val_epoch(model, loader, epoch, acc_func, args, model_inferer=None, post_label=None, post_pred=None):\n    model.eval()\n    run_acc = AverageMeter()\n    start_time = time.time()\n    with torch.no_grad():\n        for idx, batch_data in enumerate(loader):\n            if isinstance(batch_data, list):\n                data, target = batch_data\n            else:\n                data, target = batch_data[\"image\"], batch_data[\"label\"]\n            data, target = data.cuda(), target.cuda()\n            with autocast(enabled=args.amp):\n                if model_inferer is not None:\n                    logits = model_inferer(data)\n                else:\n                    logits = model(data)\n            if not logits.is_cuda:\n                target = target.cpu()\n            val_labels_list = decollate_batch(target)\n            val_labels_convert = [post_label(val_label_tensor) for val_label_tensor in val_labels_list]\n            val_outputs_list = decollate_batch(logits)\n            val_output_convert = [post_pred(val_pred_tensor) for val_pred_tensor in val_outputs_list]\n            acc_func.reset()\n            acc_func(y_pred=val_output_convert, y=val_labels_convert)\n            acc, not_nans = acc_func.aggregate()\n            acc = acc.cuda(args.rank)\n\n            
if args.distributed:\n                acc_list, not_nans_list = distributed_all_gather(\n                    [acc, not_nans], out_numpy=True, is_valid=idx < loader.sampler.valid_length\n                )\n                for al, nl in zip(acc_list, not_nans_list):\n                    run_acc.update(al, n=nl)\n\n            else:\n                run_acc.update(acc.cpu().numpy(), n=not_nans.cpu().numpy())\n\n            if args.rank == 0:\n                avg_acc = np.mean(run_acc.avg)\n                print(\n                    \"Val {}/{} {}/{}\".format(epoch, args.max_epochs, idx, len(loader)),\n                    \"acc\",\n                    avg_acc,\n                    \"time {:.2f}s\".format(time.time() - start_time),\n                )\n            start_time = time.time()\n    return run_acc.avg\n\n\ndef save_checkpoint(model, epoch, args, filename=\"model.pt\", best_acc=0, optimizer=None, scheduler=None):\n    state_dict = model.state_dict() if not args.distributed else model.module.state_dict()\n    save_dict = {\"epoch\": epoch, \"best_acc\": best_acc, \"state_dict\": state_dict}\n    if optimizer is not None:\n        save_dict[\"optimizer\"] = optimizer.state_dict()\n    if scheduler is not None:\n        save_dict[\"scheduler\"] = scheduler.state_dict()\n    filename = os.path.join(args.logdir, filename)\n    torch.save(save_dict, filename)\n    print(\"Saving checkpoint\", filename)\n\n\ndef run_training(\n        model,\n        train_loader,\n        val_loader,\n        optimizer,\n        loss_func,\n        acc_func,\n        args,\n        model_inferer=None,\n        scheduler=None,\n        start_epoch=0,\n        post_label=None,\n        post_pred=None,\n):\n    writer = None\n    if args.logdir is not None and args.rank == 0:\n        writer = SummaryWriter(log_dir=args.logdir)\n        if args.rank == 0:\n            print(\"Writing Tensorboard logs to \", args.logdir)\n    scaler = None\n    if args.amp:\n        scaler = 
GradScaler()\n    val_acc_max = 0.0\n    for epoch in range(start_epoch, args.max_epochs):\n        if args.distributed:\n            train_loader.sampler.set_epoch(epoch)\n            torch.distributed.barrier()\n        print(args.rank, time.ctime(), \"Epoch:\", epoch)\n        epoch_time = time.time()\n        train_loss = train_epoch(\n            model, train_loader, optimizer, scaler=scaler, epoch=epoch, loss_func=loss_func, args=args\n        )\n        if args.rank == 0:\n            print(\n                \"Final training  {}/{}\".format(epoch, args.max_epochs - 1),\n                \"loss: {:.4f}\".format(train_loss),\n                \"time {:.2f}s\".format(time.time() - epoch_time),\n            )\n        if args.rank == 0 and writer is not None:\n            writer.add_scalar(\"train_loss\", train_loss, epoch)\n        b_new_best = False\n        if (epoch + 1) % args.val_every == 0:\n            if args.distributed:\n                torch.distributed.barrier()\n            epoch_time = time.time()\n            val_avg_acc = val_epoch(\n                model,\n                val_loader,\n                epoch=epoch,\n                acc_func=acc_func,\n                model_inferer=model_inferer,\n                args=args,\n                post_label=post_label,\n                post_pred=post_pred,\n            )\n\n            val_avg_acc = np.mean(val_avg_acc)\n\n            if args.rank == 0:\n                print(\n                    \"Final validation  {}/{}\".format(epoch, args.max_epochs - 1),\n                    \"acc\",\n                    val_avg_acc,\n                    \"time {:.2f}s\".format(time.time() - epoch_time),\n                )\n                if writer is not None:\n                    writer.add_scalar(\"val_acc\", val_avg_acc, epoch)\n                if val_avg_acc > val_acc_max:\n                    print(\"new best ({:.6f} --> {:.6f}). 
\".format(val_acc_max, val_avg_acc))\n                    val_acc_max = val_avg_acc\n                    b_new_best = True\n                    if args.rank == 0 and args.logdir is not None and args.save_checkpoint:\n                        save_checkpoint(\n                            model, epoch, args, best_acc=val_acc_max, optimizer=optimizer, scheduler=scheduler\n                        )\n            if args.rank == 0 and args.logdir is not None and args.save_checkpoint:\n                save_checkpoint(model, epoch, args, best_acc=val_acc_max, filename=\"model_final.pt\")\n                if b_new_best:\n                    print(\"Copying to model.pt new best model!!!!\")\n                    shutil.copyfile(os.path.join(args.logdir, \"model_final.pt\"), os.path.join(args.logdir, \"model.pt\"))\n\n        if scheduler is not None:\n            scheduler.step()\n\n    print(\"Training Finished !, Best Accuracy: \", val_acc_max)\n\n    return val_acc_max\n"
  },
  {
    "path": "Finetune/Amos/utils/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/Amos/utils/data_test.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nimport os\nimport pickle\nimport numpy as np\nimport torch\nimport itertools as it\nfrom monai import data, transforms\nfrom monai.data import *\n\n\nclass Sampler(torch.utils.data.Sampler):\n    def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, make_even=True):\n        if num_replicas is None:\n            if not torch.distributed.is_available():\n                raise RuntimeError(\"Requires distributed package to be available\")\n            num_replicas = torch.distributed.get_world_size()\n        if rank is None:\n            if not torch.distributed.is_available():\n                raise RuntimeError(\"Requires distributed package to be available\")\n            rank = torch.distributed.get_rank()\n        self.shuffle = shuffle\n        self.make_even = make_even\n        self.dataset = dataset\n        self.num_replicas = num_replicas\n        self.rank = rank\n        self.epoch = 0\n        self.num_samples = int(math.ceil(len(self.dataset) * 1.0 / self.num_replicas))\n        self.total_size = self.num_samples * self.num_replicas\n        indices = list(range(len(self.dataset)))\n        self.valid_length = len(indices[self.rank : self.total_size : self.num_replicas])\n\n    def __iter__(self):\n        if self.shuffle:\n            g = torch.Generator()\n            g.manual_seed(self.epoch)\n            indices = 
torch.randperm(len(self.dataset), generator=g).tolist()\n        else:\n            indices = list(range(len(self.dataset)))\n        if self.make_even:\n            if len(indices) < self.total_size:\n                if self.total_size - len(indices) < len(indices):\n                    indices += indices[: (self.total_size - len(indices))]\n                else:\n                    extra_ids = np.random.randint(low=0, high=len(indices), size=self.total_size - len(indices))\n                    indices += [indices[ids] for ids in extra_ids]\n            assert len(indices) == self.total_size\n        indices = indices[self.rank : self.total_size : self.num_replicas]\n        self.num_samples = len(indices)\n        return iter(indices)\n\n    def __len__(self):\n        return self.num_samples\n\n    def set_epoch(self, epoch):\n        self.epoch = epoch\n\n\ndef get_loader(args):\n    data_dir = args.data_dir\n    datalist_json = os.path.join(data_dir, args.json_list)\n    transform = transforms.Compose(\n        [\n            transforms.LoadImaged(keys=[\"image\"]),\n            transforms.EnsureChannelFirstd(keys=[\"image\"]),\n            transforms.Orientationd(keys=[\"image\"], axcodes=\"RAS\"),\n            transforms.Spacingd(\n                keys=[\"image\"], pixdim=(args.space_x, args.space_y, args.space_z), mode=(\"bilinear\")\n            ),\n            transforms.ScaleIntensityRanged(\n                keys=[\"image\"], a_min=args.a_min, a_max=args.a_max, b_min=args.b_min, b_max=args.b_max, clip=True\n            ),\n            transforms.CropForegroundd(keys=[\"image\"], source_key=\"image\"),\n        ]\n    )\n\n    datalist = load_decathlon_datalist(datalist_json, True, \"test\", base_dir=data_dir)\n\n    print('use persistent')\n    ds = PersistentDataset(data=datalist,\n                             transform=transform,\n                             pickle_protocol=pickle.HIGHEST_PROTOCOL,\n                             
cache_dir='/data/linshan/cache/amos_test')\n    # /data/linshan/cache/flare22_test\n\n    sampler = Sampler(ds) if args.distributed else None\n    loader = data.DataLoader(\n        ds,\n        batch_size=args.batch_size,\n        shuffle=(sampler is None),\n        num_workers=args.workers,\n        sampler=sampler,\n        pin_memory=True,\n    )\n\n    return loader, transform\n"
  },
  {
    "path": "Finetune/Amos/utils/data_utils.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nimport os\nimport pickle\nimport numpy as np\nimport torch\nimport itertools as it\nfrom monai import data, transforms\nfrom monai.data import *\n\n\nclass Sampler(torch.utils.data.Sampler):\n    def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, make_even=True):\n        if num_replicas is None:\n            if not torch.distributed.is_available():\n                raise RuntimeError(\"Requires distributed package to be available\")\n            num_replicas = torch.distributed.get_world_size()\n        if rank is None:\n            if not torch.distributed.is_available():\n                raise RuntimeError(\"Requires distributed package to be available\")\n            rank = torch.distributed.get_rank()\n        self.shuffle = shuffle\n        self.make_even = make_even\n        self.dataset = dataset\n        self.num_replicas = num_replicas\n        self.rank = rank\n        self.epoch = 0\n        self.num_samples = int(math.ceil(len(self.dataset) * 1.0 / self.num_replicas))\n        self.total_size = self.num_samples * self.num_replicas\n        indices = list(range(len(self.dataset)))\n        self.valid_length = len(indices[self.rank : self.total_size : self.num_replicas])\n\n    def __iter__(self):\n        if self.shuffle:\n            g = torch.Generator()\n            g.manual_seed(self.epoch)\n            indices = 
torch.randperm(len(self.dataset), generator=g).tolist()\n        else:\n            indices = list(range(len(self.dataset)))\n        if self.make_even:\n            if len(indices) < self.total_size:\n                if self.total_size - len(indices) < len(indices):\n                    indices += indices[: (self.total_size - len(indices))]\n                else:\n                    extra_ids = np.random.randint(low=0, high=len(indices), size=self.total_size - len(indices))\n                    indices += [indices[ids] for ids in extra_ids]\n            assert len(indices) == self.total_size\n        indices = indices[self.rank : self.total_size : self.num_replicas]\n        self.num_samples = len(indices)\n        return iter(indices)\n\n    def __len__(self):\n        return self.num_samples\n\n    def set_epoch(self, epoch):\n        self.epoch = epoch\n\n\ndef get_loader(args):\n    data_dir = args.data_dir\n    datalist_json = os.path.join(data_dir, args.json_list)\n    train_transform = transforms.Compose(\n        [\n            transforms.LoadImaged(keys=[\"image\", \"label\"]),\n            transforms.EnsureChannelFirstd(keys=[\"image\", \"label\"]),\n            transforms.Orientationd(keys=[\"image\", \"label\"], axcodes=\"RAS\"),\n            transforms.Spacingd(\n                keys=[\"image\", \"label\"], pixdim=(args.space_x, args.space_y, args.space_z), mode=(\"bilinear\", \"nearest\")\n            ),\n            transforms.ScaleIntensityRanged(\n                keys=[\"image\"], a_min=args.a_min, a_max=args.a_max, b_min=args.b_min, b_max=args.b_max, clip=True\n            ),\n            transforms.CropForegroundd(keys=[\"image\", \"label\"], source_key=\"image\"),\n            transforms.RandCropByPosNegLabeld(\n                keys=[\"image\", \"label\"],\n                label_key=\"label\",\n                spatial_size=(args.roi_x, args.roi_y, args.roi_z),\n                pos=9,\n                neg=1,\n                
num_samples=args.sw_batch_size,\n                image_key=\"image\",\n                image_threshold=0,\n            ),\n            # transforms.RandCropByLabelClassesd(\n            #     keys=[\"image\", \"label\"],\n            #     image_key=\"image\",\n            #     label_key=\"label\",\n            #     spatial_size=(args.roi_x, args.roi_y, args.roi_z),\n            #     num_classes=args.out_channels,\n            #     ratios=[0, *it.repeat(1, args.out_channels-1)],\n            #     num_samples=args.sw_batch_size,\n            #     image_threshold=0,\n            #     warn=False,\n            # ),\n            transforms.RandFlipd(keys=[\"image\", \"label\"], prob=args.RandFlipd_prob, spatial_axis=0),\n            transforms.RandFlipd(keys=[\"image\", \"label\"], prob=args.RandFlipd_prob, spatial_axis=1),\n            transforms.RandFlipd(keys=[\"image\", \"label\"], prob=args.RandFlipd_prob, spatial_axis=2),\n            transforms.RandRotate90d(keys=[\"image\", \"label\"], prob=args.RandRotate90d_prob, max_k=3),\n            #transforms.RandShiftIntensityd(keys=\"image\", offsets=0.1, prob=args.RandShiftIntensityd_prob),\n            transforms.ToTensord(keys=[\"image\", \"label\"]),\n        ]\n    )\n    val_transform = transforms.Compose(\n        [\n            transforms.LoadImaged(keys=[\"image\", \"label\"]),\n            transforms.EnsureChannelFirstd(keys=[\"image\", \"label\"]),\n            transforms.Orientationd(keys=[\"image\", \"label\"], axcodes=\"RAS\"),\n            transforms.Spacingd(\n                keys=[\"image\", \"label\"], pixdim=(args.space_x, args.space_y, args.space_z), mode=(\"bilinear\", \"nearest\")\n            ),\n            transforms.ScaleIntensityRanged(\n                keys=[\"image\"], a_min=args.a_min, a_max=args.a_max, b_min=args.b_min, b_max=args.b_max, clip=True\n            ),\n            transforms.CropForegroundd(keys=[\"image\", \"label\"], source_key=\"image\"),\n            
transforms.ToTensord(keys=[\"image\", \"label\"]),\n        ]\n    )\n\n    if args.test_mode:\n        test_files = load_decathlon_datalist(datalist_json, True, \"validation\", base_dir=data_dir)\n        test_ds = PersistentDataset(data=test_files,\n                                     transform=val_transform,\n                                     pickle_protocol=pickle.HIGHEST_PROTOCOL,\n                                     cache_dir='/data/linshan/cache/flare22')\n        test_sampler = Sampler(test_ds, shuffle=False) if args.distributed else None\n        test_loader = data.DataLoader(\n            test_ds,\n            batch_size=1,\n            shuffle=False,\n            num_workers=args.workers,\n            sampler=test_sampler,\n            pin_memory=True,\n            persistent_workers=True,\n        )\n        loader = test_loader\n    else:\n        datalist = load_decathlon_datalist(datalist_json, True, \"training\", base_dir=data_dir)\n        if args.use_normal_dataset:\n            print('use persistent')\n            train_ds = PersistentDataset(data=datalist,\n                                     transform=train_transform,\n                                     pickle_protocol=pickle.HIGHEST_PROTOCOL,\n                                     cache_dir='/data/linshan/cache/amos')\n            # train_ds = data.Dataset(data=datalist, transform=train_transform)\n        else:\n            train_ds = data.CacheDataset(\n                data=datalist, transform=train_transform, cache_num=24, cache_rate=1.0, num_workers=args.workers\n            )\n        train_sampler = Sampler(train_ds) if args.distributed else None\n        train_loader = data.DataLoader(\n            train_ds,\n            batch_size=args.batch_size,\n            shuffle=(train_sampler is None),\n            num_workers=args.workers,\n            sampler=train_sampler,\n            pin_memory=True,\n        )\n        val_files = load_decathlon_datalist(datalist_json, True, 
\"validation\", base_dir=data_dir)\n        # val_ds = data.Dataset(data=val_files, transform=val_transform)\n        val_ds = PersistentDataset(data=val_files,\n                                     transform=val_transform,\n                                     pickle_protocol=pickle.HIGHEST_PROTOCOL,\n                                     cache_dir='/data/linshan/cache/amos')\n        val_sampler = Sampler(val_ds, shuffle=False) if args.distributed else None\n        val_loader = data.DataLoader(\n            val_ds, batch_size=1, shuffle=False, num_workers=args.workers, sampler=val_sampler, pin_memory=False\n        )\n        loader = [train_loader, val_loader]\n\n    return loader\n"
  },
  {
    "path": "Finetune/Amos/utils/utils.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport numpy as np\nimport scipy.ndimage as ndimage\nimport torch\nimport os\n\ndef resample_3d(img, target_size):\n    imx, imy, imz = img.shape\n    tx, ty, tz = target_size\n    zoom_ratio = (float(tx) / float(imx), float(ty) / float(imy), float(tz) / float(imz))\n    img_resampled = ndimage.zoom(img, zoom_ratio, order=0, prefilter=False)\n    return img_resampled\n\n\ndef dice(x, y):\n    intersect = np.sum(np.sum(np.sum(x * y)))\n    y_sum = np.sum(np.sum(np.sum(y)))\n    if y_sum == 0:\n        return 0.0\n    x_sum = np.sum(np.sum(np.sum(x)))\n    return 2 * intersect / (x_sum + y_sum)\n\n\nclass AverageMeter(object):\n    def __init__(self):\n        self.reset()\n\n    def reset(self):\n        self.val = 0\n        self.avg = 0\n        self.sum = 0\n        self.count = 0\n\n    def update(self, val, n=1):\n        self.val = val\n        self.sum += val * n\n        self.count += n\n        self.avg = np.where(self.count > 0, self.sum / self.count, self.sum)\n\n\ndef distributed_all_gather(\n    tensor_list, valid_batch_size=None, out_numpy=False, world_size=None, no_barrier=False, is_valid=None\n):\n    if world_size is None:\n        world_size = torch.distributed.get_world_size()\n    if valid_batch_size is not None:\n        valid_batch_size = min(valid_batch_size, world_size)\n    elif is_valid is not None:\n        is_valid = 
torch.tensor(bool(is_valid), dtype=torch.bool, device=tensor_list[0].device)\n    if not no_barrier:\n        torch.distributed.barrier()\n    tensor_list_out = []\n    with torch.no_grad():\n        if is_valid is not None:\n            is_valid_list = [torch.zeros_like(is_valid) for _ in range(world_size)]\n            torch.distributed.all_gather(is_valid_list, is_valid)\n            is_valid = [x.item() for x in is_valid_list]\n        for tensor in tensor_list:\n            gather_list = [torch.zeros_like(tensor) for _ in range(world_size)]\n            torch.distributed.all_gather(gather_list, tensor)\n            if valid_batch_size is not None:\n                gather_list = gather_list[:valid_batch_size]\n            elif is_valid is not None:\n                gather_list = [g for g, v in zip(gather_list, is_valid_list) if v]\n            if out_numpy:\n                gather_list = [t.cpu().numpy() for t in gather_list]\n            tensor_list_out.append(gather_list)\n    return tensor_list_out\n\n\ndef color_map(dataset='pascal'):\n    cmap = np.zeros((256, 3), dtype='uint8')\n\n    if dataset == 'pascal' or dataset == 'coco':\n        def bitget(byteval, idx):\n            return (byteval & (1 << idx)) != 0\n\n        for i in range(256):\n            r = g = b = 0\n            c = i\n            for j in range(8):\n                r = r | (bitget(c, 0) << 7-j)\n                g = g | (bitget(c, 1) << 7-j)\n                b = b | (bitget(c, 2) << 7-j)\n                c = c >> 3\n\n            cmap[i] = np.array([r, g, b])\n\n    elif dataset == 'cityscapes':\n        cmap[0] = np.array([128, 64, 128])\n        cmap[1] = np.array([244, 35, 232])\n        cmap[2] = np.array([70, 70, 70])\n        cmap[3] = np.array([102, 102, 156])\n        cmap[4] = np.array([190, 153, 153])\n        cmap[5] = np.array([153, 153, 153])\n        cmap[6] = np.array([250, 170, 30])\n        cmap[7] = np.array([220, 220, 0])\n        cmap[8] = np.array([107, 142, 35])\n  
      cmap[9] = np.array([152, 251, 152])\n        cmap[10] = np.array([70, 130, 180])\n        cmap[11] = np.array([220, 20, 60])\n        cmap[12] = np.array([255,  0,  0])\n        cmap[13] = np.array([0,  0, 142])\n        cmap[14] = np.array([0,  0, 70])\n        cmap[15] = np.array([0, 60, 100])\n        cmap[16] = np.array([0, 80, 100])\n        cmap[17] = np.array([0,  0, 230])\n        cmap[18] = np.array([119, 11, 32])\n\n        cmap[19] = np.array([0, 0, 0])\n        cmap[255] = np.array([0, 0, 0])\n\n    return cmap\n\n\ndef check_dir(dir):\n    if not os.path.exists(dir):\n        os.makedirs(dir)"
  },
  {
    "path": "Finetune/Amos/val.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport argparse\nimport os\nfrom functools import partial\nimport nibabel as nib\nimport numpy as np\nimport torch\nimport torch.nn.functional as F\nfrom torch.cuda.amp import GradScaler, autocast\nfrom utils.data_utils import get_loader\nfrom utils.utils import dice, resample_3d\nfrom utils.utils import AverageMeter, distributed_all_gather\n\nfrom monai.inferers import sliding_window_inference\nfrom monai.data import decollate_batch\nfrom monai.losses import DiceCELoss\nfrom monai.metrics import DiceMetric\nfrom monai.networks.nets import SwinUNETR\nfrom monai.transforms import Activations, AsDiscrete, Compose\nfrom monai.utils.enums import MetricReduction\n\nos.environ['CUDA_VISIBLE_DEVICES'] = \"0\"\nos.environ['MASTER_ADDR'] = 'localhost'\nos.environ['MASTER_PORT'] = '28890'\n\nparser = argparse.ArgumentParser(description=\"Swin UNETR segmentation pipeline\")\nparser.add_argument(\n    \"--pretrained_dir\", default=\"./runs/logs_scratch_v2/\", type=str, help=\"pretrained checkpoint directory\"\n)\nparser.add_argument(\"--data_dir\", default=\"/data/linshan/CTs/BTCV/\", type=str, help=\"dataset directory\")\nparser.add_argument(\"--exp_name\", default=\"BTCV_0.8451\", type=str, help=\"experiment name\")\nparser.add_argument(\"--json_list\", default=\"dataset_0.json\", type=str, help=\"dataset json file\")\nparser.add_argument(\n    
\"--pretrained_model_name\",\n    default=\"model_0.8451.pt\",\n    type=str,\n    help=\"pretrained model name\",\n)\nroi=96\nparser.add_argument(\"--use_normal_dataset\", default=True, help=\"use monai Dataset class\")\nparser.add_argument(\"--feature_size\", default=48, type=int, help=\"feature size\")\nparser.add_argument(\"--batch_size\", default=1, type=int, help=\"number of batch size\")\nparser.add_argument(\"--sw_batch_size\", default=4, type=int, help=\"number of sliding window batch size\")\nparser.add_argument(\"--infer_overlap\", default=0.75, type=float, help=\"sliding window inference overlap\")\nparser.add_argument(\"--in_channels\", default=1, type=int, help=\"number of input channels\")\nparser.add_argument(\"--out_channels\", default=14, type=int, help=\"number of output channels\")\nparser.add_argument(\"--a_min\", default=-175.0, type=float, help=\"a_min in ScaleIntensityRanged\")\nparser.add_argument(\"--a_max\", default=250.0, type=float, help=\"a_max in ScaleIntensityRanged\")\nparser.add_argument(\"--b_min\", default=0.0, type=float, help=\"b_min in ScaleIntensityRanged\")\nparser.add_argument(\"--b_max\", default=1.0, type=float, help=\"b_max in ScaleIntensityRanged\")\nparser.add_argument(\"--space_x\", default=1.5, type=float, help=\"spacing in x direction\")\nparser.add_argument(\"--space_y\", default=1.5, type=float, help=\"spacing in y direction\")\nparser.add_argument(\"--space_z\", default=1.5, type=float, help=\"spacing in z direction\")\nparser.add_argument(\"--roi_x\", default=roi, type=int, help=\"roi size in x direction\")\nparser.add_argument(\"--roi_y\", default=roi, type=int, help=\"roi size in y direction\")\nparser.add_argument(\"--roi_z\", default=roi, type=int, help=\"roi size in z direction\")\nparser.add_argument(\"--dropout_rate\", default=0.0, type=float, help=\"dropout rate\")\nparser.add_argument(\"--distributed\", action=\"store_true\", help=\"start distributed training\")\nparser.add_argument(\"--workers\", 
default=8, type=int, help=\"number of workers\")\nparser.add_argument(\"--RandFlipd_prob\", default=0.2, type=float, help=\"RandFlipd aug probability\")\nparser.add_argument(\"--RandRotate90d_prob\", default=0.2, type=float, help=\"RandRotate90d aug probability\")\nparser.add_argument(\"--RandScaleIntensityd_prob\", default=0.1, type=float, help=\"RandScaleIntensityd aug probability\")\nparser.add_argument(\"--RandShiftIntensityd_prob\", default=0.1, type=float, help=\"RandShiftIntensityd aug probability\")\nparser.add_argument(\"--spatial_dims\", default=3, type=int, help=\"spatial dimension of input data\")\nparser.add_argument(\"--use_checkpoint\", default=True, help=\"use gradient checkpointing to save memory\")\n\n\ndef main():\n    args = parser.parse_args()\n    args.test_mode = True\n    output_directory = \"./outputs/\" + args.exp_name\n    if not os.path.exists(output_directory):\n        os.makedirs(output_directory)\n    val_loader = get_loader(args)\n    pretrained_dir = args.pretrained_dir\n    model_name = args.pretrained_model_name\n    device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n    pretrained_pth = os.path.join(pretrained_dir, model_name)\n    model = SwinUNETR(\n        img_size=(args.roi_x, args.roi_y, args.roi_z),\n        in_channels=args.in_channels,\n        out_channels=args.out_channels,\n        feature_size=args.feature_size,\n        drop_rate=0.0,\n        attn_drop_rate=0.0,\n        dropout_path_rate=0.0,\n        use_checkpoint=args.use_checkpoint,\n        use_v2=True\n    )\n    inf_size = [args.roi_x, args.roi_y, args.roi_z]\n    model_inferer = partial(\n        sliding_window_inference,\n        roi_size=inf_size,\n        sw_batch_size=args.sw_batch_size,\n        predictor=model,\n        overlap=args.infer_overlap,\n    )\n\n    model_dict = torch.load(pretrained_pth)[\"state_dict\"]\n    model.load_state_dict(model_dict, strict=True)\n    model.eval()\n    model.to(device)\n\n    acc_func = 
DiceMetric(include_background=False, reduction=MetricReduction.MEAN, get_not_nans=True)\n    run_acc = AverageMeter()\n    post_label = AsDiscrete(to_onehot=args.out_channels)\n    post_pred = AsDiscrete(argmax=True, to_onehot=args.out_channels)\n\n    with torch.no_grad():\n        all_dice = None\n        num = np.zeros(13)\n        dice_list_case = []\n        for idx, batch_data in enumerate(val_loader):\n            img_name = batch_data[\"image_meta_dict\"][\"filename_or_obj\"][0].split(\"/\")[-1]\n\n            if isinstance(batch_data, list):\n                data, target = batch_data\n            else:\n                data, target = batch_data[\"image\"], batch_data[\"label\"]\n            data, target = data.cuda(), target.cuda()\n\n            print(data.shape, target.shape)\n            z = data.shape[-1]\n            data = F.interpolate(data, size=(263, 218, z), mode='trilinear')\n            target = F.interpolate(target, size=(263, 218, z), mode='nearest')\n            print(data.shape, target.shape)\n\n            with autocast(enabled=True):\n                if model_inferer is not None:\n                    logits = model_inferer(data)\n                else:\n                    logits = model(data)\n            if not logits.is_cuda:\n                target = target.cpu()\n\n            val_labels_list = decollate_batch(target)\n            val_labels_convert = [post_label(val_label_tensor) for val_label_tensor in val_labels_list]\n            val_outputs_list = decollate_batch(logits)\n            val_output_convert = [post_pred(val_pred_tensor) for val_pred_tensor in val_outputs_list]\n            acc_func.reset()\n            acc_func(y_pred=val_output_convert, y=val_labels_convert)\n            acc, not_nans = acc_func.aggregate()\n            run_acc.update(acc.cpu().numpy(), n=not_nans.cpu().numpy())\n            print(np.mean(run_acc.avg))\n\n            # # save predict\n            # print(logits.shape)\n            # val_outputs = 
torch.argmax(logits, 1).cpu().numpy()\n            # np.save(os.path.join(output_directory, 'pre'+ img_name[3:-7]+'.npy'), val_outputs.astype(np.uint8)[0])\n            # # save label\n            # val_labels = target.cpu().numpy()\n            # np.save(os.path.join(output_directory, 'label' + img_name[3:-7] + '.npy'), val_labels.astype(np.uint8)[0][0])\n            #\n            # # save input\n            # img = data.cpu().numpy()\n            # img = img * 255\n            # print(np.max(img))\n            # np.save(os.path.join(output_directory, 'img' + img_name[3:-7] + '.npy'), img.astype(np.uint8)[0][0])\n\n\nif __name__ == \"__main__\":\n    main()\n\n    # outputs = torch.argmax(logits, 1).cpu().numpy()\n    # outputs = outputs.astype(np.uint8)[0]\n    # val_labels = target.cpu().numpy()[0, 0, :, :, :]\n    #\n    # len_class = len(list(np.unique(val_labels))) - 1\n    # dice_list_sub = []\n    # for i in range(1, 14):\n    #     # judge this class exist or not, ignore background\n    #     num[i - 1] += (np.sum(val_labels == i) > 0).astype(np.uint8)\n    #     organ_Dice = dice(outputs == i, val_labels == i)\n    #     dice_list_sub.append(organ_Dice)\n    #\n    # mean_dice = np.sum(dice_list_sub) / len_class\n    # print(\"Mean Organ Dice: {}\".format(mean_dice))\n    #\n    # # acc of each organ\n    # print(\"Organ Dice:\", dice_list_sub)\n    #\n    # if all_dice is None:\n    #     all_dice = (np.asarray(dice_list_sub)).copy()\n    # else:\n    #     all_dice = all_dice + np.asarray(dice_list_sub)\n    # print(\"Organ Dice accumulate:\", all_dice*100 / num)\n    #\n    # dice_list_case.append(mean_dice)\n    # print(\"Overall Mean Dice: {}\".format(100*np.mean(dice_list_case)))\n"
  },
  {
    "path": "Finetune/BTCV/dataset/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/BTCV/dataset/dataset_0.json",
    "content": " {\n    \"description\": \"btcv yucheng\",\n    \"labels\": {\n        \"0\": \"background\",\n        \"1\": \"spleen\",\n        \"2\": \"rkid\",\n        \"3\": \"lkid\",\n        \"4\": \"gall\",\n        \"5\": \"eso\",\n        \"6\": \"liver\",\n        \"7\": \"sto\",\n        \"8\": \"aorta\",\n        \"9\": \"IVC\",\n        \"10\": \"veins\",\n        \"11\": \"pancreas\",\n        \"12\": \"rad\",\n        \"13\": \"lad\"\n    },\n    \"licence\": \"yt\",\n    \"\": {\n        \"0\": \"CT\"\n    },\n    \"name\": \"btcv\",\n    \"numTest\": 20,\n    \"numTraining\": 80,\n    \"reference\": \"Vanderbilt University\",\n    \"release\": \"1.0 06/08/2015\",\n    \"tensorImageSize\": \"3D\",\n    \"test\": [\n        \"imagesTs/img0061.nii.gz\",\n        \"imagesTs/img0062.nii.gz\",\n        \"imagesTs/img0063.nii.gz\",\n        \"imagesTs/img0064.nii.gz\",\n        \"imagesTs/img0065.nii.gz\",\n        \"imagesTs/img0066.nii.gz\",\n        \"imagesTs/img0067.nii.gz\",\n        \"imagesTs/img0068.nii.gz\",\n        \"imagesTs/img0069.nii.gz\",\n        \"imagesTs/img0070.nii.gz\",\n        \"imagesTs/img0071.nii.gz\",\n        \"imagesTs/img0072.nii.gz\",\n        \"imagesTs/img0073.nii.gz\",\n        \"imagesTs/img0074.nii.gz\",\n        \"imagesTs/img0075.nii.gz\",\n        \"imagesTs/img0076.nii.gz\",\n        \"imagesTs/img0077.nii.gz\",\n        \"imagesTs/img0078.nii.gz\",\n        \"imagesTs/img0079.nii.gz\",\n        \"imagesTs/img0080.nii.gz\"\n    ],\n    \"training\": [\n        {\n            \"image\": \"imagesTr/img0001.nii.gz\",\n            \"label\": \"labelsTr/label0001.nii.gz\"\n        },\n        {\n            \"image\": \"imagesTr/img0002.nii.gz\",\n            \"label\": \"labelsTr/label0002.nii.gz\"\n        },\n        {\n            \"image\": \"imagesTr/img0003.nii.gz\",\n            \"label\": \"labelsTr/label0003.nii.gz\"\n        },\n        {\n            \"image\": \"imagesTr/img0004.nii.gz\",\n            
\"label\": \"labelsTr/label0004.nii.gz\"\n        },\n        {\n            \"image\": \"imagesTr/img0005.nii.gz\",\n            \"label\": \"labelsTr/label0005.nii.gz\"\n        },\n        {\n            \"image\": \"imagesTr/img0006.nii.gz\",\n            \"label\": \"labelsTr/label0006.nii.gz\"\n        },\n        {\n            \"image\": \"imagesTr/img0007.nii.gz\",\n            \"label\": \"labelsTr/label0007.nii.gz\"\n        },\n        {\n            \"image\": \"imagesTr/img0008.nii.gz\",\n            \"label\": \"labelsTr/label0008.nii.gz\"\n        },\n        {\n            \"image\": \"imagesTr/img0009.nii.gz\",\n            \"label\": \"labelsTr/label0009.nii.gz\"\n        },\n        {\n            \"image\": \"imagesTr/img0010.nii.gz\",\n            \"label\": \"labelsTr/label0010.nii.gz\"\n        },\n\n        {\n            \"image\": \"imagesTr/img0021.nii.gz\",\n            \"label\": \"labelsTr/label0021.nii.gz\"\n        },\n        {\n            \"image\": \"imagesTr/img0022.nii.gz\",\n            \"label\": \"labelsTr/label0022.nii.gz\"\n        },\n        {\n            \"image\": \"imagesTr/img0023.nii.gz\",\n            \"label\": \"labelsTr/label0023.nii.gz\"\n        },\n        {\n            \"image\": \"imagesTr/img0024.nii.gz\",\n            \"label\": \"labelsTr/label0024.nii.gz\"\n        },\n        {\n            \"image\": \"imagesTr/img0025.nii.gz\",\n            \"label\": \"labelsTr/label0025.nii.gz\"\n        },\n        {\n            \"image\": \"imagesTr/img0026.nii.gz\",\n            \"label\": \"labelsTr/label0026.nii.gz\"\n        },\n        {\n            \"image\": \"imagesTr/img0027.nii.gz\",\n            \"label\": \"labelsTr/label0027.nii.gz\"\n        },\n        {\n            \"image\": \"imagesTr/img0028.nii.gz\",\n            \"label\": \"labelsTr/label0028.nii.gz\"\n        },\n        {\n            \"image\": \"imagesTr/img0029.nii.gz\",\n            \"label\": \"labelsTr/label0029.nii.gz\"\n      
  },\n        {\n            \"image\": \"imagesTr/img0030.nii.gz\",\n            \"label\": \"labelsTr/label0030.nii.gz\"\n        },\n        {\n            \"image\": \"imagesTr/img0031.nii.gz\",\n            \"label\": \"labelsTr/label0031.nii.gz\"\n        },\n        {\n            \"image\": \"imagesTr/img0032.nii.gz\",\n            \"label\": \"labelsTr/label0032.nii.gz\"\n        },\n        {\n            \"image\": \"imagesTr/img0033.nii.gz\",\n            \"label\": \"labelsTr/label0033.nii.gz\"\n        },\n        {\n            \"image\": \"imagesTr/img0034.nii.gz\",\n            \"label\": \"labelsTr/label0034.nii.gz\"\n        }\n    ],\n    \"validation\": [\n        {\n            \"image\": \"imagesTr/img0035.nii.gz\",\n            \"label\": \"labelsTr/label0035.nii.gz\"\n        },\n        {\n            \"image\": \"imagesTr/img0036.nii.gz\",\n            \"label\": \"labelsTr/label0036.nii.gz\"\n        },\n        {\n            \"image\": \"imagesTr/img0037.nii.gz\",\n            \"label\": \"labelsTr/label0037.nii.gz\"\n        },\n        {\n            \"image\": \"imagesTr/img0038.nii.gz\",\n            \"label\": \"labelsTr/label0038.nii.gz\"\n        },\n        {\n            \"image\": \"imagesTr/img0039.nii.gz\",\n            \"label\": \"labelsTr/label0039.nii.gz\"\n        },\n        {\n            \"image\": \"imagesTr/img0040.nii.gz\",\n            \"label\": \"labelsTr/label0040.nii.gz\"\n        }\n    ]\n}\n"
  },
  {
    "path": "Finetune/BTCV/main.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport argparse\nimport os\nfrom functools import partial\nimport logging\nimport numpy as np\nimport torch\nimport torch.distributed as dist\nimport torch.multiprocessing as mp\nimport torch.nn.parallel\nimport torch.utils.data.distributed\nfrom optimizers.lr_scheduler import LinearWarmupCosineAnnealingLR\nfrom trainer import run_training\nfrom utils.data_utils import get_loader\nimport torch.nn as nn\nfrom monai.inferers import sliding_window_inference\nfrom monai.losses import DiceCELoss\nfrom monai.metrics import DiceMetric\nfrom monai.networks.nets import SwinUNETR\nfrom monai.transforms import Activations, AsDiscrete, Compose\nfrom monai.utils.enums import MetricReduction\nfrom monai.networks.blocks import PatchEmbed, UnetOutBlock, UnetrBasicBlock, UnetrUpBlock\nfrom monai.networks.nets.swin_unetr import SwinTransformer as SwinViT\nfrom monai.utils import ensure_tuple_rep\n\nos.environ['CUDA_VISIBLE_DEVICES'] = \"0\"\nos.environ['MASTER_ADDR'] = 'localhost'\nos.environ['MASTER_PORT'] = '28890'\n\nimport resource\n\nrlimit = resource.getrlimit(resource.RLIMIT_NOFILE)\nresource.setrlimit(resource.RLIMIT_NOFILE, (8192, rlimit[1]))\nprint('Setting resource limit:', str(resource.getrlimit(resource.RLIMIT_NOFILE)))\n\nparser = argparse.ArgumentParser(description=\"Swin UNETR segmentation pipeline\")\nparser.add_argument(\"--checkpoint\", default=None, help=\"start 
training from saved checkpoint\")\nparser.add_argument(\"--logdir\", default=\"logs\", type=str, help=\"directory to save the tensorboard logs\")\nparser.add_argument(\n    \"--pretrained_dir\", default=\"./pretrained_models/\", type=str, help=\"pretrained checkpoint directory\"\n)\nparser.add_argument(\"--data_dir\", default=\"./data/BTCV/\", type=str, help=\"YOUR btcv dataset directory\")\nparser.add_argument(\"--cache_dir\", default=\"./data/cache/BTCV/\", type=str, help=\"YOUR btcv dataset cache directory\")\nparser.add_argument(\"--json_list\", default=\"dataset_0.json\", type=str, help=\"dataset json file\")\nparser.add_argument(\n    \"--pretrained_checkpoint\",default=\"VoCo_10k.pt\", type=str, help=\"VoCo_10k pretrained model\")\n\nparser.add_argument(\n    \"--pretrained_model_name\",\n    default=\"model_bestVal.pt\",\n    type=str,\n    help=\"pretrained model name\",\n)\nroi = 96\nparser.add_argument(\"--save_checkpoint\", default=True, help=\"save checkpoint during training\")\nparser.add_argument(\"--max_epochs\", default=3000, type=int, help=\"max number of training epochs\")\nparser.add_argument(\"--batch_size\", default=1, type=int, help=\"number of batch size\")\nparser.add_argument(\"--sw_batch_size\", default=16, type=int, help=\"number of sliding window batch size\")\nparser.add_argument(\"--optim_lr\", default=3e-4, type=float, help=\"optimization learning rate\")\nparser.add_argument(\"--optim_name\", default=\"adamw\", type=str, help=\"optimization algorithm\")\nparser.add_argument(\"--reg_weight\", default=1e-5, type=float, help=\"regularization weight\")\nparser.add_argument(\"--momentum\", default=0.99, type=float, help=\"momentum\")\nparser.add_argument(\"--noamp\", default=True, help=\"do NOT use amp for training\")\nparser.add_argument(\"--val_every\", default=50, type=int, help=\"validation frequency\")\nparser.add_argument(\"--distributed\", action=\"store_true\", help=\"start distributed 
training\")\nparser.add_argument(\"--world_size\", default=1, type=int, help=\"number of nodes for distributed training\")\nparser.add_argument(\"--rank\", default=0, type=int, help=\"node rank for distributed training\")\nparser.add_argument(\"--dist-url\", default=\"tcp://127.0.0.1:23456\", type=str, help=\"distributed url\")\nparser.add_argument(\"--dist-backend\", default=\"nccl\", type=str, help=\"distributed backend\")\nparser.add_argument(\"--norm_name\", default=\"instance\", type=str, help=\"normalization name\")\nparser.add_argument(\"--workers\", default=8, type=int, help=\"number of workers\")\nparser.add_argument(\"--feature_size\", default=48, type=int, help=\"feature size\")\nparser.add_argument(\"--in_channels\", default=1, type=int, help=\"number of input channels\")\nparser.add_argument(\"--out_channels\", default=14, type=int, help=\"number of output channels\")\nparser.add_argument(\"--use_normal_dataset\", default=True, help=\"use monai Dataset class\")\nparser.add_argument(\"--a_min\", default=-175.0, type=float, help=\"a_min in ScaleIntensityRanged\")\nparser.add_argument(\"--a_max\", default=250.0, type=float, help=\"a_max in ScaleIntensityRanged\")\nparser.add_argument(\"--b_min\", default=0.0, type=float, help=\"b_min in ScaleIntensityRanged\")\nparser.add_argument(\"--b_max\", default=1.0, type=float, help=\"b_max in ScaleIntensityRanged\")\nparser.add_argument(\"--space_x\", default=1.5, type=float, help=\"spacing in x direction\")\nparser.add_argument(\"--space_y\", default=1.5, type=float, help=\"spacing in y direction\")\nparser.add_argument(\"--space_z\", default=1.5, type=float, help=\"spacing in z direction\")\nparser.add_argument(\"--roi_x\", default=roi, type=int, help=\"roi size in x direction\")\nparser.add_argument(\"--roi_y\", default=roi, type=int, help=\"roi size in y direction\")\nparser.add_argument(\"--roi_z\", default=roi, type=int, help=\"roi size in z direction\")\nparser.add_argument(\"--dropout_rate\", default=0.0, 
type=float, help=\"dropout rate\")\nparser.add_argument(\"--dropout_path_rate\", default=0.0, type=float, help=\"drop path rate\")\nparser.add_argument(\"--RandFlipd_prob\", default=0.2, type=float, help=\"RandFlipd aug probability\")\nparser.add_argument(\"--RandRotate90d_prob\", default=0.2, type=float, help=\"RandRotate90d aug probability\")\nparser.add_argument(\"--RandScaleIntensityd_prob\", default=0.1, type=float, help=\"RandScaleIntensityd aug probability\")\nparser.add_argument(\"--RandShiftIntensityd_prob\", default=0.5, type=float, help=\"RandShiftIntensityd aug probability\")\nparser.add_argument(\"--infer_overlap\", default=0.75, type=float, help=\"sliding window inference overlap\")\nparser.add_argument(\"--lrschedule\", default=\"warmup_cosine\", type=str, help=\"type of learning rate scheduler\")\nparser.add_argument(\"--warmup_epochs\", default=100, type=int, help=\"number of warmup epochs\")\nparser.add_argument(\"--resume_ckpt\", action=\"store_true\", help=\"resume training from pretrained checkpoint\")\nparser.add_argument(\"--smooth_dr\", default=1e-6, type=float, help=\"constant added to dice denominator to avoid nan\")\nparser.add_argument(\"--smooth_nr\", default=0.0, type=float, help=\"constant added to dice numerator to avoid zero\")\nparser.add_argument(\"--use_checkpoint\", default=True, help=\"use gradient checkpointing to save memory\")\nparser.add_argument(\"--use_ssl_pretrained\", default=True, help=\"use self-supervised pretrained weights\")\nparser.add_argument(\"--spatial_dims\", default=3, type=int, help=\"spatial dimension of input data\")\nparser.add_argument(\"--squared_dice\", action=\"store_true\", help=\"use squared Dice\")\n\n\ndef main():\n    args = parser.parse_args()\n    args.amp = not args.noamp\n    if args.distributed:\n        args.ngpus_per_node = torch.cuda.device_count()\n        print(\"Found total gpus\", args.ngpus_per_node)\n        args.world_size = args.ngpus_per_node * args.world_size\n        
mp.spawn(main_worker, nprocs=args.ngpus_per_node, args=(args,))\n    else:\n        main_worker(gpu=0, args=args)\n\n\ndef main_worker(gpu, args):\n    if args.distributed:\n        torch.multiprocessing.set_start_method(\"fork\", force=True)\n    np.set_printoptions(formatter={\"float\": \"{: 0.3f}\".format}, suppress=True)\n    args.gpu = gpu\n    if args.distributed:\n        args.rank = args.rank * args.ngpus_per_node + gpu\n        dist.init_process_group(\n            backend=args.dist_backend, init_method=args.dist_url, world_size=args.world_size, rank=args.rank\n        )\n    torch.cuda.set_device(args.gpu)\n\n    torch.backends.cudnn.enabled = True\n    torch.backends.cudnn.benchmark = True\n    args.test_mode = False\n    loader = get_loader(args)\n    print(args.rank, \" gpu\", args.gpu)\n    if args.rank == 0:\n        print(\"Batch size is:\", args.batch_size, \"epochs\", args.max_epochs)\n    inf_size = [args.roi_x, args.roi_y, args.roi_z]\n\n    if args.rank == 0:\n        os.makedirs(args.logdir, exist_ok=True)\n    logger = init_log('global', logging.INFO)\n    logger.propagate = 0\n\n    pretrained_dir = args.pretrained_dir\n    model = SwinUNETR(\n        img_size=(args.roi_x, args.roi_y, args.roi_z),\n        in_channels=args.in_channels,\n        out_channels=args.out_channels,\n        feature_size=args.feature_size,\n        drop_rate=0.0,\n        attn_drop_rate=0.0,\n        dropout_path_rate=args.dropout_path_rate,\n        use_checkpoint=args.use_checkpoint,\n        use_v2=True\n    )\n\n    if args.resume_ckpt:\n        model_dict = torch.load(os.path.join(pretrained_dir, args.pretrained_model_name))[\"state_dict\"]\n        model.load_state_dict(model_dict)\n        print(\"Use pretrained weights\")\n\n    if args.use_ssl_pretrained:\n        try:\n            # model_VoCoEMA.pt\n            # model_dict = torch.load(\"./pretrained_models/supervised_suprem_swinunetr_2100.pth\", map_location=torch.device('cpu'))\n            # model_dict = 
torch.load(\"./pretrained_models/model_VoCoEMA.pt\", map_location=torch.device('cpu'))\n            model_dict = torch.load(args.pretrained_checkpoint,\n                                    map_location=torch.device('cpu'))\n            state_dict = model_dict\n            # fix potential differences in state dict keys from pre-training to\n            # fine-tuning\n            if \"module.\" in list(state_dict.keys())[0]:\n                print(\"Tag 'module.' found in state dict - fixing!\")\n                for key in list(state_dict.keys()):\n                    state_dict[key.replace(\"module.\", \"\")] = state_dict.pop(key)\n            if \"swin_vit\" in list(state_dict.keys())[0]:\n                print(\"Tag 'swin_vit' found in state dict - fixing!\")\n                for key in list(state_dict.keys()):\n                    state_dict[key.replace(\"swin_vit\", \"swinViT\")] = state_dict.pop(key)\n            # We now load model weights, setting param `strict` to False, i.e.:\n            # this load the encoder weights (Swin-ViT, SSL pre-trained), but leaves\n            # the decoder weights untouched (CNN UNet decoder).\n            model.load_state_dict(state_dict, strict=False)\n            print(\"Using pretrained voco ema self-supervised Swin UNETR backbone weights !\")\n        except ValueError:\n            raise ValueError(\"Self-supervised pre-trained weights not available for\" + str(args.pretrained_checkpoint))\n\n    if args.squared_dice:\n        dice_loss = DiceCELoss(\n            to_onehot_y=True, softmax=True, squared_pred=True, smooth_nr=args.smooth_nr, smooth_dr=args.smooth_dr\n        )\n    else:\n        dice_loss = DiceCELoss(include_background=False, to_onehot_y=True, softmax=True)\n\n    post_label = AsDiscrete(to_onehot=args.out_channels)\n    post_pred = AsDiscrete(argmax=True, to_onehot=args.out_channels)\n    dice_acc = DiceMetric(include_background=False, reduction=MetricReduction.MEAN, get_not_nans=True)\n    model_inferer = 
partial(\n        sliding_window_inference,\n        roi_size=inf_size,\n        sw_batch_size=args.sw_batch_size,\n        predictor=model,\n        overlap=args.infer_overlap,\n    )\n\n    pytorch_total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)\n    print(\"Total parameters count\", pytorch_total_params)\n\n    best_acc = 0\n    start_epoch = 0\n\n    if args.checkpoint is not None:\n        checkpoint = torch.load(args.checkpoint, map_location=\"cpu\")\n        from collections import OrderedDict\n\n        new_state_dict = OrderedDict()\n        for k, v in checkpoint[\"state_dict\"].items():\n            new_state_dict[k.replace(\"backbone.\", \"\")] = v\n        model.load_state_dict(new_state_dict, strict=False)\n        if \"epoch\" in checkpoint:\n            start_epoch = checkpoint[\"epoch\"]\n        if \"best_acc\" in checkpoint:\n            best_acc = checkpoint[\"best_acc\"]\n        print(\"=> loaded checkpoint '{}' (epoch {}) (bestacc {})\".format(args.checkpoint, start_epoch, best_acc))\n\n    model.cuda(args.gpu)\n\n    if args.distributed:\n        torch.cuda.set_device(args.gpu)\n        if args.norm_name == \"batch\":\n            model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model)\n        model.cuda(args.gpu)\n        model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu], output_device=args.gpu)\n    if args.optim_name == \"adam\":\n        optimizer = torch.optim.Adam(model.parameters(), lr=args.optim_lr, weight_decay=args.reg_weight)\n\n    elif args.optim_name == \"adamw\":\n        optimizer = torch.optim.AdamW(model.parameters(), lr=args.optim_lr, weight_decay=args.reg_weight)\n        # optimizer = torch.optim.AdamW(model.parameters(), lr=args.optim_lr, amsgrad=True)\n\n    elif args.optim_name == \"sgd\":\n        optimizer = torch.optim.SGD(\n            model.parameters(), lr=args.optim_lr, momentum=args.momentum, nesterov=True, weight_decay=args.reg_weight\n        
)\n    else:\n        raise ValueError(\"Unsupported Optimization Procedure: \" + str(args.optim_name))\n\n    if args.lrschedule == \"warmup_cosine\":\n        scheduler = LinearWarmupCosineAnnealingLR(\n            optimizer, warmup_epochs=args.warmup_epochs, max_epochs=args.max_epochs\n        )\n    elif args.lrschedule == \"cosine_anneal\":\n        scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=args.max_epochs)\n        if args.checkpoint is not None:\n            scheduler.step(epoch=start_epoch)\n    else:\n        scheduler = None\n    accuracy = run_training(\n        model=model,\n        train_loader=loader[0],\n        val_loader=loader[1],\n        optimizer=optimizer,\n        loss_func=dice_loss,\n        acc_func=dice_acc,\n        args=args,\n        model_inferer=model_inferer,\n        scheduler=scheduler,\n        start_epoch=start_epoch,\n        post_label=post_label,\n        post_pred=post_pred,\n    )\n    return accuracy\n\n\nlogs = set()\n\n\ndef init_log(name, level=logging.INFO):\n    if (name, level) in logs:\n        return\n    logs.add((name, level))\n    logger = logging.getLogger(name)\n    logger.setLevel(level)\n    ch = logging.StreamHandler()\n    ch.setLevel(level)\n    if \"SLURM_PROCID\" in os.environ:\n        rank = int(os.environ[\"SLURM_PROCID\"])\n        logger.addFilter(lambda record: rank == 0)\n    else:\n        rank = 0\n    format_str = \"[%(asctime)s][%(levelname)8s] %(message)s\"\n    formatter = logging.Formatter(format_str)\n    ch.setFormatter(formatter)\n    logger.addHandler(ch)\n    return logger\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },
  {
    "path": "Finetune/BTCV/optimizers/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/BTCV/optimizers/lr_scheduler.py",
    "content": "# Copyright 2020 - 2021 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nimport warnings\nfrom typing import List\n\nfrom torch import nn as nn\nfrom torch.optim import Adam, Optimizer\nfrom torch.optim.lr_scheduler import LambdaLR, _LRScheduler\n\n__all__ = [\"LinearLR\", \"ExponentialLR\"]\n\n\nclass _LRSchedulerMONAI(_LRScheduler):\n    \"\"\"Base class for increasing the learning rate between two boundaries over a number\n    of iterations\"\"\"\n\n    def __init__(self, optimizer: Optimizer, end_lr: float, num_iter: int, last_epoch: int = -1) -> None:\n        \"\"\"\n        Args:\n            optimizer: wrapped optimizer.\n            end_lr: the final learning rate.\n            num_iter: the number of iterations over which the test occurs.\n            last_epoch: the index of last epoch.\n        Returns:\n            None\n        \"\"\"\n        self.end_lr = end_lr\n        self.num_iter = num_iter\n        super(_LRSchedulerMONAI, self).__init__(optimizer, last_epoch)\n\n\nclass LinearLR(_LRSchedulerMONAI):\n    \"\"\"Linearly increases the learning rate between two boundaries over a number of\n    iterations.\n    \"\"\"\n\n    def get_lr(self):\n        r = self.last_epoch / (self.num_iter - 1)\n        return [base_lr + r * (self.end_lr - base_lr) for base_lr in self.base_lrs]\n\n\nclass ExponentialLR(_LRSchedulerMONAI):\n    \"\"\"Exponentially increases the learning rate between two boundaries over a 
number of\n    iterations.\n    \"\"\"\n\n    def get_lr(self):\n        r = self.last_epoch / (self.num_iter - 1)\n        return [base_lr * (self.end_lr / base_lr) ** r for base_lr in self.base_lrs]\n\n\nclass WarmupCosineSchedule(LambdaLR):\n    \"\"\"Linear warmup and then cosine decay.\n    Based on https://huggingface.co/ implementation.\n    \"\"\"\n\n    def __init__(\n        self, optimizer: Optimizer, warmup_steps: int, t_total: int, cycles: float = 0.5, last_epoch: int = -1\n    ) -> None:\n        \"\"\"\n        Args:\n            optimizer: wrapped optimizer.\n            warmup_steps: number of warmup iterations.\n            t_total: total number of training iterations.\n            cycles: cosine cycles parameter.\n            last_epoch: the index of last epoch.\n        Returns:\n            None\n        \"\"\"\n        self.warmup_steps = warmup_steps\n        self.t_total = t_total\n        self.cycles = cycles\n        super(WarmupCosineSchedule, self).__init__(optimizer, self.lr_lambda, last_epoch)\n\n    def lr_lambda(self, step):\n        if step < self.warmup_steps:\n            return float(step) / float(max(1.0, self.warmup_steps))\n        progress = float(step - self.warmup_steps) / float(max(1, self.t_total - self.warmup_steps))\n        return max(0.0, 0.5 * (1.0 + math.cos(math.pi * float(self.cycles) * 2.0 * progress)))\n\n\nclass LinearWarmupCosineAnnealingLR(_LRScheduler):\n    def __init__(\n        self,\n        optimizer: Optimizer,\n        warmup_epochs: int,\n        max_epochs: int,\n        warmup_start_lr: float = 0.0,\n        eta_min: float = 0.0,\n        last_epoch: int = -1,\n    ) -> None:\n        \"\"\"\n        Args:\n            optimizer (Optimizer): Wrapped optimizer.\n            warmup_epochs (int): Maximum number of iterations for linear warmup\n            max_epochs (int): Maximum number of iterations\n            warmup_start_lr (float): Learning rate to start the linear warmup. 
Default: 0.\n            eta_min (float): Minimum learning rate. Default: 0.\n            last_epoch (int): The index of last epoch. Default: -1.\n        \"\"\"\n        self.warmup_epochs = warmup_epochs\n        self.max_epochs = max_epochs\n        self.warmup_start_lr = warmup_start_lr\n        self.eta_min = eta_min\n\n        super(LinearWarmupCosineAnnealingLR, self).__init__(optimizer, last_epoch)\n\n    def get_lr(self) -> List[float]:\n        \"\"\"\n        Compute learning rate using chainable form of the scheduler\n        \"\"\"\n        if not self._get_lr_called_within_step:\n            warnings.warn(\n                \"To get the last learning rate computed by the scheduler, \" \"please use `get_last_lr()`.\", UserWarning\n            )\n\n        if self.last_epoch == 0:\n            return [self.warmup_start_lr] * len(self.base_lrs)\n        elif self.last_epoch < self.warmup_epochs:\n            return [\n                group[\"lr\"] + (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)\n                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)\n            ]\n        elif self.last_epoch == self.warmup_epochs:\n            return self.base_lrs\n        elif (self.last_epoch - 1 - self.max_epochs) % (2 * (self.max_epochs - self.warmup_epochs)) == 0:\n            return [\n                group[\"lr\"]\n                + (base_lr - self.eta_min) * (1 - math.cos(math.pi / (self.max_epochs - self.warmup_epochs))) / 2\n                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)\n            ]\n\n        return [\n            (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))\n            / (\n                1\n                + math.cos(\n                    math.pi * (self.last_epoch - self.warmup_epochs - 1) / (self.max_epochs - self.warmup_epochs)\n                )\n            )\n            * (group[\"lr\"] - 
self.eta_min)\n            + self.eta_min\n            for group in self.optimizer.param_groups\n        ]\n\n    def _get_closed_form_lr(self) -> List[float]:\n        \"\"\"\n        Called when epoch is passed as a param to the `step` function of the scheduler.\n        \"\"\"\n        if self.last_epoch < self.warmup_epochs:\n            return [\n                self.warmup_start_lr + self.last_epoch * (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)\n                for base_lr in self.base_lrs\n            ]\n\n        return [\n            self.eta_min\n            + 0.5\n            * (base_lr - self.eta_min)\n            * (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))\n            for base_lr in self.base_lrs\n        ]\n"
  },
  {
    "path": "Finetune/BTCV/trainer.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport os\nimport shutil\nimport time\n\nimport numpy as np\nimport torch\nimport torch.nn.parallel\nimport torch.utils.data.distributed\nfrom tensorboardX import SummaryWriter\nfrom torch.cuda.amp import GradScaler, autocast\nfrom utils.utils import AverageMeter, distributed_all_gather\n\nfrom monai.data import decollate_batch\n\n\ndef train_epoch(model, loader, optimizer, scheduler, scaler, epoch, loss_func, args):\n    model.train()\n    start_time = time.time()\n    run_loss = AverageMeter()\n    for idx, batch_data in enumerate(loader):\n        if isinstance(batch_data, list):\n            data, target = batch_data\n        else:\n            data, target = batch_data[\"image\"], batch_data[\"label\"]\n        data, target = data.cuda(), target.cuda()\n        for param in model.parameters():\n            param.grad = None\n        with autocast(enabled=args.amp):\n            logits = model(data)\n            loss = loss_func(logits, target)\n            #\n        if args.amp:\n            scaler.scale(loss).backward()\n            scaler.step(optimizer)\n            scaler.update()\n        else:\n            loss.backward()\n            optimizer.step()\n        if args.distributed:\n            loss_list = distributed_all_gather([loss], out_numpy=True, is_valid=idx < loader.sampler.valid_length)\n            run_loss.update(\n                
np.mean(np.mean(np.stack(loss_list, axis=0), axis=0), axis=0), n=args.batch_size * args.world_size\n            )\n        else:\n            run_loss.update(loss.item(), n=args.batch_size)\n        \n        lr = optimizer.param_groups[0][\"lr\"]\n        if scheduler is not None:\n            scheduler.step()\n        if args.rank == 0:\n            print(\n                \"Epoch {}/{} {}/{}\".format(epoch, args.max_epochs, idx, len(loader)),\n                \"loss: {:.4f}\".format(run_loss.avg),\n                \"lr: {:.8f}\".format(lr),\n                \"time {:.2f}s\".format(time.time() - start_time),\n            )\n        start_time = time.time()\n    for param in model.parameters():\n        param.grad = None\n    \n    return run_loss.avg\n\n\ndef val_epoch(model, loader, epoch, acc_func, args, model_inferer=None, post_label=None, post_pred=None):\n    model.eval()\n    run_acc = AverageMeter()\n    start_time = time.time()\n    with torch.no_grad():\n        for idx, batch_data in enumerate(loader):\n            if isinstance(batch_data, list):\n                data, target = batch_data\n            else:\n                data, target = batch_data[\"image\"], batch_data[\"label\"]\n            data, target = data.cuda(), target.cuda()\n            with autocast(enabled=args.amp):\n                if model_inferer is not None:\n                    logits = model_inferer(data)\n                else:\n                    logits = model(data)\n            if not logits.is_cuda:\n                target = target.cpu()\n            val_labels_list = decollate_batch(target)\n            val_labels_convert = [post_label(val_label_tensor) for val_label_tensor in val_labels_list]\n            val_outputs_list = decollate_batch(logits)\n            val_output_convert = [post_pred(val_pred_tensor) for val_pred_tensor in val_outputs_list]\n            acc_func.reset()\n            acc_func(y_pred=val_output_convert, y=val_labels_convert)\n            acc, not_nans 
= acc_func.aggregate()\n            acc = acc.cuda(args.rank)\n\n            if args.distributed:\n                acc_list, not_nans_list = distributed_all_gather(\n                    [acc, not_nans], out_numpy=True, is_valid=idx < loader.sampler.valid_length\n                )\n                for al, nl in zip(acc_list, not_nans_list):\n                    run_acc.update(al, n=nl)\n\n            else:\n                run_acc.update(acc.cpu().numpy(), n=not_nans.cpu().numpy())\n\n            if args.rank == 0:\n                avg_acc = np.mean(run_acc.avg)\n                print(\n                    \"Val {}/{} {}/{}\".format(epoch, args.max_epochs, idx, len(loader)),\n                    \"acc\",\n                    avg_acc,\n                    \"time {:.2f}s\".format(time.time() - start_time),\n                )\n            start_time = time.time()\n    torch.cuda.empty_cache()\n    return run_acc.avg\n\n\ndef save_checkpoint(model, epoch, args, filename=\"model.pt\", best_acc=0, optimizer=None, scheduler=None):\n    state_dict = model.state_dict() if not args.distributed else model.module.state_dict()\n    save_dict = {\"epoch\": epoch, \"best_acc\": best_acc, \"state_dict\": state_dict}\n    if optimizer is not None:\n        save_dict[\"optimizer\"] = optimizer.state_dict()\n    if scheduler is not None:\n        save_dict[\"scheduler\"] = scheduler.state_dict()\n    filename = os.path.join(args.logdir, filename)\n    torch.save(save_dict, filename)\n    print(\"Saving checkpoint\", filename)\n\n\ndef run_training(\n    model,\n    train_loader,\n    val_loader,\n    optimizer,\n    loss_func,\n    acc_func,\n    args,\n    model_inferer=None,\n    scheduler=None,\n    start_epoch=0,\n    post_label=None,\n    post_pred=None,\n):\n    writer = None\n    if args.logdir is not None and args.rank == 0:\n        writer = SummaryWriter(log_dir=args.logdir)\n        if args.rank == 0:\n            print(\"Writing Tensorboard logs to \", args.logdir)\n    
scaler = None\n    if args.amp:\n        scaler = GradScaler()\n    val_acc_max = 0.0\n    for epoch in range(start_epoch, args.max_epochs):\n        if args.distributed:\n            train_loader.sampler.set_epoch(epoch)\n            torch.distributed.barrier()\n        print(args.rank, time.ctime(), \"Epoch:\", epoch)\n        epoch_time = time.time()\n        train_loss = train_epoch(\n            model, train_loader, optimizer, scheduler, scaler=scaler, epoch=epoch, loss_func=loss_func, args=args\n        )\n        if args.rank == 0:\n            print(\n                \"Final training  {}/{}\".format(epoch, args.max_epochs - 1),\n                \"loss: {:.4f}\".format(train_loss),\n                \"time {:.2f}s\".format(time.time() - epoch_time),\n            )\n        if args.rank == 0 and writer is not None:\n            writer.add_scalar(\"train_loss\", train_loss, epoch)\n        b_new_best = False\n        if (epoch + 1) % args.val_every == 0:\n            if args.distributed:\n                torch.distributed.barrier()\n            epoch_time = time.time()\n            val_avg_acc = val_epoch(\n                model,\n                val_loader,\n                epoch=epoch,\n                acc_func=acc_func,\n                model_inferer=model_inferer,\n                args=args,\n                post_label=post_label,\n                post_pred=post_pred,\n            )\n\n            val_avg_acc = np.mean(val_avg_acc)\n\n            if args.rank == 0:\n                print(\n                    \"Final validation  {}/{}\".format(epoch, args.max_epochs - 1),\n                    \"acc\",\n                    val_avg_acc,\n                    \"time {:.2f}s\".format(time.time() - epoch_time),\n                )\n                if writer is not None:\n                    writer.add_scalar(\"val_acc\", val_avg_acc, epoch)\n                if val_avg_acc > val_acc_max:\n                    print(\"new best ({:.6f} --> {:.6f}). 
\".format(val_acc_max, val_avg_acc))\n                    val_acc_max = val_avg_acc\n                    b_new_best = True\n                    if args.rank == 0 and args.logdir is not None and args.save_checkpoint:\n                        save_checkpoint(\n                            model, epoch, args, best_acc=val_acc_max, optimizer=optimizer, scheduler=scheduler\n                        )\n            if args.rank == 0 and args.logdir is not None and args.save_checkpoint:\n                save_checkpoint(model, epoch, args, best_acc=val_acc_max, filename=\"model_final.pt\")\n                if b_new_best:\n                    print(\"Copying to model.pt new best model!!!!\")\n                    shutil.copyfile(os.path.join(args.logdir, \"model_final.pt\"), os.path.join(args.logdir, \"model.pt\"))\n\n    print(\"Training Finished !, Best Accuracy: \", val_acc_max)\n\n    return val_acc_max\n"
  },
  {
    "path": "Finetune/BTCV/utils/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/BTCV/utils/data_test.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nimport os\nimport pickle\nimport numpy as np\nimport torch\nimport itertools as it\nfrom monai import data, transforms\nfrom monai.data import *\n\n\nclass Sampler(torch.utils.data.Sampler):\n    def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, make_even=True):\n        if num_replicas is None:\n            if not torch.distributed.is_available():\n                raise RuntimeError(\"Requires distributed package to be available\")\n            num_replicas = torch.distributed.get_world_size()\n        if rank is None:\n            if not torch.distributed.is_available():\n                raise RuntimeError(\"Requires distributed package to be available\")\n            rank = torch.distributed.get_rank()\n        self.shuffle = shuffle\n        self.make_even = make_even\n        self.dataset = dataset\n        self.num_replicas = num_replicas\n        self.rank = rank\n        self.epoch = 0\n        self.num_samples = int(math.ceil(len(self.dataset) * 1.0 / self.num_replicas))\n        self.total_size = self.num_samples * self.num_replicas\n        indices = list(range(len(self.dataset)))\n        self.valid_length = len(indices[self.rank : self.total_size : self.num_replicas])\n\n    def __iter__(self):\n        if self.shuffle:\n            g = torch.Generator()\n            g.manual_seed(self.epoch)\n            indices = 
torch.randperm(len(self.dataset), generator=g).tolist()\n        else:\n            indices = list(range(len(self.dataset)))\n        if self.make_even:\n            if len(indices) < self.total_size:\n                if self.total_size - len(indices) < len(indices):\n                    indices += indices[: (self.total_size - len(indices))]\n                else:\n                    extra_ids = np.random.randint(low=0, high=len(indices), size=self.total_size - len(indices))\n                    indices += [indices[ids] for ids in extra_ids]\n            assert len(indices) == self.total_size\n        indices = indices[self.rank : self.total_size : self.num_replicas]\n        self.num_samples = len(indices)\n        return iter(indices)\n\n    def __len__(self):\n        return self.num_samples\n\n    def set_epoch(self, epoch):\n        self.epoch = epoch\n\n\ndef get_loader(args):\n    data_dir = args.data_dir\n    datalist_json = os.path.join(data_dir, args.json_list)\n    transform = transforms.Compose(\n        [\n            transforms.LoadImaged(keys=[\"image\"]),\n            transforms.EnsureChannelFirstd(keys=[\"image\"]),\n            transforms.Orientationd(keys=[\"image\"], axcodes=\"RAS\"),\n            transforms.Spacingd(\n                keys=[\"image\"], pixdim=(args.space_x, args.space_y, args.space_z), mode=(\"bilinear\")\n            ),\n            transforms.ScaleIntensityRanged(\n                keys=[\"image\"], a_min=args.a_min, a_max=args.a_max, b_min=args.b_min, b_max=args.b_max, clip=True\n            ),\n            transforms.CropForegroundd(keys=[\"image\"], source_key=\"image\"),\n            transforms.ToTensord(keys=[\"image\"]),\n        ]\n    )\n\n    datalist = load_decathlon_datalist(datalist_json, True, \"test\", base_dir=data_dir)\n\n    print('use persistent')\n    ds = PersistentDataset(data=datalist,\n                             transform=transform,\n                             
pickle_protocol=pickle.HIGHEST_PROTOCOL,\n                             cache_dir=args.cache_dir)\n\n    sampler = Sampler(ds) if args.distributed else None\n    loader = data.DataLoader(\n        ds,\n        batch_size=args.batch_size,\n        shuffle=(sampler is None),\n        num_workers=args.workers,\n        sampler=sampler,\n        pin_memory=True,\n    )\n\n    return loader\n"
  },
  {
    "path": "Finetune/BTCV/utils/data_utils.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nimport os\nimport pickle\nimport numpy as np\nimport torch\nimport itertools as it\nfrom monai import data, transforms\nfrom monai.data import *\n\n\nclass Sampler(torch.utils.data.Sampler):\n    def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, make_even=True):\n        if num_replicas is None:\n            if not torch.distributed.is_available():\n                raise RuntimeError(\"Requires distributed package to be available\")\n            num_replicas = torch.distributed.get_world_size()\n        if rank is None:\n            if not torch.distributed.is_available():\n                raise RuntimeError(\"Requires distributed package to be available\")\n            rank = torch.distributed.get_rank()\n        self.shuffle = shuffle\n        self.make_even = make_even\n        self.dataset = dataset\n        self.num_replicas = num_replicas\n        self.rank = rank\n        self.epoch = 0\n        self.num_samples = int(math.ceil(len(self.dataset) * 1.0 / self.num_replicas))\n        self.total_size = self.num_samples * self.num_replicas\n        indices = list(range(len(self.dataset)))\n        self.valid_length = len(indices[self.rank : self.total_size : self.num_replicas])\n\n    def __iter__(self):\n        if self.shuffle:\n            g = torch.Generator()\n            g.manual_seed(self.epoch)\n            indices = 
torch.randperm(len(self.dataset), generator=g).tolist()\n        else:\n            indices = list(range(len(self.dataset)))\n        if self.make_even:\n            if len(indices) < self.total_size:\n                if self.total_size - len(indices) < len(indices):\n                    indices += indices[: (self.total_size - len(indices))]\n                else:\n                    extra_ids = np.random.randint(low=0, high=len(indices), size=self.total_size - len(indices))\n                    indices += [indices[ids] for ids in extra_ids]\n            assert len(indices) == self.total_size\n        indices = indices[self.rank : self.total_size : self.num_replicas]\n        self.num_samples = len(indices)\n        return iter(indices)\n\n    def __len__(self):\n        return self.num_samples\n\n    def set_epoch(self, epoch):\n        self.epoch = epoch\n\n\ndef get_loader(args):\n    data_dir = args.data_dir\n    datalist_json = os.path.join(data_dir, args.json_list)\n    train_transform = transforms.Compose(\n        [\n            transforms.LoadImaged(keys=[\"image\", \"label\"]),\n            transforms.EnsureChannelFirstd(keys=[\"image\", \"label\"]),\n            transforms.Orientationd(keys=[\"image\", \"label\"], axcodes=\"RAS\"),\n            transforms.Spacingd(\n                keys=[\"image\", \"label\"], pixdim=(args.space_x, args.space_y, args.space_z), mode=(\"bilinear\", \"nearest\")\n            ),\n            transforms.ScaleIntensityRanged(\n                keys=[\"image\"], a_min=args.a_min, a_max=args.a_max, b_min=args.b_min, b_max=args.b_max, clip=True\n            ),\n            transforms.CropForegroundd(keys=[\"image\", \"label\"], source_key=\"image\"),\n            \n            transforms.RandCropByPosNegLabeld(\n                keys=[\"image\", \"label\"],\n                label_key=\"label\",\n                spatial_size=(args.roi_x, args.roi_y, args.roi_z),\n                pos=3,\n                neg=1,\n                
num_samples=args.sw_batch_size,\n                image_key=\"image\",\n                image_threshold=0,\n            ),\n            transforms.RandFlipd(keys=[\"image\", \"label\"], prob=args.RandFlipd_prob, spatial_axis=0),\n            transforms.RandFlipd(keys=[\"image\", \"label\"], prob=args.RandFlipd_prob, spatial_axis=1),\n            transforms.RandFlipd(keys=[\"image\", \"label\"], prob=args.RandFlipd_prob, spatial_axis=2),\n            transforms.RandRotate90d(keys=[\"image\", \"label\"], prob=args.RandRotate90d_prob, max_k=3),\n            transforms.RandScaleIntensityd(keys=\"image\", factors=0.1, prob=args.RandScaleIntensityd_prob),\n            transforms.RandShiftIntensityd(keys=\"image\", offsets=0.1, prob=args.RandShiftIntensityd_prob),\n            \n            # transforms.ToTensord(keys=[\"image\", \"label\"]),\n        ]\n    )\n    val_transform = transforms.Compose(\n        [\n            transforms.LoadImaged(keys=[\"image\", \"label\"]),\n            transforms.EnsureChannelFirstd(keys=[\"image\", \"label\"]),\n            transforms.Orientationd(keys=[\"image\", \"label\"], axcodes=\"RAS\"),\n            transforms.Spacingd(\n                keys=[\"image\", \"label\"], pixdim=(args.space_x, args.space_y, args.space_z), mode=(\"bilinear\", \"nearest\")\n            ),\n            transforms.ScaleIntensityRanged(\n                keys=[\"image\"], a_min=args.a_min, a_max=args.a_max, b_min=args.b_min, b_max=args.b_max, clip=True\n            ),\n            transforms.CropForegroundd(keys=[\"image\", \"label\"], source_key=\"image\"),\n            # transforms.ToTensord(keys=[\"image\", \"label\"]),\n        ]\n    )\n\n    if args.test_mode:\n        test_files = load_decathlon_datalist(datalist_json, True, \"validation\", base_dir=data_dir)\n        test_ds = PersistentDataset(data=test_files,\n                                     transform=val_transform,\n                                     
pickle_protocol=pickle.HIGHEST_PROTOCOL,\n                                     cache_dir=args.cache_dir)\n        test_sampler = Sampler(test_ds, shuffle=False) if args.distributed else None\n        test_loader = data.DataLoader(\n            test_ds,\n            batch_size=1,\n            shuffle=False,\n            num_workers=args.workers,\n            sampler=test_sampler,\n            pin_memory=True,\n            persistent_workers=True,\n        )\n        loader = test_loader\n    else:\n        datalist = load_decathlon_datalist(datalist_json, True, \"training\", base_dir=data_dir)\n        if args.use_normal_dataset:\n            print('use persistent')\n            train_ds = PersistentDataset(data=datalist,\n                                     transform=train_transform,\n                                     pickle_protocol=pickle.HIGHEST_PROTOCOL,\n                                     cache_dir=args.cache_dir)\n            # train_ds = data.Dataset(data=datalist, transform=train_transform)\n        else:\n            train_ds = data.CacheDataset(\n                data=datalist, transform=train_transform, cache_num=24, cache_rate=1.0, num_workers=args.workers\n            )\n        train_sampler = Sampler(train_ds) if args.distributed else None\n        train_loader = data.DataLoader(\n            train_ds,\n            batch_size=args.batch_size,\n            shuffle=(train_sampler is None),\n            num_workers=args.workers,\n            sampler=train_sampler,\n            pin_memory=True,\n        )\n        val_files = load_decathlon_datalist(datalist_json, True, \"validation\", base_dir=data_dir)\n        # val_ds = data.Dataset(data=val_files, transform=val_transform)\n        val_ds = PersistentDataset(data=val_files,\n                                     transform=val_transform,\n                                     pickle_protocol=pickle.HIGHEST_PROTOCOL,\n                                     cache_dir=args.cache_dir)\n        
val_sampler = Sampler(val_ds, shuffle=False) if args.distributed else None\n        val_loader = data.DataLoader(\n            val_ds, batch_size=1, shuffle=False, num_workers=args.workers, sampler=val_sampler, pin_memory=True\n        )\n        loader = [train_loader, val_loader]\n\n    return loader\n"
  },
  {
    "path": "Finetune/BTCV/utils/utils.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport numpy as np\nimport scipy.ndimage as ndimage\nimport torch\n\n\ndef resample_3d(img, target_size):\n    imx, imy, imz = img.shape\n    tx, ty, tz = target_size\n    zoom_ratio = (float(tx) / float(imx), float(ty) / float(imy), float(tz) / float(imz))\n    img_resampled = ndimage.zoom(img, zoom_ratio, order=0, prefilter=False)\n    return img_resampled\n\n\ndef dice(x, y):\n    intersect = np.sum(np.sum(np.sum(x * y)))\n    y_sum = np.sum(np.sum(np.sum(y)))\n    if y_sum == 0:\n        return 0.0\n    x_sum = np.sum(np.sum(np.sum(x)))\n    return 2 * intersect / (x_sum + y_sum)\n\n\nclass AverageMeter(object):\n    def __init__(self):\n        self.reset()\n\n    def reset(self):\n        self.val = 0\n        self.avg = 0\n        self.sum = 0\n        self.count = 0\n\n    def update(self, val, n=1):\n        self.val = val\n        self.sum += val * n\n        self.count += n\n        self.avg = np.where(self.count > 0, self.sum / self.count, self.sum)\n\n\ndef distributed_all_gather(\n    tensor_list, valid_batch_size=None, out_numpy=False, world_size=None, no_barrier=False, is_valid=None\n):\n    if world_size is None:\n        world_size = torch.distributed.get_world_size()\n    if valid_batch_size is not None:\n        valid_batch_size = min(valid_batch_size, world_size)\n    elif is_valid is not None:\n        is_valid = torch.tensor(bool(is_valid), 
dtype=torch.bool, device=tensor_list[0].device)\n    if not no_barrier:\n        torch.distributed.barrier()\n    tensor_list_out = []\n    with torch.no_grad():\n        if is_valid is not None:\n            is_valid_list = [torch.zeros_like(is_valid) for _ in range(world_size)]\n            torch.distributed.all_gather(is_valid_list, is_valid)\n            is_valid = [x.item() for x in is_valid_list]\n        for tensor in tensor_list:\n            gather_list = [torch.zeros_like(tensor) for _ in range(world_size)]\n            torch.distributed.all_gather(gather_list, tensor)\n            if valid_batch_size is not None:\n                gather_list = gather_list[:valid_batch_size]\n            elif is_valid is not None:\n                gather_list = [g for g, v in zip(gather_list, is_valid_list) if v]\n            if out_numpy:\n                gather_list = [t.cpu().numpy() for t in gather_list]\n            tensor_list_out.append(gather_list)\n    return tensor_list_out\n"
  },
  {
    "path": "Finetune/BTCV/val.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport argparse\nimport os\nfrom functools import partial\nimport nibabel as nib\nimport numpy as np\nimport torch\nimport torch.nn.functional as F\nfrom torch.cuda.amp import GradScaler, autocast\nfrom utils.data_utils import get_loader\nfrom utils.utils import dice, resample_3d\nfrom utils.utils import AverageMeter, distributed_all_gather\n\nfrom monai.inferers import sliding_window_inference\nfrom monai.data import decollate_batch\nfrom monai.losses import DiceCELoss\nfrom monai.metrics import DiceMetric\nfrom monai.networks.nets import SwinUNETR\nfrom monai.transforms import Activations, AsDiscrete, Compose\nfrom monai.utils.enums import MetricReduction\n\nos.environ['CUDA_VISIBLE_DEVICES'] = \"0\"\nos.environ['MASTER_ADDR'] = 'localhost'\nos.environ['MASTER_PORT'] = '28890'\n\nparser = argparse.ArgumentParser(description=\"Swin UNETR segmentation pipeline\")\nparser.add_argument(\n    \"--pretrained_dir\", default=\"./runs/logs_scratch_v2/\", type=str, help=\"pretrained checkpoint directory\"\n)\nparser.add_argument(\"--data_dir\", default=\"/data/linshan/CTs/BTCV/\", type=str, help=\"dataset directory\")\nparser.add_argument(\"--exp_name\", default=\"BTCV_0.8451\", type=str, help=\"experiment name\")\nparser.add_argument(\"--json_list\", default=\"dataset_0.json\", type=str, help=\"dataset json file\")\nparser.add_argument(\n    
\"--pretrained_model_name\",\n    default=\"model_0.8451.pt\",\n    type=str,\n    help=\"pretrained model name\",\n)\nroi=96\nparser.add_argument(\"--use_normal_dataset\", default=True, help=\"use monai Dataset class\")\nparser.add_argument(\"--feature_size\", default=48, type=int, help=\"feature size\")\nparser.add_argument(\"--batch_size\", default=1, type=int, help=\"number of batch size\")\nparser.add_argument(\"--sw_batch_size\", default=4, type=int, help=\"number of sliding window batch size\")\nparser.add_argument(\"--infer_overlap\", default=0.75, type=float, help=\"sliding window inference overlap\")\nparser.add_argument(\"--in_channels\", default=1, type=int, help=\"number of input channels\")\nparser.add_argument(\"--out_channels\", default=14, type=int, help=\"number of output channels\")\nparser.add_argument(\"--a_min\", default=-175.0, type=float, help=\"a_min in ScaleIntensityRanged\")\nparser.add_argument(\"--a_max\", default=250.0, type=float, help=\"a_max in ScaleIntensityRanged\")\nparser.add_argument(\"--b_min\", default=0.0, type=float, help=\"b_min in ScaleIntensityRanged\")\nparser.add_argument(\"--b_max\", default=1.0, type=float, help=\"b_max in ScaleIntensityRanged\")\nparser.add_argument(\"--space_x\", default=1.5, type=float, help=\"spacing in x direction\")\nparser.add_argument(\"--space_y\", default=1.5, type=float, help=\"spacing in y direction\")\nparser.add_argument(\"--space_z\", default=1.5, type=float, help=\"spacing in z direction\")\nparser.add_argument(\"--roi_x\", default=roi, type=int, help=\"roi size in x direction\")\nparser.add_argument(\"--roi_y\", default=roi, type=int, help=\"roi size in y direction\")\nparser.add_argument(\"--roi_z\", default=roi, type=int, help=\"roi size in z direction\")\nparser.add_argument(\"--dropout_rate\", default=0.0, type=float, help=\"dropout rate\")\nparser.add_argument(\"--distributed\", action=\"store_true\", help=\"start distributed training\")\nparser.add_argument(\"--workers\", 
default=8, type=int, help=\"number of workers\")\nparser.add_argument(\"--RandFlipd_prob\", default=0.2, type=float, help=\"RandFlipd aug probability\")\nparser.add_argument(\"--RandRotate90d_prob\", default=0.2, type=float, help=\"RandRotate90d aug probability\")\nparser.add_argument(\"--RandScaleIntensityd_prob\", default=0.1, type=float, help=\"RandScaleIntensityd aug probability\")\nparser.add_argument(\"--RandShiftIntensityd_prob\", default=0.1, type=float, help=\"RandShiftIntensityd aug probability\")\nparser.add_argument(\"--spatial_dims\", default=3, type=int, help=\"spatial dimension of input data\")\nparser.add_argument(\"--use_checkpoint\", default=True, help=\"use gradient checkpointing to save memory\")\n\n\ndef main():\n    args = parser.parse_args()\n    args.test_mode = True\n    output_directory = \"./outputs/\" + args.exp_name\n    if not os.path.exists(output_directory):\n        os.makedirs(output_directory)\n    val_loader = get_loader(args)\n    pretrained_dir = args.pretrained_dir\n    model_name = args.pretrained_model_name\n    device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n    pretrained_pth = os.path.join(pretrained_dir, model_name)\n    model = SwinUNETR(\n        img_size=(args.roi_x, args.roi_y, args.roi_z),\n        in_channels=args.in_channels,\n        out_channels=args.out_channels,\n        feature_size=args.feature_size,\n        drop_rate=0.0,\n        attn_drop_rate=0.0,\n        dropout_path_rate=0.0,\n        use_checkpoint=args.use_checkpoint,\n        use_v2=True\n    )\n    inf_size = [args.roi_x, args.roi_y, args.roi_z]\n    model_inferer = partial(\n        sliding_window_inference,\n        roi_size=inf_size,\n        sw_batch_size=args.sw_batch_size,\n        predictor=model,\n        overlap=args.infer_overlap,\n    )\n\n    model_dict = torch.load(pretrained_pth)[\"state_dict\"]\n    model.load_state_dict(model_dict, strict=True)\n    model.eval()\n    model.to(device)\n\n    acc_func = 
DiceMetric(include_background=False, reduction=MetricReduction.MEAN, get_not_nans=True)\n    run_acc = AverageMeter()\n    post_label = AsDiscrete(to_onehot=args.out_channels)\n    post_pred = AsDiscrete(argmax=True, to_onehot=args.out_channels)\n\n    with torch.no_grad():\n        all_dice = None\n        num = np.zeros(13)\n        dice_list_case = []\n        for idx, batch_data in enumerate(val_loader):\n            # img_name = batch_data[\"image_meta_dict\"][\"filename_or_obj\"][0].split(\"/\")[-1]\n\n            if isinstance(batch_data, list):\n                data, target = batch_data\n            else:\n                data, target = batch_data[\"image\"], batch_data[\"label\"]\n            data, target = data.cuda(), target.cuda()\n\n            print(data.shape, target.shape)\n            z = data.shape[-1]\n            data = F.interpolate(data, size=(263, 218, z), mode='trilinear')\n            target = F.interpolate(target, size=(263, 218, z), mode='nearest')\n            print(data.shape, target.shape)\n\n            with autocast(enabled=True):\n                if model_inferer is not None:\n                    logits = model_inferer(data)\n                else:\n                    logits = model(data)\n            if not logits.is_cuda:\n                target = target.cpu()\n\n            val_labels_list = decollate_batch(target)\n            val_labels_convert = [post_label(val_label_tensor) for val_label_tensor in val_labels_list]\n            val_outputs_list = decollate_batch(logits)\n            val_output_convert = [post_pred(val_pred_tensor) for val_pred_tensor in val_outputs_list]\n            acc_func.reset()\n            acc_func(y_pred=val_output_convert, y=val_labels_convert)\n            acc, not_nans = acc_func.aggregate()\n            run_acc.update(acc.cpu().numpy(), n=not_nans.cpu().numpy())\n            print(np.mean(run_acc.avg))\n\n            # # save predict\n            # print(logits.shape)\n            # val_outputs = 
torch.argmax(logits, 1).cpu().numpy()\n            # np.save(os.path.join(output_directory, 'pre'+ img_name[3:-7]+'.npy'), val_outputs.astype(np.uint8)[0])\n            # # save label\n            # val_labels = target.cpu().numpy()\n            # np.save(os.path.join(output_directory, 'label' + img_name[3:-7] + '.npy'), val_labels.astype(np.uint8)[0][0])\n            #\n            # # save input\n            # img = data.cpu().numpy()\n            # img = img * 255\n            # print(np.max(img))\n            # np.save(os.path.join(output_directory, 'img' + img_name[3:-7] + '.npy'), img.astype(np.uint8)[0][0])\n\n\nif __name__ == \"__main__\":\n    main()\n\n    # outputs = torch.argmax(logits, 1).cpu().numpy()\n    # outputs = outputs.astype(np.uint8)[0]\n    # val_labels = target.cpu().numpy()[0, 0, :, :, :]\n    #\n    # len_class = len(list(np.unique(val_labels))) - 1\n    # dice_list_sub = []\n    # for i in range(1, 14):\n    #     # judge this class exist or not, ignore background\n    #     num[i - 1] += (np.sum(val_labels == i) > 0).astype(np.uint8)\n    #     organ_Dice = dice(outputs == i, val_labels == i)\n    #     dice_list_sub.append(organ_Dice)\n    #\n    # mean_dice = np.sum(dice_list_sub) / len_class\n    # print(\"Mean Organ Dice: {}\".format(mean_dice))\n    #\n    # # acc of each organ\n    # print(\"Organ Dice:\", dice_list_sub)\n    #\n    # if all_dice is None:\n    #     all_dice = (np.asarray(dice_list_sub)).copy()\n    # else:\n    #     all_dice = all_dice + np.asarray(dice_list_sub)\n    # print(\"Organ Dice accumulate:\", all_dice*100 / num)\n    #\n    # dice_list_case.append(mean_dice)\n    # print(\"Overall Mean Dice: {}\".format(100*np.mean(dice_list_case)))\n"
  },
  {
    "path": "Finetune/CC-CCII/csv/CC_CCII_fold0_train.csv",
    "content": "zip_file,target,label,patient_id,scan_id,n_slice,scan_count,all_scan_ids\r\nCP-20.zip,1,CP,2668,3259,52,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nCP-11.zip,1,CP,1436,3940,45,2,\"[3940, 3941]\"\r\nNCP-13.zip,2,NCP,364,1880,56,2,\"[1879, 1880]\"\r\nCP-9.zip,1,CP,1369,3790,67,2,\"[3790, 3791]\"\r\nNCP-21.zip,2,NCP,65,1263,128,2,\"[1263, 1264]\"\r\nCP-23.zip,1,CP,661,3023,116,1,[3023]\r\nCP-30.zip,1,CP,3937,5643,66,2,\"[5643, 5644]\"\r\nCP-25.zip,1,CP,8,3514,36,2,\"[3513, 3514]\"\r\nNCP-15.zip,2,NCP,421,1996,67,2,\"[1995, 1996]\"\r\nCP-25.zip,1,CP,738,3100,110,1,[3100]\r\nNCP-11.zip,2,NCP,304,1755,67,2,\"[1754, 1755]\"\r\nNCP-22.zip,2,NCP,834,2348,226,2,\"[2347, 2348]\"\r\nNormal-1.zip,0,Normal,1680,840,66,6,\"[839, 840, 841, 842, 843, 844]\"\r\nCP-13.zip,1,CP,1519,4141,68,2,\"[4141, 4142]\"\r\nNCP-12.zip,2,NCP,315,1777,107,2,\"[1777, 1778]\"\r\nNormal-2.zip,0,Normal,1753,1088,66,7,\"[1086, 1087, 1088, 1089, 1090, 1091, 1092]\"\r\nCP-8.zip,1,CP,1341,3722,57,1,[3722]\r\nCP-13.zip,1,CP,1491,4075,48,3,\"[4074, 4075, 4076]\"\r\nCP-28.zip,1,CP,3785,5729,28,1,[5729]\r\nNCP-6.zip,2,NCP,212,1568,165,2,\"[1568, 1569]\"\r\nCP-12.zip,1,CP,1477,4035,54,2,\"[4035, 4036]\"\r\nCP-16.zip,1,CP,1605,4293,23,1,[4293]\r\nNCP-29.zip,2,NCP,926,2468,24,1,[2468]\r\nCP-10.zip,1,CP,1394,3847,62,2,\"[3847, 3848]\"\r\nNCP-21.zip,2,NCP,580,2318,58,2,\"[2317, 2318]\"\r\nNCP-19.zip,2,NCP,526,2208,137,2,\"[2208, 2209]\"\r\nCP-13.zip,1,CP,1494,4085,65,3,\"[4083, 4084, 4085]\"\r\nNormal-27.zip,0,Normal,3895,5421,71,4,\"[5418, 5419, 5420, 5421]\"\r\nNCP-8.zip,2,NCP,267,1680,129,2,\"[1680, 1681]\"\r\nNCP-18.zip,2,NCP,49,1232,61,2,\"[1231, 1232]\"\r\nCP-21.zip,1,CP,589,2951,300,1,[2951]\r\nCP-25.zip,1,CP,8,3513,42,2,\"[3513, 3514]\"\r\nCP-27.zip,1,CP,3765,5709,20,1,[5709]\r\nNCP-4.zip,2,NCP,147,1438,173,2,\"[1438, 1439]\"\r\nNormal-27.zip,0,Normal,3904,5436,82,1,[5436]\r\nNCP-14.zip,2,NCP,384,1921,54,2,\"[1920, 
1921]\"\r\nCP-18.zip,1,CP,1780,3560,69,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nCP-14.zip,1,CP,1522,4148,61,2,\"[4148, 4149]\"\r\nNCP-8.zip,2,NCP,256,1658,139,2,\"[1658, 1659]\"\r\nCP-10.zip,1,CP,1406,3874,60,2,\"[3874, 3875]\"\r\nCP-4.zip,1,CP,1177,3395,210,1,[3395]\r\nNormal-1.zip,0,Normal,1673,804,291,6,\"[804, 805, 806, 807, 808, 809]\"\r\nNCP-2.zip,2,NCP,122,1385,149,2,\"[1385, 1386]\"\r\nCP-9.zip,1,CP,1354,3752,46,3,\"[3751, 3752, 3753]\"\r\nNCP-23.zip,2,NCP,922,2464,240,1,[2464]\r\nCP-20.zip,1,CP,2668,3251,58,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nNormal-6.zip,0,Normal,1796,251,96,1,[251]\r\nCP-9.zip,1,CP,1374,3803,50,2,\"[3802, 3803]\"\r\nNCP-7.zip,2,NCP,237,1620,61,2,\"[1619, 1620]\"\r\nNCP-13.zip,2,NCP,363,1878,58,2,\"[1877, 1878]\"\r\nCP-1.zip,1,CP,1084,3130,67,1,[3130]\r\nNormal-14.zip,0,Normal,2082,537,78,1,[537]\r\nCP-18.zip,1,CP,1656,4344,26,1,[4344]\r\nNCP-18.zip,2,NCP,491,2138,149,2,\"[2138, 2139]\"\r\nCP-22.zip,1,CP,609,2971,76,1,[2971]\r\nNormal-18.zip,0,Normal,2198,653,88,1,[653]\r\nNCP-6.zip,2,NCP,212,1569,69,2,\"[1568, 1569]\"\r\nCP-21.zip,1,CP,607,2969,178,1,[2969]\r\nNCP-9.zip,2,NCP,269,1685,64,2,\"[1684, 1685]\"\r\nCP-9.zip,1,CP,1364,3777,56,3,\"[3776, 3777, 3778]\"\r\nCP-17.zip,1,CP,1622,4310,27,1,[4310]\r\nCP-16.zip,1,CP,1601,4289,19,1,[4289]\r\nCP-10.zip,1,CP,1388,3832,51,2,\"[3831, 3832]\"\r\nNormal-27.zip,0,Normal,3908,5442,56,1,[5442]\r\nCP-25.zip,1,CP,732,3094,159,1,[3094]\r\nNCP-14.zip,2,NCP,40,1212,149,2,\"[1212, 1213]\"\r\nNCP-21.zip,2,NCP,65,1264,54,2,\"[1263, 1264]\"\r\nCP-12.zip,1,CP,1477,4036,54,2,\"[4035, 4036]\"\r\nNormal-10.zip,0,Normal,1953,408,94,1,[408]\r\nCP-15.zip,1,CP,1577,4265,22,1,[4265]\r\nNormal-14.zip,0,Normal,2055,510,91,1,[510]\r\nNormal-17.zip,0,Normal,2154,609,94,1,[609]\r\nNormal-27.zip,0,Normal,3895,5418,61,4,\"[5418, 5419, 5420, 
5421]\"\r\nNormal-19.zip,0,Normal,2227,682,73,1,[682]\r\nNormal-11.zip,0,Normal,1975,430,101,1,[430]\r\nCP-15.zip,1,CP,1584,4272,20,1,[4272]\r\nNormal-20.zip,0,Normal,2262,717,84,1,[717]\r\nCP-14.zip,1,CP,1543,4200,190,3,\"[4200, 4201, 4202]\"\r\nNormal-3.zip,0,Normal,753,188,300,1,[188]\r\nCP-12.zip,1,CP,1475,4032,50,2,\"[4031, 4032]\"\r\nNCP-16.zip,2,NCP,458,2071,55,2,\"[2070, 2071]\"\r\nNCP-5.zip,2,NCP,180,1504,136,2,\"[1504, 1505]\"\r\nCP-30.zip,1,CP,3938,5645,94,1,[5645]\r\nCP-9.zip,1,CP,1364,3778,56,3,\"[3776, 3777, 3778]\"\r\nNormal-23.zip,0,Normal,2632,142,39,1,[142]\r\nNormal-5.zip,0,Normal,810,245,324,1,[245]\r\nNCP-5.zip,2,NCP,174,1493,56,2,\"[1492, 1493]\"\r\nCP-17.zip,1,CP,1632,4320,23,1,[4320]\r\nNCP-2.zip,2,NCP,112,1366,56,2,\"[1365, 1366]\"\r\nCP-18.zip,1,CP,1780,3554,67,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nCP-20.zip,1,CP,2668,3252,51,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nNormal-6.zip,0,Normal,1820,275,83,1,[275]\r\nNormal-1.zip,0,Normal,1673,809,57,6,\"[804, 805, 806, 807, 808, 809]\"\r\nNormal-18.zip,0,Normal,2204,659,94,1,[659]\r\nCP-14.zip,1,CP,1531,4169,59,2,\"[4169, 4170]\"\r\nCP-12.zip,1,CP,1474,4030,62,2,\"[4029, 4030]\"\r\nNormal-18.zip,0,Normal,2215,670,80,1,[670]\r\nNCP-21.zip,2,NCP,579,2315,150,2,\"[2315, 2316]\"\r\nNCP-28.zip,2,NCP,854,2374,265,1,[2374]\r\nNormal-25.zip,0,Normal,3838,5350,201,1,[5350]\r\nCP-9.zip,1,CP,1352,3747,61,1,[3747]\r\nNormal-1.zip,0,Normal,1719,994,76,2,\"[993, 994]\"\r\nNCP-28.zip,2,NCP,852,2372,47,2,\"[2371, 2372]\"\r\nNormal-19.zip,0,Normal,2225,680,94,1,[680]\r\nNormal-16.zip,0,Normal,2148,603,86,1,[603]\r\nNCP-19.zip,2,NCP,544,2245,147,2,\"[2245, 2246]\"\r\nCP-29.zip,1,CP,3826,5770,26,1,[5770]\r\nNCP-7.zip,2,NCP,229,1602,156,2,\"[1602, 1603]\"\r\nNormal-1.zip,0,Normal,1673,807,283,6,\"[804, 805, 806, 807, 808, 
809]\"\r\nNormal-6.zip,0,Normal,1823,278,85,1,[278]\r\nNCP-27.zip,2,NCP,824,2335,259,1,[2335]\r\nCP-18.zip,1,CP,1776,3535,64,6,\"[3534, 3535, 3536, 3537, 3538, 3539]\"\r\nNCP-18.zip,2,NCP,513,2183,68,2,\"[2182, 2183]\"\r\nCP-30.zip,1,CP,3934,5639,77,3,\"[5638, 5639, 5640]\"\r\nCP-4.zip,1,CP,1168,3386,203,1,[3386]\r\nNCP-12.zip,2,NCP,323,1794,116,2,\"[1794, 1795]\"\r\nCP-8.zip,1,CP,1340,3720,64,2,\"[3720, 3721]\"\r\nCP-5.zip,1,CP,1223,3441,232,1,[3441]\r\nNCP-4.zip,2,NCP,166,1477,58,2,\"[1476, 1477]\"\r\nNCP-6.zip,2,NCP,219,1583,65,2,\"[1582, 1583]\"\r\nNCP-1.zip,2,NCP,101,1340,57,2,\"[1339, 1340]\"\r\nNCP-11.zip,2,NCP,298,1742,145,2,\"[1742, 1743]\"\r\nNormal-1.zip,0,Normal,1684,874,71,5,\"[870, 871, 873, 874, 875]\"\r\nCP-14.zip,1,CP,1554,4227,41,2,\"[4226, 4227]\"\r\nNCP-18.zip,2,NCP,489,2134,139,2,\"[2134, 2135]\"\r\nNormal-23.zip,0,Normal,2615,125,36,1,[125]\r\nNCP-8.zip,2,NCP,2674,2693,45,1,[2693]\r\nNCP-6.zip,2,NCP,226,1596,142,2,\"[1596, 1597]\"\r\nNCP-10.zip,2,NCP,274,1695,67,2,\"[1694, 1695]\"\r\nNormal-10.zip,0,Normal,1944,399,97,1,[399]\r\nCP-6.zip,1,CP,1236,3454,159,1,[3454]\r\nCP-20.zip,1,CP,2668,3257,53,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nCP-23.zip,1,CP,670,3032,78,1,[3032]\r\nNCP-20.zip,2,NCP,548,2253,144,2,\"[2253, 2254]\"\r\nCP-18.zip,1,CP,1769,3516,23,1,[3516]\r\nNormal-3.zip,0,Normal,754,189,308,1,[189]\r\nNCP-7.zip,2,NCP,239,1623,146,2,\"[1623, 1624]\"\r\nNCP-14.zip,2,NCP,392,1935,58,2,\"[1934, 1935]\"\r\nNormal-6.zip,0,Normal,1824,279,86,1,[279]\r\nNormal-2.zip,0,Normal,1753,1087,77,7,\"[1086, 1087, 1088, 1089, 1090, 1091, 1092]\"\r\nNCP-30.zip,2,NCP,997,2554,49,2,\"[2553, 2554]\"\r\nCP-26.zip,1,CP,3727,5663,42,1,[5663]\r\nCP-11.zip,1,CP,1433,3934,62,2,\"[3934, 3935]\"\r\nNormal-18.zip,0,Normal,2187,642,92,1,[642]\r\nNCP-2.zip,2,NCP,112,1365,133,2,\"[1365, 1366]\"\r\nNCP-6.zip,2,NCP,219,1582,156,2,\"[1582, 1583]\"\r\nNormal-10.zip,0,Normal,1939,394,93,1,[394]\r\nCP-18.zip,1,CP,1775,3532,57,4,\"[3530, 
3531, 3532, 3533]\"\r\nCP-2.zip,1,CP,11,3165,268,7,\"[3161, 3162, 3163, 3164, 3165, 3166, 3167]\"\r\nNormal-12.zip,0,Normal,2012,467,102,1,[467]\r\nCP-21.zip,1,CP,587,2949,151,1,[2949]\r\nNormal-15.zip,0,Normal,2116,571,92,1,[571]\r\nCP-1.zip,1,CP,10,3156,289,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]\"\r\nNormal-27.zip,0,Normal,3895,5419,61,4,\"[5418, 5419, 5420, 5421]\"\r\nNormal-25.zip,0,Normal,3854,5366,197,1,[5366]\r\nNormal-4.zip,0,Normal,771,206,306,1,[206]\r\nNCP-3.zip,2,NCP,129,1403,132,2,\"[1403, 1404]\"\r\nNormal-13.zip,0,Normal,2042,497,90,1,[497]\r\nNormal-2.zip,0,Normal,1753,1090,296,7,\"[1086, 1087, 1088, 1089, 1090, 1091, 1092]\"\r\nNCP-17.zip,2,NCP,478,2111,145,2,\"[2111, 2112]\"\r\nNormal-17.zip,0,Normal,2171,626,92,1,[626]\r\nCP-10.zip,1,CP,1410,3884,51,2,\"[3883, 3884]\"\r\nCP-3.zip,1,CP,1140,3358,370,1,[3358]\r\nNCP-22.zip,2,NCP,885,2422,52,2,\"[2422, 2423]\"\r\nNCP-27.zip,2,NCP,1050,2624,428,2,\"[2623, 2624]\"\r\nNCP-17.zip,2,NCP,478,2112,61,2,\"[2111, 2112]\"\r\nCP-20.zip,1,CP,2668,3254,47,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nNCP-16.zip,2,NCP,433,2019,120,2,\"[2019, 2020]\"\r\nNCP-19.zip,2,NCP,517,2191,58,2,\"[2190, 2191]\"\r\nNormal-24.zip,0,Normal,2657,167,27,1,[167]\r\nCP-8.zip,1,CP,1339,3718,59,2,\"[3718, 3719]\"\r\nNCP-17.zip,2,NCP,482,2119,139,2,\"[2119, 2120]\"\r\nCP-17.zip,1,CP,1635,4323,27,1,[4323]\r\nNormal-10.zip,0,Normal,1930,385,98,1,[385]\r\nNormal-1.zip,0,Normal,1679,837,70,6,\"[833, 834, 835, 836, 837, 838]\"\r\nNCP-25.zip,2,NCP,3942,5539,37,1,[5539]\r\nNormal-17.zip,0,Normal,2180,635,95,1,[635]\r\nNormal-1.zip,0,Normal,1680,839,66,6,\"[839, 840, 841, 842, 843, 844]\"\r\nNormal-1.zip,0,Normal,1705,965,69,2,\"[965, 966]\"\r\nNCP-5.zip,2,NCP,174,1492,134,2,\"[1492, 1493]\"\r\nNCP-14.zip,2,NCP,386,1923,62,1,[1923]\r\nCP-22.zip,1,CP,625,2987,100,1,[2987]\r\nCP-20.zip,1,CP,2450,2929,90,2,\"[2928, 
2929]\"\r\nNormal-10.zip,0,Normal,1949,404,92,1,[404]\r\nCP-14.zip,1,CP,1546,4208,58,2,\"[4208, 4209]\"\r\nNCP-21.zip,2,NCP,63,1260,58,2,\"[1259, 1260]\"\r\nNormal-23.zip,0,Normal,2624,134,38,1,[134]\r\nNCP-10.zip,2,NCP,272,1690,153,2,\"[1690, 1691]\"\r\nCP-5.zip,1,CP,1209,3427,313,1,[3427]\r\nNCP-11.zip,2,NCP,293,1731,122,2,\"[1731, 1732]\"\r\nCP-9.zip,1,CP,1383,3822,71,2,\"[3821, 3822]\"\r\nNormal-4.zip,0,Normal,793,228,94,1,[228]\r\nNCP-2.zip,2,NCP,1057,2633,570,1,[2633]\r\nNormal-1.zip,0,Normal,1679,835,67,6,\"[833, 834, 835, 836, 837, 838]\"\r\nCP-4.zip,1,CP,1185,3403,131,1,[3403]\r\nCP-11.zip,1,CP,1446,3965,63,2,\"[3965, 3966]\"\r\nCP-15.zip,1,CP,1576,4264,23,1,[4264]\r\nCP-12.zip,1,CP,1487,4062,68,3,\"[4061, 4062, 4063]\"\r\nCP-9.zip,1,CP,1381,3817,66,3,\"[3815, 3816, 3817]\"\r\nCP-28.zip,1,CP,3767,5711,17,1,[5711]\r\nNormal-23.zip,0,Normal,2610,120,41,1,[120]\r\nCP-10.zip,1,CP,1394,3848,62,2,\"[3847, 3848]\"\r\nNCP-4.zip,2,NCP,160,1465,61,2,\"[1464, 1465]\"\r\nCP-14.zip,1,CP,1543,4201,57,3,\"[4200, 4201, 4202]\"\r\nCP-23.zip,1,CP,652,3014,277,1,[3014]\r\nCP-16.zip,1,CP,1607,4295,17,1,[4295]\r\nNormal-18.zip,0,Normal,2213,668,84,1,[668]\r\nNormal-16.zip,0,Normal,2121,576,87,1,[576]\r\nNormal-23.zip,0,Normal,2627,137,41,1,[137]\r\nNCP-21.zip,2,NCP,582,2322,54,2,\"[2321, 2322]\"\r\nCP-19.zip,1,CP,2431,2893,361,1,[2893]\r\nNormal-1.zip,0,Normal,1717,989,67,2,\"[989, 990]\"\r\nCP-10.zip,1,CP,1385,3825,64,2,\"[3825, 3826]\"\r\nCP-5.zip,1,CP,1198,3416,162,1,[3416]\r\nNCP-21.zip,2,NCP,578,2314,55,2,\"[2313, 2314]\"\r\nNCP-20.zip,2,NCP,56,1246,68,2,\"[1245, 1246]\"\r\nNCP-19.zip,2,NCP,532,2222,139,2,\"[2222, 2223]\"\r\nNormal-21.zip,0,Normal,2283,738,87,1,[738]\r\nNormal-19.zip,0,Normal,2222,677,78,1,[677]\r\nCP-9.zip,1,CP,1361,3770,50,2,\"[3770, 3771]\"\r\nNCP-15.zip,2,NCP,420,1993,177,2,\"[1993, 1994]\"\r\nCP-18.zip,1,CP,1776,3538,76,6,\"[3534, 3535, 3536, 3537, 3538, 3539]\"\r\nNormal-1.zip,0,Normal,1706,968,64,2,\"[967, 
968]\"\r\nCP-20.zip,1,CP,2668,3253,51,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nNCP-5.zip,2,NCP,171,1486,143,2,\"[1486, 1487]\"\r\nNormal-3.zip,0,Normal,750,185,281,1,[185]\r\nCP-18.zip,1,CP,1780,3565,80,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nNCP-13.zip,2,NCP,362,1876,63,2,\"[1875, 1876]\"\r\nCP-6.zip,1,CP,1234,3452,191,1,[3452]\r\nNormal-1.zip,0,Normal,1684,873,133,5,\"[870, 871, 873, 874, 875]\"\r\nNormal-6.zip,0,Normal,1812,267,99,1,[267]\r\nNCP-17.zip,2,NCP,474,2103,114,2,\"[2103, 2104]\"\r\nNormal-7.zip,0,Normal,1857,312,80,1,[312]\r\nNormal-12.zip,0,Normal,1992,447,104,1,[447]\r\nCP-18.zip,1,CP,1664,4352,20,1,[4352]\r\nNormal-27.zip,0,Normal,3895,5420,71,4,\"[5418, 5419, 5420, 5421]\"\r\nNCP-19.zip,2,NCP,517,2190,139,2,\"[2190, 2191]\"\r\nNormal-23.zip,0,Normal,2625,135,39,1,[135]\r\nNormal-5.zip,0,Normal,811,246,124,1,[246]\r\nCP-4.zip,1,CP,1162,3380,212,1,[3380]\r\nCP-22.zip,1,CP,611,2973,76,1,[2973]\r\nCP-9.zip,1,CP,1381,3815,261,3,\"[3815, 3816, 3817]\"\r\nCP-9.zip,1,CP,1371,3794,200,3,\"[3794, 3795, 3796]\"\r\nNCP-16.zip,2,NCP,432,2017,128,2,\"[2017, 2018]\"\r\nNormal-20.zip,0,Normal,2278,733,90,1,[733]\r\nNormal-19.zip,0,Normal,2240,695,78,1,[695]\r\nCP-28.zip,1,CP,3786,5730,29,1,[5730]\r\nNormal-15.zip,0,Normal,2097,552,89,1,[552]\r\nNCP-18.zip,2,NCP,500,2156,162,2,\"[2156, 2157]\"\r\nCP-9.zip,1,CP,1374,3802,50,2,\"[3802, 3803]\"\r\nNormal-23.zip,0,Normal,2606,116,33,1,[116]\r\nCP-26.zip,1,CP,3651,5550,395,1,[5550]\r\nNormal-9.zip,0,Normal,1912,367,92,1,[367]\r\nNCP-25.zip,2,NCP,3953,5466,44,1,[5466]\r\nCP-25.zip,1,CP,724,3086,100,1,[3086]\r\nNormal-21.zip,0,Normal,2292,747,82,1,[747]\r\nCP-7.zip,1,CP,1262,3480,384,1,[3480]\r\nNormal-10.zip,0,Normal,1931,386,80,1,[386]\r\nNCP-20.zip,2,NCP,563,2284,141,2,\"[2284, 2285]\"\r\nCP-2.zip,1,CP,1123,3341,213,1,[3341]\r\nNCP-17.zip,2,NCP,486,2127,153,2,\"[2127, 2128]\"\r\nCP-26.zip,1,CP,3733,5673,32,3,\"[5673, 5674, 
5675]\"\r\nCP-3.zip,1,CP,1152,3370,69,1,[3370]\r\nNCP-28.zip,2,NCP,838,2353,89,1,[2353]\r\nNormal-1.zip,0,Normal,1717,990,67,2,\"[989, 990]\"\r\nNCP-30.zip,2,NCP,997,2553,54,2,\"[2553, 2554]\"\r\nNCP-17.zip,2,NCP,48,1230,61,2,\"[1229, 1230]\"\r\nNCP-17.zip,2,NCP,467,2089,138,2,\"[2089, 2090]\"\r\nNCP-20.zip,2,NCP,564,2286,143,2,\"[2286, 2287]\"\r\nNormal-7.zip,0,Normal,1854,309,82,1,[309]\r\nNormal-2.zip,0,Normal,1747,1065,60,1,[1065]\r\nNCP-19.zip,2,NCP,535,2228,47,2,\"[2227, 2228]\"\r\nNCP-26.zip,2,NCP,3974,5508,52,1,[5508]\r\nNormal-7.zip,0,Normal,1829,284,92,1,[284]\r\nNormal-1.zip,0,Normal,1673,808,57,6,\"[804, 805, 806, 807, 808, 809]\"\r\nNCP-2.zip,2,NCP,1271,2712,56,1,[2712]\r\nCP-30.zip,1,CP,3934,5638,59,3,\"[5638, 5639, 5640]\"\r\nNCP-26.zip,2,NCP,3979,5486,52,1,[5486]\r\nNCP-20.zip,2,NCP,554,2265,128,2,\"[2265, 2266]\"\r\nNCP-6.zip,2,NCP,221,1587,53,2,\"[1586, 1587]\"\r\nNCP-20.zip,2,NCP,558,2273,119,2,\"[2273, 2274]\"\r\nCP-8.zip,1,CP,1321,3678,58,2,\"[3678, 3679]\"\r\nNCP-6.zip,2,NCP,226,1597,60,2,\"[1596, 1597]\"\r\nNCP-21.zip,2,NCP,76,1286,51,2,\"[1285, 1286]\"\r\nNCP-1.zip,2,NCP,1042,2613,143,2,\"[2613, 2614]\"\r\nNCP-13.zip,2,NCP,366,1884,67,2,\"[1883, 1884]\"\r\nNCP-18.zip,2,NCP,490,2136,147,2,\"[2136, 2137]\"\r\nNCP-28.zip,2,NCP,856,2376,227,2,\"[2376, 2377]\"\r\nCP-19.zip,1,CP,2445,2920,283,2,\"[2920, 2921]\"\r\nNormal-1.zip,0,Normal,1673,806,59,6,\"[804, 805, 806, 807, 808, 809]\"\r\nCP-25.zip,1,CP,9,3151,72,4,\"[3148, 3149, 3150, 3151]\"\r\nNormal-25.zip,0,Normal,3847,5359,219,1,[5359]\r\nNormal-12.zip,0,Normal,2005,460,77,1,[460]\r\nCP-30.zip,1,CP,3936,5642,59,1,[5642]\r\nNCP-12.zip,2,NCP,326,1800,117,2,\"[1800, 1801]\"\r\nNormal-13.zip,0,Normal,2045,500,85,1,[500]\r\nCP-15.zip,1,CP,1583,4271,18,1,[4271]\r\nNormal-20.zip,0,Normal,2261,716,83,1,[716]\r\nNormal-20.zip,0,Normal,2276,731,91,1,[731]\r\nCP-18.zip,1,CP,1776,3536,75,6,\"[3534, 3535, 3536, 3537, 3538, 
3539]\"\r\nNCP-27.zip,2,NCP,1034,2605,19,1,[2605]\r\nNCP-16.zip,2,NCP,445,2044,139,2,\"[2044, 2045]\"\r\nCP-12.zip,1,CP,1461,4001,53,2,\"[4000, 4001]\"\r\nCP-12.zip,1,CP,1485,4056,114,3,\"[4056, 4057, 4058]\"\r\nNCP-7.zip,2,NCP,231,1606,139,2,\"[1606, 1607]\"\r\nNCP-13.zip,2,NCP,343,1838,55,2,\"[1837, 1838]\"\r\nNCP-6.zip,2,NCP,202,1548,161,2,\"[1548, 1549]\"\r\nNormal-17.zip,0,Normal,2160,615,96,1,[615]\r\nCP-28.zip,1,CP,3780,5724,27,1,[5724]\r\nCP-9.zip,1,CP,1354,3753,46,3,\"[3751, 3752, 3753]\"\r\nCP-16.zip,1,CP,1598,4286,23,1,[4286]\r\nCP-19.zip,1,CP,2445,2921,119,2,\"[2920, 2921]\"\r\nCP-9.zip,1,CP,1361,3771,50,2,\"[3770, 3771]\"\r\nNCP-15.zip,2,NCP,412,1974,54,2,\"[1973, 1974]\"\r\nNormal-8.zip,0,Normal,1861,316,76,1,[316]\r\nNormal-3.zip,0,Normal,1766,1150,57,3,\"[1149, 1150, 1151]\"\r\nNormal-17.zip,0,Normal,2182,637,96,1,[637]\r\nNormal-7.zip,0,Normal,1833,288,102,1,[288]\r\nNormal-9.zip,0,Normal,1894,349,99,1,[349]\r\nNormal-22.zip,0,Normal,2319,774,101,1,[774]\r\nNormal-1.zip,0,Normal,1680,844,64,6,\"[839, 840, 841, 842, 843, 844]\"\r\nCP-24.zip,1,CP,679,3041,94,1,[3041]\r\nCP-30.zip,1,CP,3832,5776,23,1,[5776]\r\nCP-25.zip,1,CP,720,3082,84,1,[3082]\r\nNormal-19.zip,0,Normal,2235,690,89,1,[690]\r\nCP-11.zip,1,CP,1429,3927,52,2,\"[3926, 3927]\"\r\nNormal-7.zip,0,Normal,1835,290,83,1,[290]\r\nNCP-7.zip,2,NCP,239,1624,61,2,\"[1623, 1624]\"\r\nNormal-27.zip,0,Normal,3899,5430,76,2,\"[5429, 5430]\"\r\nCP-4.zip,1,CP,1165,3383,151,1,[3383]\r\nNCP-3.zip,2,NCP,1297,2738,56,1,[2738]\r\nNCP-22.zip,2,NCP,832,2345,25,1,[2345]\r\nNCP-25.zip,2,NCP,3952,5505,46,1,[5505]\r\nNCP-26.zip,2,NCP,3977,5509,56,1,[5509]\r\nCP-16.zip,1,CP,1609,4297,20,1,[4297]\r\nNormal-21.zip,0,Normal,2294,749,103,1,[749]\r\nNCP-25.zip,2,NCP,3967,5507,46,1,[5507]\r\nCP-13.zip,1,CP,1495,4089,48,4,\"[4086, 4087, 4088, 4089]\"\r\nCP-7.zip,1,CP,1317,3672,58,3,\"[3670, 3671, 
3672]\"\r\nNormal-26.zip,0,Normal,3877,5389,25,1,[5389]\r\nCP-20.zip,1,CP,2766,3297,41,1,[3297]\r\nCP-18.zip,1,CP,1661,4349,32,1,[4349]\r\nNCP-19.zip,2,NCP,535,2227,112,2,\"[2227, 2228]\"\r\nCP-2.zip,1,CP,1120,3338,159,1,[3338]\r\nNCP-2.zip,2,NCP,118,1377,142,2,\"[1377, 1378]\"\r\nNormal-7.zip,0,Normal,1843,298,96,1,[298]\r\nNCP-15.zip,2,NCP,400,1950,155,1,[1950]\r\nNCP-25.zip,2,NCP,3704,5531,60,1,[5531]\r\nNormal-15.zip,0,Normal,2095,550,99,1,[550]\r\nNormal-1.zip,0,Normal,1684,870,68,5,\"[870, 871, 873, 874, 875]\"\r\nNCP-16.zip,2,NCP,44,1222,52,2,\"[1221, 1222]\"\r\nNCP-11.zip,2,NCP,31,1194,137,2,\"[1194, 1195]\"\r\nNCP-15.zip,2,NCP,409,1968,64,2,\"[1967, 1968]\"\r\nNCP-16.zip,2,NCP,451,2057,48,3,\"[2056, 2057, 2058]\"\r\nNormal-2.zip,0,Normal,1753,1086,77,7,\"[1086, 1087, 1088, 1089, 1090, 1091, 1092]\"\r\nNCP-8.zip,2,NCP,262,1670,139,2,\"[1670, 1671]\"\r\nNormal-10.zip,0,Normal,1955,410,93,1,[410]\r\nNormal-6.zip,0,Normal,1826,281,104,1,[281]\r\nNCP-28.zip,2,NCP,852,2371,47,2,\"[2371, 2372]\"\r\nNCP-27.zip,2,NCP,1000,2558,39,1,[2558]\r\nCP-1.zip,1,CP,1072,3115,52,1,[3115]\r\nNormal-13.zip,0,Normal,2052,507,71,1,[507]\r\nCP-7.zip,1,CP,1314,3663,30,2,\"[3663, 3664]\"\r\nNCP-21.zip,2,NCP,67,1267,70,2,\"[1266, 1267]\"\r\nNCP-3.zip,2,NCP,132,1409,117,1,[1409]\r\nNormal-18.zip,0,Normal,2205,660,91,1,[660]\r\nNormal-14.zip,0,Normal,2054,509,88,1,[509]\r\nNormal-5.zip,0,Normal,809,244,114,1,[244]\r\nNCP-27.zip,2,NCP,1029,2599,39,1,[2599]\r\nNCP-26.zip,2,NCP,3972,5481,58,1,[5481]\r\nNormal-13.zip,0,Normal,2026,481,85,1,[481]\r\nNCP-17.zip,2,NCP,47,1227,139,2,\"[1227, 1228]\"\r\nCP-27.zip,1,CP,3763,5707,20,1,[5707]\r\nNormal-6.zip,0,Normal,1798,253,93,1,[253]\r\nNCP-9.zip,2,NCP,2703,2669,41,1,[2669]\r\nCP-1.zip,1,CP,1071,3113,57,2,\"[3113, 3114]\"\r\nNCP-16.zip,2,NCP,430,2014,64,2,\"[2013, 2014]\"\r\nNCP-4.zip,2,NCP,144,1432,139,2,\"[1432, 
1433]\"\r\nNormal-4.zip,0,Normal,780,215,116,1,[215]\r\nNormal-12.zip,0,Normal,2020,475,88,1,[475]\r\nNCP-13.zip,2,NCP,366,1883,161,2,\"[1883, 1884]\"\r\nNormal-2.zip,0,Normal,1761,1127,18,5,\"[1125, 1126, 1127, 1128, 1129]\"\r\nNCP-29.zip,2,NCP,899,2441,42,2,\"[2440, 2441]\"\r\nCP-16.zip,1,CP,1612,4300,26,1,[4300]\r\nNCP-15.zip,2,NCP,412,1973,129,2,\"[1973, 1974]\"\r\nNCP-10.zip,2,NCP,2717,2710,42,1,[2710]\r\nCP-19.zip,1,CP,1792,3214,71,2,\"[3214, 3215]\"\r\nNormal-20.zip,0,Normal,2269,724,113,1,[724]\r\nCP-11.zip,1,CP,1451,3976,51,2,\"[3975, 3976]\"\r\nNormal-11.zip,0,Normal,1978,433,94,1,[433]\r\nNCP-3.zip,2,NCP,1282,2723,70,1,[2723]\r\nCP-23.zip,1,CP,654,3016,74,1,[3016]\r\nNCP-13.zip,2,NCP,345,1842,62,2,\"[1841, 1842]\"\r\nCP-22.zip,1,CP,610,2972,70,1,[2972]\r\nCP-29.zip,1,CP,3799,5743,23,1,[5743]\r\nNCP-18.zip,2,NCP,506,2168,124,2,\"[2168, 2169]\"\r\nNormal-19.zip,0,Normal,2218,673,84,1,[673]\r\nNCP-7.zip,2,NCP,243,1632,31,3,\"[1631, 1632, 1633]\"\r\nNCP-25.zip,2,NCP,3948,5504,50,1,[5504]\r\nCP-7.zip,1,CP,1312,3658,65,2,\"[3658, 3659]\"\r\nNCP-16.zip,2,NCP,451,2058,23,3,\"[2056, 2057, 2058]\"\r\nCP-12.zip,1,CP,1461,4000,53,2,\"[4000, 4001]\"\r\nCP-1.zip,1,CP,10,3154,297,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]\"\r\nCP-10.zip,1,CP,1388,3831,51,2,\"[3831, 3832]\"\r\nNormal-1.zip,0,Normal,1702,957,69,2,\"[957, 958]\"\r\nNormal-17.zip,0,Normal,2181,636,100,1,[636]\r\nNCP-19.zip,2,NCP,521,2198,139,2,\"[2198, 2199]\"\r\nNormal-9.zip,0,Normal,1922,377,87,1,[377]\r\nNormal-8.zip,0,Normal,1872,327,86,1,[327]\r\nCP-9.zip,1,CP,1369,3791,67,2,\"[3790, 3791]\"\r\nCP-29.zip,1,CP,3815,5759,23,1,[5759]\r\nNCP-2.zip,2,NCP,118,1378,60,2,\"[1377, 1378]\"\r\nCP-19.zip,1,CP,1793,3216,69,1,[3216]\r\nNCP-5.zip,2,NCP,178,1501,52,2,\"[1500, 1501]\"\r\nCP-13.zip,1,CP,1495,4087,50,4,\"[4086, 4087, 4088, 4089]\"\r\nCP-18.zip,1,CP,1780,3566,41,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 
3566]\"\r\nCP-8.zip,1,CP,1323,3682,62,2,\"[3682, 3683]\"\r\nCP-20.zip,1,CP,2754,3285,30,1,[3285]\r\nNormal-26.zip,0,Normal,3865,5377,24,1,[5377]\r\nNormal-23.zip,0,Normal,2614,124,37,1,[124]\r\nCP-12.zip,1,CP,1465,4009,67,2,\"[4009, 4010]\"\r\nCP-14.zip,1,CP,1537,4183,53,3,\"[4182, 4183, 4184]\"\r\nNormal-1.zip,0,Normal,1719,993,76,2,\"[993, 994]\"\r\nNCP-3.zip,2,NCP,128,1401,122,2,\"[1401, 1402]\"\r\nCP-28.zip,1,CP,3778,5722,25,1,[5722]\r\nNCP-1.zip,2,NCP,1018,2584,252,1,[2584]\r\nNCP-9.zip,2,NCP,27,1187,33,2,\"[1186, 1187]\"\r\nCP-13.zip,1,CP,1494,4084,65,3,\"[4083, 4084, 4085]\"\r\nNCP-13.zip,2,NCP,344,1839,152,2,\"[1839, 1840]\"\r\nCP-21.zip,1,CP,604,2966,134,1,[2966]\r\nNCP-1.zip,2,NCP,1037,2608,32,1,[2608]\r\nCP-12.zip,1,CP,1485,4057,49,3,\"[4056, 4057, 4058]\"\r\nNCP-16.zip,2,NCP,45,1223,152,2,\"[1223, 1224]\"\r\nNormal-14.zip,0,Normal,2058,513,95,1,[513]\r\nNCP-12.zip,2,NCP,323,1795,49,2,\"[1794, 1795]\"\r\nNCP-26.zip,2,NCP,3999,5496,52,1,[5496]\r\nNormal-15.zip,0,Normal,2107,562,92,1,[562]\r\nCP-12.zip,1,CP,1478,4038,53,2,\"[4037, 4038]\"\r\nNormal-15.zip,0,Normal,2099,554,85,1,[554]\r\nNCP-21.zip,2,NCP,64,1261,132,2,\"[1261, 1262]\"\r\nCP-9.zip,1,CP,1384,3824,66,2,\"[3823, 3824]\"\r\nNCP-18.zip,2,NCP,511,2178,132,2,\"[2178, 2179]\"\r\nCP-6.zip,1,CP,1227,3445,307,1,[3445]\r\nNormal-23.zip,0,Normal,2633,143,40,1,[143]\r\nNCP-10.zip,2,NCP,2722,2678,53,1,[2678]\r\nNCP-15.zip,2,NCP,427,2008,56,2,\"[2007, 2008]\"\r\nNCP-23.zip,2,NCP,94,1324,153,2,\"[1324, 1325]\"\r\nCP-19.zip,1,CP,2446,2922,690,1,[2922]\r\nCP-26.zip,1,CP,3728,5664,229,1,[5664]\r\nCP-20.zip,1,CP,2668,3249,45,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nNormal-27.zip,0,Normal,3899,5429,75,2,\"[5429, 5430]\"\r\nNormal-9.zip,0,Normal,1902,357,93,1,[357]\r\nNCP-9.zip,2,NCP,27,1186,75,2,\"[1186, 1187]\"\r\nNCP-18.zip,2,NCP,508,2172,145,2,\"[2172, 2173]\"\r\nNormal-8.zip,0,Normal,1862,317,91,1,[317]\r\nNCP-3.zip,2,NCP,128,1402,52,2,\"[1401, 
1402]\"\r\nNCP-8.zip,2,NCP,257,1660,152,2,\"[1660, 1661]\"\r\nNCP-30.zip,2,NCP,973,2516,57,1,[2516]\r\nCP-9.zip,1,CP,1357,3759,61,3,\"[3758, 3759, 3760]\"\r\nNormal-26.zip,0,Normal,3864,5376,178,1,[5376]\r\nCP-25.zip,1,CP,727,3089,104,1,[3089]\r\nNCP-8.zip,2,NCP,259,1664,155,2,\"[1664, 1665]\"\r\nCP-10.zip,1,CP,1390,3838,56,3,\"[3836, 3837, 3838]\"\r\nNormal-21.zip,0,Normal,2295,750,79,1,[750]\r\nNCP-18.zip,2,NCP,49,1231,146,2,\"[1231, 1232]\"\r\nCP-10.zip,1,CP,1391,3840,59,4,\"[3839, 3840, 3841, 3842]\"\r\nNCP-17.zip,2,NCP,48,1229,145,2,\"[1229, 1230]\"\r\nNCP-21.zip,2,NCP,73,1278,130,3,\"[1278, 1279, 1280]\"\r\nNCP-11.zip,2,NCP,296,1738,58,2,\"[1737, 1738]\"\r\nNCP-3.zip,2,NCP,129,1404,56,2,\"[1403, 1404]\"\r\nNCP-12.zip,2,NCP,330,1808,153,2,\"[1808, 1809]\"\r\nCP-14.zip,1,CP,1529,4165,100,3,\"[4165, 4166, 4167]\"\r\nCP-4.zip,1,CP,1187,3405,325,1,[3405]\r\nNCP-11.zip,2,NCP,307,1761,136,2,\"[1761, 1762]\"\r\nCP-26.zip,1,CP,3725,5661,258,2,\"[5660, 5661]\"\r\nNormal-10.zip,0,Normal,1950,405,102,1,[405]\r\nCP-15.zip,1,CP,1563,4247,61,3,\"[4245, 4246, 4247]\"\r\nNCP-4.zip,2,NCP,144,1433,58,2,\"[1432, 1433]\"\r\nNCP-28.zip,2,NCP,855,2375,39,1,[2375]\r\nNormal-1.zip,0,Normal,1726,1008,69,2,\"[1007, 1008]\"\r\nCP-22.zip,1,CP,629,2991,304,1,[2991]\r\nNCP-4.zip,2,NCP,142,1428,141,2,\"[1428, 1429]\"\r\nCP-21.zip,1,CP,592,2954,104,1,[2954]\r\nCP-1.zip,1,CP,10,3159,293,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]\"\r\nCP-9.zip,1,CP,1357,3760,61,3,\"[3758, 3759, 3760]\"\r\nNormal-24.zip,0,Normal,2648,158,32,1,[158]\r\nNCP-9.zip,2,NCP,269,1684,153,2,\"[1684, 1685]\"\r\nNormal-15.zip,0,Normal,2108,563,101,1,[563]\r\nCP-25.zip,1,CP,9,3148,290,4,\"[3148, 3149, 3150, 3151]\"\r\nNCP-13.zip,2,NCP,364,1879,132,2,\"[1879, 1880]\"\r\nNormal-23.zip,0,Normal,2605,115,35,1,[115]\r\nNCP-10.zip,2,NCP,282,1711,51,2,\"[1710, 1711]\"\r\nCP-14.zip,1,CP,1546,4209,58,2,\"[4208, 
4209]\"\r\nNCP-29.zip,2,NCP,925,2467,22,1,[2467]\r\nNormal-21.zip,0,Normal,2296,751,102,1,[751]\r\nCP-2.zip,1,CP,1114,3332,361,1,[3332]\r\nNCP-5.zip,2,NCP,19,1171,61,2,\"[1170, 1171]\"\r\nNCP-13.zip,2,NCP,363,1877,139,2,\"[1877, 1878]\"\r\nCP-12.zip,1,CP,1475,4031,50,2,\"[4031, 4032]\"\r\nNCP-14.zip,2,NCP,399,1949,62,2,\"[1948, 1949]\"\r\nCP-17.zip,1,CP,1626,4314,26,1,[4314]\r\nCP-18.zip,1,CP,1780,3556,60,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nNormal-19.zip,0,Normal,2236,691,83,1,[691]\r\nCP-15.zip,1,CP,1572,4260,19,1,[4260]\r\nCP-6.zip,1,CP,1240,3458,137,1,[3458]\r\nNCP-21.zip,2,NCP,76,1285,121,2,\"[1285, 1286]\"\r\nCP-22.zip,1,CP,623,2985,463,1,[2985]\r\nCP-27.zip,1,CP,3760,5704,23,1,[5704]\r\nCP-23.zip,1,CP,672,3034,86,1,[3034]\r\nNCP-1.zip,2,NCP,1026,2596,21,1,[2596]\r\nCP-22.zip,1,CP,635,2997,106,1,[2997]\r\nNCP-14.zip,2,NCP,375,1901,115,3,\"[1901, 1902, 1903]\"\r\nNCP-11.zip,2,NCP,304,1754,161,2,\"[1754, 1755]\"\r\nNCP-15.zip,2,NCP,408,1965,131,2,\"[1965, 1966]\"\r\nNCP-9.zip,2,NCP,2702,2668,41,1,[2668]\r\nCP-11.zip,1,CP,1452,3978,56,2,\"[3977, 3978]\"\r\nNCP-29.zip,2,NCP,891,2430,22,1,[2430]\r\nNCP-16.zip,2,NCP,458,2070,131,2,\"[2070, 2071]\"\r\nNormal-2.zip,0,Normal,1753,1092,60,7,\"[1086, 1087, 1088, 1089, 1090, 1091, 1092]\"\r\nNormal-1.zip,0,Normal,1702,958,69,2,\"[957, 958]\"\r\nNormal-2.zip,0,Normal,1761,1126,45,5,\"[1125, 1126, 1127, 1128, 1129]\"\r\nCP-12.zip,1,CP,1487,4063,68,3,\"[4061, 4062, 4063]\"\r\nNCP-25.zip,2,NCP,3958,5471,38,1,[5471]\r\nCP-15.zip,1,CP,1556,4231,40,2,\"[4230, 4231]\"\r\nNCP-16.zip,2,NCP,431,2015,160,2,\"[2015, 2016]\"\r\nNormal-2.zip,0,Normal,1745,1060,298,3,\"[1060, 1061, 1062]\"\r\nNCP-23.zip,2,NCP,906,2448,55,1,[2448]\r\nCP-2.zip,1,CP,11,3163,265,7,\"[3161, 3162, 3163, 3164, 3165, 3166, 3167]\"\r\nNCP-17.zip,2,NCP,487,2130,70,2,\"[2129, 2130]\"\r\nCP-16.zip,1,CP,1600,4288,19,1,[4288]\r\nNCP-21.zip,2,NCP,580,2317,139,2,\"[2317, 
2318]\"\r\nNormal-1.zip,0,Normal,1673,805,59,6,\"[804, 805, 806, 807, 808, 809]\"\r\nCP-29.zip,1,CP,3801,5745,26,1,[5745]\r\nNormal-1.zip,0,Normal,1726,1007,69,2,\"[1007, 1008]\"\r\nNCP-29.zip,2,NCP,893,2432,25,2,\"[2432, 2433]\"\r\nCP-3.zip,1,CP,1143,3361,177,1,[3361]\r\nCP-8.zip,1,CP,1343,3726,56,2,\"[3726, 3727]\"\r\nNCP-2.zip,2,NCP,115,1371,118,2,\"[1371, 1372]\"\r\nNCP-11.zip,2,NCP,31,1195,57,2,\"[1194, 1195]\"\r\nCP-1.zip,1,CP,1071,3114,57,2,\"[3113, 3114]\"\r\nNCP-23.zip,2,NCP,951,2494,38,1,[2494]\r\nNormal-1.zip,0,Normal,1706,967,64,2,\"[967, 968]\"\r\nNCP-8.zip,2,NCP,262,1671,58,2,\"[1670, 1671]\"\r\nNormal-10.zip,0,Normal,1943,398,94,1,[398]\r\nNCP-8.zip,2,NCP,257,1661,64,2,\"[1660, 1661]\"\r\nNormal-24.zip,0,Normal,2644,154,39,1,[154]\r\nNCP-15.zip,2,NCP,407,1964,52,2,\"[1963, 1964]\"\r\nNormal-26.zip,0,Normal,3883,5395,61,1,[5395]\r\nNCP-9.zip,2,NCP,2685,2698,52,1,[2698]\r\nNCP-30.zip,2,NCP,992,2545,213,1,[2545]\r\nCP-21.zip,1,CP,596,2958,255,1,[2958]\r\nCP-7.zip,1,CP,1314,3664,30,2,\"[3663, 3664]\"\r\nNCP-16.zip,2,NCP,432,2018,54,2,\"[2017, 2018]\"\r\nNCP-14.zip,2,NCP,371,1894,59,2,\"[1893, 1894]\"\r\nNCP-7.zip,2,NCP,2482,2685,45,1,[2685]\r\nNormal-1.zip,0,Normal,1679,834,66,6,\"[833, 834, 835, 836, 837, 838]\"\r\nCP-29.zip,1,CP,3824,5768,23,1,[5768]\r\nNormal-2.zip,0,Normal,1753,1089,66,7,\"[1086, 1087, 1088, 1089, 1090, 1091, 1092]\"\r\nNormal-7.zip,0,Normal,1859,314,85,1,[314]\r\nNCP-21.zip,2,NCP,578,2313,130,2,\"[2313, 2314]\"\r\nCP-10.zip,1,CP,1402,3866,55,3,\"[3865, 3866, 3867]\"\r\nNormal-4.zip,0,Normal,791,226,138,1,[226]\r\nNormal-13.zip,0,Normal,2039,494,101,1,[494]\r\nNormal-15.zip,0,Normal,2115,570,94,1,[570]\r\nCP-12.zip,1,CP,1470,4021,54,2,\"[4020, 4021]\"\r\nCP-24.zip,1,CP,695,3057,201,1,[3057]\r\nNormal-12.zip,0,Normal,1994,449,95,1,[449]\r\nNormal-5.zip,0,Normal,804,239,325,1,[239]\r\nCP-17.zip,1,CP,1623,4311,23,1,[4311]\r\nNormal-18.zip,0,Normal,2208,663,95,1,[663]\r\nNCP-19.zip,2,NCP,526,2209,58,2,\"[2208, 
2209]\"\r\nNCP-16.zip,2,NCP,45,1224,64,2,\"[1223, 1224]\"\r\nNormal-1.zip,0,Normal,1679,838,70,6,\"[833, 834, 835, 836, 837, 838]\"\r\nCP-2.zip,1,CP,11,3161,244,7,\"[3161, 3162, 3163, 3164, 3165, 3166, 3167]\"\r\nNormal-19.zip,0,Normal,2239,694,89,1,[694]\r\nNCP-7.zip,2,NCP,243,1631,145,3,\"[1631, 1632, 1633]\"\r\nNCP-7.zip,2,NCP,243,1633,61,3,\"[1631, 1632, 1633]\"\r\nCP-18.zip,1,CP,1780,3561,63,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nCP-11.zip,1,CP,1429,3926,52,2,\"[3926, 3927]\"\r\nNCP-7.zip,2,NCP,237,1619,146,2,\"[1619, 1620]\"\r\nCP-7.zip,1,CP,1319,3674,61,2,\"[3674, 3675]\"\r\nNCP-28.zip,2,NCP,829,2342,36,1,[2342]\r\nNormal-18.zip,0,Normal,2186,641,84,1,[641]\r\nNormal-16.zip,0,Normal,2127,582,84,1,[582]\r\nCP-5.zip,1,CP,1197,3415,191,1,[3415]\r\nCP-10.zip,1,CP,1414,3893,63,3,\"[3891, 3892, 3893]\"\r\nNCP-14.zip,2,NCP,384,1920,127,2,\"[1920, 1921]\"\r\nCP-7.zip,1,CP,1317,3671,116,3,\"[3670, 3671, 3672]\"\r\nNCP-22.zip,2,NCP,81,1295,125,2,\"[1295, 1296]\"\r\nCP-3.zip,1,CP,1156,3374,173,1,[3374]\r\nNormal-2.zip,0,Normal,1761,1129,60,5,\"[1125, 1126, 1127, 1128, 1129]\"\r\nNCP-8.zip,2,NCP,252,1651,58,2,\"[1650, 1651]\"\r\nNCP-25.zip,2,NCP,3959,5472,44,1,[5472]\r\nNormal-11.zip,0,Normal,1988,443,90,1,[443]\r\nCP-30.zip,1,CP,3833,5777,23,1,[5777]\r\nNCP-26.zip,2,NCP,3985,5491,50,1,[5491]\r\nCP-20.zip,1,CP,2668,3255,28,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nNormal-14.zip,0,Normal,2077,532,92,1,[532]\r\nNormal-14.zip,0,Normal,2059,514,95,1,[514]\r\nCP-29.zip,1,CP,3829,5773,26,1,[5773]\r\nNCP-15.zip,2,NCP,402,1954,62,2,\"[1953, 1954]\"\r\nCP-29.zip,1,CP,3800,5744,29,1,[5744]\r\nCP-9.zip,1,CP,1383,3821,71,2,\"[3821, 3822]\"\r\nNCP-6.zip,2,NCP,225,1594,135,2,\"[1594, 1595]\"\r\nCP-27.zip,1,CP,3759,5703,23,1,[5703]\r\nCP-4.zip,1,CP,1190,3408,173,1,[3408]\r\nNCP-29.zip,2,NCP,889,2427,38,2,\"[2427, 2428]\"\r\nNCP-14.zip,2,NCP,375,1902,40,3,\"[1901, 1902, 
1903]\"\r\nNormal-19.zip,0,Normal,2238,693,91,1,[693]\r\nNCP-2.zip,2,NCP,1273,2714,56,1,[2714]\r\nNCP-18.zip,2,NCP,497,2151,53,2,\"[2150, 2151]\"\r\nCP-25.zip,1,CP,715,3077,609,1,[3077]\r\nCP-7.zip,1,CP,1264,3482,126,1,[3482]\r\nCP-1.zip,1,CP,10,3157,46,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]\"\r\nNormal-20.zip,0,Normal,2266,721,94,1,[721]\r\nCP-11.zip,1,CP,1433,3935,62,2,\"[3934, 3935]\"\r\nNCP-18.zip,2,NCP,511,2179,56,2,\"[2178, 2179]\"\r\nCP-3.zip,1,CP,1138,3356,158,1,[3356]\r\nNormal-20.zip,0,Normal,2249,704,66,1,[704]\r\nNormal-6.zip,0,Normal,1809,264,94,1,[264]\r\nCP-14.zip,1,CP,1547,4210,142,3,\"[4210, 4211, 4212]\"\r\nCP-21.zip,1,CP,586,2948,174,1,[2948]\r\nCP-23.zip,1,CP,650,3012,102,1,[3012]\r\nCP-14.zip,1,CP,1522,4149,61,2,\"[4148, 4149]\"\r\nNCP-8.zip,2,NCP,250,1646,144,2,\"[1646, 1647]\"\r\nNormal-26.zip,0,Normal,3884,5397,298,2,\"[5396, 5397]\"\r\nCP-28.zip,1,CP,3773,5717,20,1,[5717]\r\nNormal-21.zip,0,Normal,2309,764,88,1,[764]\r\nNCP-12.zip,2,NCP,326,1801,50,2,\"[1800, 1801]\"\r\nNormal-1.zip,0,Normal,1729,1017,74,2,\"[1017, 1018]\"\r\nNormal-1.zip,0,Normal,1684,871,68,5,\"[870, 871, 873, 874, 875]\"\r\nCP-15.zip,1,CP,1567,4254,118,2,\"[4254, 4255]\"\r\nNCP-4.zip,2,NCP,163,1470,154,2,\"[1470, 1471]\"\r\nNormal-1.zip,0,Normal,1705,966,69,2,\"[965, 966]\"\r\nCP-11.zip,1,CP,1446,3966,63,2,\"[3965, 3966]\"\r\nNCP-6.zip,2,NCP,225,1595,57,2,\"[1594, 1595]\"\r\nNCP-11.zip,2,NCP,293,1732,52,2,\"[1731, 1732]\"\r\nNCP-28.zip,2,NCP,839,2354,209,1,[2354]\r\nNCP-18.zip,2,NCP,513,2182,163,2,\"[2182, 2183]\"\r\nNormal-8.zip,0,Normal,1889,344,87,1,[344]\r\nCP-2.zip,1,CP,1112,3330,154,1,[3330]\r\nNormal-26.zip,0,Normal,3874,5386,28,1,[5386]\r\nCP-29.zip,1,CP,3813,5757,21,1,[5757]\r\nCP-7.zip,1,CP,1317,3670,229,3,\"[3670, 3671, 3672]\"\r\nNCP-20.zip,2,NCP,553,2264,58,2,\"[2263, 2264]\"\r\nCP-29.zip,1,CP,3820,5764,31,1,[5764]\r\nNCP-17.zip,2,NCP,482,2120,58,2,\"[2119, 2120]\"\r\nNCP-7.zip,2,NCP,233,1610,86,2,\"[1610, 
1612]\"\r\nNCP-18.zip,2,NCP,500,2157,68,2,\"[2156, 2157]\"\r\nNormal-4.zip,0,Normal,799,234,118,1,[234]\r\nNCP-23.zip,2,NCP,94,1325,64,2,\"[1324, 1325]\"\r\nCP-18.zip,1,CP,1780,3563,60,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nNCP-23.zip,2,NCP,902,2444,45,1,[2444]\r\nCP-2.zip,1,CP,11,3162,260,7,\"[3161, 3162, 3163, 3164, 3165, 3166, 3167]\"\r\nNCP-3.zip,2,NCP,135,1415,58,2,\"[1414, 1415]\"\r\nCP-8.zip,1,CP,1350,3745,55,1,[3745]\r\nNormal-14.zip,0,Normal,2065,520,81,1,[520]\r\nNCP-5.zip,2,NCP,188,1521,57,2,\"[1520, 1521]\"\r\nNormal-2.zip,0,Normal,1745,1061,60,3,\"[1060, 1061, 1062]\"\r\nNCP-15.zip,2,NCP,424,2002,64,2,\"[2001, 2002]\"\r\nNormal-4.zip,0,Normal,790,225,126,1,[225]\r\nNCP-4.zip,2,NCP,142,1429,59,2,\"[1428, 1429]\"\r\nCP-7.zip,1,CP,1310,3653,51,2,\"[3653, 3654]\"\r\nCP-14.zip,1,CP,1537,4182,53,3,\"[4182, 4183, 4184]\"\r\nCP-17.zip,1,CP,1625,4313,26,1,[4313]\r\nNormal-1.zip,0,Normal,1680,843,64,6,\"[839, 840, 841, 842, 843, 844]\"\r\nNCP-11.zip,2,NCP,311,1769,134,2,\"[1769, 1770]\"\r\nCP-1.zip,1,CP,1075,3118,553,2,\"[3118, 3119]\"\r\nNormal-4.zip,0,Normal,770,205,116,1,[205]\r\nCP-7.zip,1,CP,1311,3655,160,3,\"[3655, 3656, 3657]\"\r\nNormal-1.zip,0,Normal,1724,1005,55,1,[1005]\r\nNCP-20.zip,2,NCP,563,2285,59,2,\"[2284, 2285]\"\r\nNCP-4.zip,2,NCP,163,1471,65,2,\"[1470, 1471]\"\r\nNormal-15.zip,0,Normal,2114,569,101,1,[569]\r\nNormal-12.zip,0,Normal,2016,471,89,1,[471]\r\nCP-23.zip,1,CP,657,3019,343,1,[3019]\r\nNormal-1.zip,0,Normal,1729,1018,74,2,\"[1017, 1018]\"\r\nCP-18.zip,1,CP,1780,3558,73,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nNCP-5.zip,2,NCP,183,1511,52,2,\"[1510, 
1511]\"\r\nCP-1.zip,1,CP,1074,3117,61,1,[3117]\r\nNormal-8.zip,0,Normal,1870,325,88,1,[325]\r\nCP-6.zip,1,CP,1254,3472,125,1,[3472]\r\nCP-21.zip,1,CP,2775,3306,43,1,[3306]\r\nCP-16.zip,1,CP,1587,4275,20,1,[4275]\r\nNCP-26.zip,2,NCP,3984,5490,54,1,[5490]\r\nCP-27.zip,1,CP,3747,5691,20,1,[5691]\r\nCP-13.zip,1,CP,1495,4088,48,4,\"[4086, 4087, 4088, 4089]\"\r\nCP-9.zip,1,CP,1384,3823,66,2,\"[3823, 3824]\"\r\nNCP-1.zip,2,NCP,100,1338,58,2,\"[1337, 1338]\"\r\nNCP-27.zip,2,NCP,1025,2595,252,1,[2595]\r\nNCP-18.zip,2,NCP,510,2177,43,2,\"[2176, 2177]\"\r\nNCP-11.zip,2,NCP,298,1743,61,2,\"[1742, 1743]\"\r\nNormal-17.zip,0,Normal,2174,629,88,1,[629]\r\nCP-23.zip,1,CP,677,3039,309,1,[3039]\r\nNormal-21.zip,0,Normal,2284,739,80,1,[739]\r\nNormal-18.zip,0,Normal,2193,648,85,1,[648]\r\nNCP-27.zip,2,NCP,1015,2579,39,1,[2579]\r\nNCP-6.zip,2,NCP,214,1572,144,2,\"[1572, 1573]\"\r\nCP-6.zip,1,CP,1248,3466,141,1,[3466]\r\nNormal-27.zip,0,Normal,3901,5433,66,1,[5433]\r\nCP-13.zip,1,CP,1519,4142,68,2,\"[4141, 4142]\"\r\nNCP-14.zip,2,NCP,385,1922,64,1,[1922]\r\nCP-7.zip,1,CP,1311,3657,67,3,\"[3655, 3656, 3657]\"\r\nCP-14.zip,1,CP,1547,4212,58,3,\"[4210, 4211, 4212]\"\r\nCP-4.zip,1,CP,1186,3404,204,1,[3404]\r\nNCP-4.zip,2,NCP,165,1474,131,2,\"[1474, 1475]\"\r\nCP-1.zip,1,CP,10,3160,297,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]\"\r\nCP-3.zip,1,CP,1157,3375,204,1,[3375]\r\nNCP-11.zip,2,NCP,307,1762,57,2,\"[1761, 1762]\"\r\nCP-11.zip,1,CP,1441,3952,53,3,\"[3951, 3952, 3953]\"\r\nNCP-21.zip,2,NCP,63,1259,139,2,\"[1259, 1260]\"\r\nNormal-6.zip,0,Normal,1806,261,100,1,[261]\r\nCP-17.zip,1,CP,1627,4315,26,1,[4315]\r\nNormal-14.zip,0,Normal,2064,519,91,1,[519]\r\nNCP-5.zip,2,NCP,180,1505,57,2,\"[1504, 1505]\"\r\nNormal-16.zip,0,Normal,2134,589,72,1,[589]\r\nNormal-14.zip,0,Normal,2063,518,99,1,[518]\r\nCP-11.zip,1,CP,1451,3975,51,2,\"[3975, 
3976]\"\r\nNormal-24.zip,0,Normal,2647,157,34,1,[157]\r\nNCP-21.zip,2,NCP,66,1265,58,1,[1265]\r\nNormal-25.zip,0,Normal,3843,5355,180,1,[5355]\r\nCP-25.zip,1,CP,729,3091,106,1,[3091]\r\nCP-20.zip,1,CP,2668,3256,53,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nNCP-6.zip,2,NCP,200,1544,123,2,\"[1544, 1545]\"\r\nNormal-1.zip,0,Normal,1685,879,65,4,\"[877, 878, 879, 880]\"\r\nNCP-24.zip,2,NCP,972,2515,120,1,[2515]\r\nCP-14.zip,1,CP,1547,4211,58,3,\"[4210, 4211, 4212]\"\r\nCP-18.zip,1,CP,1775,3530,58,4,\"[3530, 3531, 3532, 3533]\"\r\nCP-11.zip,1,CP,1427,3921,43,2,\"[3921, 3922]\"\r\nCP-18.zip,1,CP,1776,3534,64,6,\"[3534, 3535, 3536, 3537, 3538, 3539]\"\r\nNCP-13.zip,2,NCP,368,1888,54,2,\"[1887, 1888]\"\r\nCP-23.zip,1,CP,644,3006,134,1,[3006]\r\nCP-7.zip,1,CP,1312,3659,65,2,\"[3658, 3659]\"\r\nNCP-4.zip,2,NCP,139,1422,132,2,\"[1422, 1423]\"\r\nNCP-15.zip,2,NCP,422,1998,63,2,\"[1997, 1998]\"\r\nCP-10.zip,1,CP,1391,3842,59,4,\"[3839, 3840, 3841, 3842]\"\r\nCP-11.zip,1,CP,1441,3953,53,3,\"[3951, 3952, 3953]\"\r\nNCP-4.zip,2,NCP,154,1452,110,2,\"[1452, 1453]\"\r\nNCP-6.zip,2,NCP,202,1549,67,2,\"[1548, 1549]\"\r\nCP-11.zip,1,CP,1436,3941,45,2,\"[3940, 3941]\"\r\nNCP-16.zip,2,NCP,431,2016,67,2,\"[2015, 2016]\"\r\nNormal-26.zip,0,Normal,3870,5382,30,1,[5382]\r\nNormal-17.zip,0,Normal,2159,614,89,1,[614]\r\nCP-11.zip,1,CP,1427,3922,43,2,\"[3921, 3922]\"\r\nCP-6.zip,1,CP,1228,3446,307,1,[3446]\r\nNCP-15.zip,2,NCP,422,1997,156,2,\"[1997, 1998]\"\r\nNormal-1.zip,0,Normal,1679,836,67,6,\"[833, 834, 835, 836, 837, 838]\"\r\nCP-16.zip,1,CP,1604,4292,22,1,[4292]\r\nCP-4.zip,1,CP,1179,3397,153,1,[3397]\r\nNCP-6.zip,2,NCP,221,1586,125,2,\"[1586, 1587]\"\r\nCP-18.zip,1,CP,1780,3564,41,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nNCP-4.zip,2,NCP,139,1423,56,2,\"[1422, 1423]\"\r\nNormal-1.zip,0,Normal,1685,880,65,4,\"[877, 878, 879, 880]\"\r\nCP-18.zip,1,CP,1780,3557,73,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 
3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nCP-7.zip,1,CP,1269,3487,172,1,[3487]\r\nNormal-1.zip,0,Normal,1680,841,69,6,\"[839, 840, 841, 842, 843, 844]\"\r\nCP-13.zip,1,CP,1491,4074,113,3,\"[4074, 4075, 4076]\"\r\nNCP-13.zip,2,NCP,344,1840,63,2,\"[1839, 1840]\"\r\nNCP-17.zip,2,NCP,476,2108,53,2,\"[2107, 2108]\"\r\nNormal-12.zip,0,Normal,1997,452,104,1,[452]\r\nNormal-2.zip,0,Normal,1745,1062,60,3,\"[1060, 1061, 1062]\"\r\nNormal-19.zip,0,Normal,2224,679,82,1,[679]\r\nCP-2.zip,1,CP,1101,3319,187,1,[3319]\r\nNormal-26.zip,0,Normal,3873,5385,25,1,[5385]\r\nCP-15.zip,1,CP,1578,4266,22,1,[4266]\r\nNormal-22.zip,0,Normal,2591,101,37,1,[101]\r\nNormal-11.zip,0,Normal,1966,421,90,1,[421]\r\nNCP-17.zip,2,NCP,480,2115,139,2,\"[2115, 2116]\"\r\nCP-19.zip,1,CP,2,3503,34,1,[3503]\r\nCP-16.zip,1,CP,1616,4304,29,1,[4304]\r\nCP-10.zip,1,CP,1410,3883,51,2,\"[3883, 3884]\"\r\nCP-24.zip,1,CP,701,3063,66,1,[3063]\r\nNCP-6.zip,2,NCP,200,1545,52,2,\"[1544, 1545]\"\r\nCP-1.zip,1,CP,10,3155,297,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]\"\r\nNCP-4.zip,2,NCP,160,1464,146,2,\"[1464, 1465]\"\r\nNormal-8.zip,0,Normal,1890,345,99,1,[345]\r\nNCP-9.zip,2,NCP,2694,2660,39,1,[2660]\r\nCP-30.zip,1,CP,3930,5628,62,2,\"[5628, 5629]\"\r\nCP-25.zip,1,CP,9,3149,290,4,\"[3148, 3149, 3150, 3151]\"\r\nNormal-13.zip,0,Normal,2022,477,92,1,[477]\r\nNormal-1.zip,0,Normal,1680,842,69,6,\"[839, 840, 841, 842, 843, 844]\"\r\nNCP-7.zip,2,NCP,229,1603,65,2,\"[1602, 1603]\"\r\nNormal-1.zip,0,Normal,1712,979,70,1,[979]\r\nNormal-12.zip,0,Normal,2002,457,96,1,[457]\r\nCP-6.zip,1,CP,1233,3451,150,1,[3451]\r\nNCP-18.zip,2,NCP,489,2135,58,2,\"[2134, 2135]\"\r\nCP-7.zip,1,CP,1310,3654,51,2,\"[3653, 3654]\"\r\nCP-22.zip,1,CP,636,2998,102,1,[2998]\r\nNCP-21.zip,2,NCP,70,1273,51,2,\"[1272, 1273]\"\r\nNormal-23.zip,0,Normal,2603,113,41,1,[113]\r\nCP-8.zip,1,CP,1323,3683,62,2,\"[3682, 3683]\"\r\nNormal-20.zip,0,Normal,2274,729,85,1,[729]\r\nNCP-29.zip,2,NCP,889,2428,121,2,\"[2427, 
2428]\"\r\nNCP-1.zip,2,NCP,1040,2611,113,1,[2611]\r\nNormal-21.zip,0,Normal,2298,753,80,1,[753]\r\nCP-19.zip,1,CP,1792,3215,71,2,\"[3214, 3215]\"\r\nNormal-27.zip,0,Normal,3916,5459,77,1,[5459]\r\nNormal-21.zip,0,Normal,2311,766,91,1,[766]\r\nNCP-13.zip,2,NCP,343,1837,130,2,\"[1837, 1838]\"\r\nNCP-26.zip,2,NCP,3989,5513,45,1,[5513]\r\nCP-13.zip,1,CP,1495,4086,112,4,\"[4086, 4087, 4088, 4089]\"\r\nNormal-5.zip,0,Normal,812,247,126,1,[247]\r\nNormal-15.zip,0,Normal,2098,553,84,1,[553]\r\nNormal-16.zip,0,Normal,2119,574,93,1,[574]\r\nCP-25.zip,1,CP,731,3093,82,1,[3093]\r\nCP-16.zip,1,CP,1597,4285,23,1,[4285]\r\nCP-26.zip,1,CP,3726,5662,232,1,[5662]\r\nCP-4.zip,1,CP,1183,3401,294,1,[3401]\r\nCP-10.zip,1,CP,1391,3839,59,4,\"[3839, 3840, 3841, 3842]\"\r\nNCP-23.zip,2,NCP,901,2443,320,1,[2443]\r\nNormal-11.zip,0,Normal,1957,412,78,1,[412]\r\nNCP-17.zip,2,NCP,474,2104,48,2,\"[2103, 2104]\"\r\nNCP-9.zip,2,NCP,2698,2664,57,1,[2664]\r\nNCP-7.zip,2,NCP,233,1612,45,2,\"[1610, 1612]\"\r\nNCP-9.zip,2,NCP,2686,2699,48,1,[2699]\r\nCP-18.zip,1,CP,1776,3537,75,6,\"[3534, 3535, 3536, 3537, 3538, 3539]\"\r\nCP-3.zip,1,CP,1158,3376,193,1,[3376]\r\nCP-27.zip,1,CP,3755,5699,23,1,[5699]\r\nCP-13.zip,1,CP,1509,4120,59,3,\"[4118, 4119, 4120]\"\r\nNCP-29.zip,2,NCP,910,2452,76,1,[2452]\r\nCP-2.zip,1,CP,11,3166,274,7,\"[3161, 3162, 3163, 3164, 3165, 3166, 3167]\"\r\nNCP-16.zip,2,NCP,433,2020,51,2,\"[2019, 2020]\"\r\nNormal-26.zip,0,Normal,3863,5375,231,1,[5375]\r\nNormal-7.zip,0,Normal,1851,306,102,1,[306]\r\nNCP-23.zip,2,NCP,917,2459,272,1,[2459]\r\nNCP-26.zip,2,NCP,3986,5492,42,1,[5492]\r\nCP-12.zip,1,CP,1478,4037,53,2,\"[4037, 4038]\"\r\nNCP-2.zip,2,NCP,115,1372,50,2,\"[1371, 1372]\"\r\nNCP-13.zip,2,NCP,362,1875,151,2,\"[1875, 1876]\"\r\nNormal-22.zip,0,Normal,2592,102,39,1,[102]\r\nCP-9.zip,1,CP,1357,3758,61,3,\"[3758, 3759, 3760]\"\r\nNormal-6.zip,0,Normal,1825,280,81,1,[280]\r\nNormal-4.zip,0,Normal,775,210,134,1,[210]\r\nNCP-13.zip,2,NCP,365,1881,117,2,\"[1881, 
1882]\"\r\nCP-24.zip,1,CP,709,3071,302,1,[3071]\r\nCP-17.zip,1,CP,1630,4318,23,1,[4318]\r\nCP-15.zip,1,CP,1557,4232,43,2,\"[4232, 4233]\"\r\nNCP-23.zip,2,NCP,956,2499,156,1,[2499]\r\nCP-2.zip,1,CP,1106,3324,164,1,[3324]\r\nNormal-9.zip,0,Normal,1895,350,92,1,[350]\r\nCP-21.zip,1,CP,599,2961,68,1,[2961]\r\nNCP-16.zip,2,NCP,448,2051,58,2,\"[2050, 2051]\"\r\nCP-5.zip,1,CP,1206,3424,176,1,[3424]\r\nCP-26.zip,1,CP,3648,5540,170,1,[5540]\r\nCP-1.zip,1,CP,1091,3309,354,1,[3309]\r\nNCP-10.zip,2,NCP,2713,2706,39,1,[2706]\r\nNCP-30.zip,2,NCP,949,2492,42,1,[2492]\r\nNCP-17.zip,2,NCP,480,2116,58,2,\"[2115, 2116]\"\r\nCP-7.zip,1,CP,1306,3643,48,3,\"[3642, 3643, 3644]\"\r\nNormal-7.zip,0,Normal,1840,295,108,1,[295]\r\nCP-18.zip,1,CP,1780,3562,63,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nNCP-1.zip,2,NCP,1011,2575,111,2,\"[2574, 2575]\"\r\nNormal-16.zip,0,Normal,2132,587,97,1,[587]\r\nCP-29.zip,1,CP,3814,5758,29,1,[5758]\r\nCP-18.zip,1,CP,1768,3175,175,1,[3175]\r\nNormal-13.zip,0,Normal,2028,483,89,1,[483]\r\nNCP-16.zip,2,NCP,454,2062,139,2,\"[2062, 2063]\"\r\nCP-8.zip,1,CP,1333,3706,52,2,\"[3705, 3706]\"\r\nCP-25.zip,1,CP,737,3099,84,1,[3099]\r\nNCP-9.zip,2,NCP,2683,2653,46,1,[2653]\r\nNormal-11.zip,0,Normal,1958,413,90,1,[413]\r\nNormal-7.zip,0,Normal,1855,310,86,1,[310]\r\nNCP-10.zip,2,NCP,282,1710,120,2,\"[1710, 1711]\"\r\nNCP-8.zip,2,NCP,252,1650,139,2,\"[1650, 1651]\"\r\nNCP-3.zip,2,NCP,133,1411,41,2,\"[1410, 1411]\"\r\nCP-21.zip,1,CP,588,2950,116,1,[2950]\r\nNormal-15.zip,0,Normal,2094,549,78,1,[549]\r\nNCP-20.zip,2,NCP,562,2282,113,2,\"[2282, 2283]\"\r\nNormal-5.zip,0,Normal,806,241,104,1,[241]\r\nCP-3.zip,1,CP,1145,3363,169,1,[3363]\r\nNCP-28.zip,2,NCP,847,2365,53,1,[2365]\r\nNCP-4.zip,2,NCP,143,1431,54,2,\"[1430, 1431]\"\r\nNCP-15.zip,2,NCP,407,1963,124,2,\"[1963, 1964]\"\r\nNormal-6.zip,0,Normal,1817,272,85,1,[272]\r\nCP-32.zip,1,CP,1089,3224,90,1,[3224]\r\nNCP-22.zip,2,NCP,834,2347,194,2,\"[2347, 
2348]\"\r\nCP-9.zip,1,CP,1381,3816,66,3,\"[3815, 3816, 3817]\"\r\nNormal-8.zip,0,Normal,1866,321,75,1,[321]\r\nNCP-22.zip,2,NCP,86,1306,50,2,\"[1305, 1306]\"\r\nCP-26.zip,1,CP,3725,5660,251,2,\"[5660, 5661]\"\r\nNCP-18.zip,2,NCP,497,2150,126,2,\"[2150, 2151]\"\r\nNCP-27.zip,2,NCP,1043,2615,45,1,[2615]\r\nCP-4.zip,1,CP,1167,3385,149,1,[3385]\r\nNormal-4.zip,0,Normal,782,217,340,1,[217]\r\nNCP-15.zip,2,NCP,421,1995,161,2,\"[1995, 1996]\"\r\nNormal-9.zip,0,Normal,1897,352,88,1,[352]\r\nNCP-13.zip,2,NCP,365,1882,50,2,\"[1881, 1882]\"\r\nCP-1.zip,1,CP,1067,3106,62,1,[3106]\r\nCP-22.zip,1,CP,642,3004,128,1,[3004]\r\nCP-20.zip,1,CP,2668,3258,52,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nCP-10.zip,1,CP,1406,3875,60,2,\"[3874, 3875]\"\r\nCP-1.zip,1,CP,10,3158,285,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]\"\r\nNCP-21.zip,2,NCP,60,1254,59,2,\"[1253, 1254]\"\r\nNormal-26.zip,0,Normal,3884,5396,62,2,\"[5396, 5397]\"\r\nNCP-25.zip,2,NCP,3710,5537,66,1,[5537]\r\nCP-9.zip,1,CP,1371,3795,60,3,\"[3794, 3795, 3796]\"\r\nCP-20.zip,1,CP,2450,2928,92,2,\"[2928, 2929]\"\r\nNCP-4.zip,2,NCP,166,1476,139,2,\"[1476, 1477]\"\r\nNCP-20.zip,2,NCP,554,2266,54,2,\"[2265, 2266]\"\r\nNCP-18.zip,2,NCP,491,2139,62,2,\"[2138, 2139]\"\r\nCP-2.zip,1,CP,1098,3316,171,1,[3316]\r\nCP-12.zip,1,CP,1465,4010,67,2,\"[4009, 4010]\"\r\nNCP-20.zip,2,NCP,548,2254,61,2,\"[2253, 2254]\"\r\nNormal-16.zip,0,Normal,2150,605,88,1,[605]\r\nNCP-16.zip,2,NCP,451,2056,51,3,\"[2056, 2057, 2058]\"\r\nNormal-11.zip,0,Normal,1965,420,88,1,[420]\r\nNCP-1.zip,2,NCP,101,1339,136,2,\"[1339, 1340]\"\r\nNormal-12.zip,0,Normal,2008,463,92,1,[463]\r\nCP-10.zip,1,CP,1402,3867,55,3,\"[3865, 3866, 3867]\"\r\nNCP-2.zip,2,NCP,122,1386,62,2,\"[1385, 1386]\"\r\nCP-20.zip,1,CP,2457,2941,108,1,[2941]\r\nNCP-14.zip,2,NCP,38,1208,137,2,\"[1208, 1209]\"\r\nNormal-10.zip,0,Normal,1933,388,103,1,[388]\r\nCP-1.zip,1,CP,10,3152,297,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 
3160]\"\r\nNCP-20.zip,2,NCP,562,2283,48,2,\"[2282, 2283]\"\r\nNCP-12.zip,2,NCP,335,1819,55,2,\"[1818, 1819]\"\r\nNCP-21.zip,2,NCP,579,2316,63,2,\"[2315, 2316]\"\r\nNormal-7.zip,0,Normal,1856,311,80,1,[311]\r\nNCP-18.zip,2,NCP,506,2169,51,2,\"[2168, 2169]\"\r\nCP-8.zip,1,CP,1339,3719,59,2,\"[3718, 3719]\"\r\nCP-18.zip,1,CP,1652,4340,25,1,[4340]\r\nNCP-11.zip,2,NCP,296,1737,139,2,\"[1737, 1738]\"\r\nNormal-8.zip,0,Normal,1886,341,84,1,[341]\r\nNCP-8.zip,2,NCP,250,1647,60,2,\"[1646, 1647]\"\r\nCP-14.zip,1,CP,1537,4184,53,3,\"[4182, 4183, 4184]\"\r\nNCP-17.zip,2,NCP,486,2128,64,2,\"[2127, 2128]\"\r\nCP-8.zip,1,CP,1335,3711,62,3,\"[3709, 3710, 3711]\"\r\nCP-27.zip,1,CP,3739,5683,19,1,[5683]\r\nNCP-25.zip,2,NCP,3950,5464,41,1,[5464]\r\nCP-12.zip,1,CP,1474,4029,62,2,\"[4029, 4030]\"\r\nNormal-10.zip,0,Normal,1946,401,93,1,[401]\r\nNCP-30.zip,2,NCP,947,2490,41,1,[2490]\r\nNCP-14.zip,2,NCP,371,1893,141,2,\"[1893, 1894]\"\r\nNCP-8.zip,2,NCP,2676,2694,54,1,[2694]\r\nNCP-1.zip,2,NCP,1011,2574,117,2,\"[2574, 2575]\"\r\nNormal-9.zip,0,Normal,1906,361,93,1,[361]\r\nNCP-4.zip,2,NCP,147,1439,72,2,\"[1438, 1439]\"\r\nCP-12.zip,1,CP,1485,4058,49,3,\"[4056, 4057, 4058]\"\r\nNormal-7.zip,0,Normal,1838,293,86,1,[293]\r\nCP-25.zip,1,CP,9,3150,72,4,\"[3148, 3149, 3150, 3151]\"\r\nNCP-12.zip,2,NCP,330,1809,64,2,\"[1808, 1809]\"\r\nNCP-8.zip,2,NCP,267,1681,54,2,\"[1680, 1681]\"\r\nNCP-20.zip,2,NCP,553,2263,137,2,\"[2263, 2264]\"\r\nNCP-29.zip,2,NCP,893,2433,24,2,\"[2432, 2433]\"\r\nNCP-21.zip,2,NCP,582,2321,128,2,\"[2321, 2322]\"\r\nNormal-24.zip,0,Normal,2642,152,38,1,[152]\r\nCP-25.zip,1,CP,726,3088,183,1,[3088]\r\nNCP-5.zip,2,NCP,171,1487,60,2,\"[1486, 1487]\"\r\nCP-22.zip,1,CP,632,2994,132,1,[2994]\r\nNormal-7.zip,0,Normal,1850,305,99,1,[305]\r\nNCP-30.zip,2,NCP,945,2488,45,1,[2488]\r\nNormal-19.zip,0,Normal,2244,699,98,1,[699]\r\nCP-1.zip,1,CP,1073,3116,52,1,[3116]\r\nNormal-21.zip,0,Normal,2310,765,91,1,[765]\r\nCP-1.zip,1,CP,10,3153,297,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 
3159, 3160]\"\r\nCP-1.zip,1,CP,1075,3119,70,2,\"[3118, 3119]\"\r\nCP-12.zip,1,CP,1470,4020,54,2,\"[4020, 4021]\"\r\nNCP-26.zip,2,NCP,3997,5519,56,1,[5519]\r\nNCP-10.zip,2,NCP,274,1694,160,2,\"[1694, 1695]\"\r\nNormal-15.zip,0,Normal,2089,544,98,1,[544]\r\nCP-24.zip,1,CP,681,3043,102,1,[3043]\r\nNCP-20.zip,2,NCP,573,2305,63,2,\"[2304, 2305]\"\r\nCP-15.zip,1,CP,1557,4233,43,2,\"[4232, 4233]\"\r\nNCP-30.zip,2,NCP,990,2543,59,1,[2543]\r\nCP-7.zip,1,CP,1305,3640,20,2,\"[3640, 3641]\"\r\nNCP-5.zip,2,NCP,183,1510,123,2,\"[1510, 1511]\"\r\nCP-15.zip,1,CP,1582,4270,20,1,[4270]\r\nCP-29.zip,1,CP,3817,5761,25,1,[5761]\r\nNCP-20.zip,2,NCP,56,1245,164,2,\"[1245, 1246]\"\r\nNCP-21.zip,2,NCP,58,1250,55,2,\"[1249, 1250]\"\r\nCP-8.zip,1,CP,1335,3710,62,3,\"[3709, 3710, 3711]\"\r\nNormal-3.zip,0,Normal,1766,1149,60,3,\"[1149, 1150, 1151]\"\r\nNCP-10.zip,2,NCP,2716,2709,49,1,[2709]\r\nCP-10.zip,1,CP,1402,3865,131,3,\"[3865, 3866, 3867]\"\r\nCP-10.zip,1,CP,1391,3841,59,4,\"[3839, 3840, 3841, 3842]\"\r\nNormal-22.zip,0,Normal,2594,104,42,1,[104]\r\nCP-26.zip,1,CP,3733,5675,174,3,\"[5673, 5674, 5675]\"\r\nNormal-25.zip,0,Normal,3715,5345,30,1,[5345]\r\nNormal-3.zip,0,Normal,762,197,363,1,[197]\r\nNCP-15.zip,2,NCP,420,1994,71,2,\"[1993, 1994]\"\r\nNormal-12.zip,0,Normal,1996,451,90,1,[451]\r\nNCP-22.zip,2,NCP,885,2423,195,2,\"[2422, 2423]\"\r\nNCP-29.zip,2,NCP,921,2463,36,1,[2463]\r\nNormal-25.zip,0,Normal,3848,5360,192,1,[5360]\r\nCP-28.zip,1,CP,3776,5720,30,1,[5720]\r\nNCP-15.zip,2,NCP,402,1953,148,2,\"[1953, 1954]\"\r\nNormal-19.zip,0,Normal,2232,687,99,1,[687]\r\nCP-11.zip,1,CP,1447,3968,63,2,\"[3967, 3968]\"\r\nNormal-17.zip,0,Normal,2176,631,91,1,[631]\r\nNCP-12.zip,2,NCP,315,1778,46,2,\"[1777, 1778]\"\r\nCP-2.zip,1,CP,1102,3320,182,1,[3320]\r\nNCP-14.zip,2,NCP,373,1897,122,2,\"[1897, 1898]\"\r\nCP-4.zip,1,CP,1175,3393,189,1,[3393]\r\nNCP-14.zip,2,NCP,392,1934,143,2,\"[1934, 1935]\"\r\nCP-8.zip,1,CP,1321,3679,58,2,\"[3678, 3679]\"\r\nNCP-16.zip,2,NCP,430,2013,152,2,\"[2013, 
2014]\"\r\nNCP-26.zip,2,NCP,3988,5512,53,1,[5512]\r\nNormal-22.zip,0,Normal,2316,771,92,1,[771]\r\nCP-14.zip,1,CP,1531,4170,59,2,\"[4169, 4170]\"\r\nNormal-3.zip,0,Normal,748,183,261,1,[183]\r\nNCP-23.zip,2,NCP,943,2486,334,1,[2486]\r\nNormal-18.zip,0,Normal,2202,657,82,1,[657]\r\nCP-27.zip,1,CP,3735,5679,26,1,[5679]\r\nNCP-15.zip,2,NCP,409,1967,153,2,\"[1967, 1968]\"\r\nCP-4.zip,1,CP,1171,3389,180,1,[3389]\r\nCP-11.zip,1,CP,1452,3977,56,2,\"[3977, 3978]\"\r\nNormal-1.zip,0,Normal,1684,875,71,5,\"[870, 871, 873, 874, 875]\"\r\nCP-8.zip,1,CP,1333,3705,52,2,\"[3705, 3706]\"\r\nNCP-3.zip,2,NCP,135,1414,138,2,\"[1414, 1415]\"\r\nNCP-25.zip,2,NCP,3965,5506,53,1,[5506]\r\nNCP-8.zip,2,NCP,258,1662,135,2,\"[1662, 1663]\"\r\nNormal-10.zip,0,Normal,1926,381,87,1,[381]\r\nCP-16.zip,1,CP,1596,4284,22,1,[4284]\r\nCP-14.zip,1,CP,1554,4226,41,2,\"[4226, 4227]\"\r\nCP-26.zip,1,CP,3645,5605,38,1,[5605]\r\nCP-2.zip,1,CP,1110,3328,143,1,[3328]\r\nNCP-22.zip,2,NCP,81,1296,53,2,\"[1295, 1296]\"\r\nNormal-1.zip,0,Normal,1685,877,65,4,\"[877, 878, 879, 880]\"\r\nNCP-29.zip,2,NCP,923,2465,19,1,[2465]\r\nNCP-14.zip,2,NCP,399,1948,149,2,\"[1948, 1949]\"\r\nNCP-18.zip,2,NCP,510,2176,102,2,\"[2176, 2177]\"\r\nNCP-20.zip,2,NCP,558,2274,51,2,\"[2273, 2274]\"\r\nNormal-2.zip,0,Normal,1762,1131,70,2,\"[1130, 1131]\"\r\nCP-19.zip,1,CP,2434,2898,102,3,\"[2898, 2899, 2900]\"\r\nNormal-19.zip,0,Normal,2219,674,106,1,[674]\r\nNormal-8.zip,0,Normal,1869,324,94,1,[324]\r\nNCP-21.zip,2,NCP,70,1272,120,2,\"[1272, 1273]\"\r\nNCP-10.zip,2,NCP,2710,2703,48,1,[2703]\r\nNormal-9.zip,0,Normal,1904,359,94,1,[359]\r\nNCP-20.zip,2,NCP,564,2287,60,2,\"[2286, 2287]\"\r\nNCP-15.zip,2,NCP,424,2001,161,2,\"[2001, 2002]\"\r\nCP-14.zip,1,CP,1529,4166,42,3,\"[4165, 4166, 4167]\"\r\nNormal-16.zip,0,Normal,2138,593,72,1,[593]\r\nCP-16.zip,1,CP,1613,4301,27,1,[4301]\r\nCP-24.zip,1,CP,697,3059,114,1,[3059]\r\nCP-10.zip,1,CP,1390,3836,215,3,\"[3836, 3837, 
3838]\"\r\nNormal-6.zip,0,Normal,1805,260,79,1,[260]\r\nCP-10.zip,1,CP,1390,3837,56,3,\"[3836, 3837, 3838]\"\r\nCP-3.zip,1,CP,1150,3368,214,1,[3368]\r\nCP-2.zip,1,CP,1116,3334,183,1,[3334]\r\nNormal-14.zip,0,Normal,2057,512,78,1,[512]\r\nNCP-19.zip,2,NCP,532,2223,58,2,\"[2222, 2223]\"\r\nCP-29.zip,1,CP,3810,5754,24,1,[5754]\r\nCP-14.zip,1,CP,1539,4188,131,3,\"[4188, 4189, 4190]\"\r\nCP-10.zip,1,CP,1385,3826,64,2,\"[3825, 3826]\"\r\nNCP-29.zip,2,NCP,929,2471,21,1,[2471]\r\nNCP-28.zip,2,NCP,856,2377,229,2,\"[2376, 2377]\"\r\nNCP-15.zip,2,NCP,408,1966,55,2,\"[1965, 1966]\"\r\nCP-7.zip,1,CP,1319,3675,61,2,\"[3674, 3675]\"\r\nNCP-1.zip,2,NCP,1022,2591,48,1,[2591]\r\nNormal-20.zip,0,Normal,2254,709,75,1,[709]\r\nNCP-22.zip,2,NCP,862,2385,33,1,[2385]\r\nCP-29.zip,1,CP,3812,5756,27,1,[5756]\r\nCP-11.zip,1,CP,1447,3967,63,2,\"[3967, 3968]\"\r\nCP-15.zip,1,CP,1556,4230,40,2,\"[4230, 4231]\"\r\nCP-1.zip,1,CP,1080,3125,64,1,[3125]\r\nNormal-4.zip,0,Normal,778,213,114,1,[213]\r\nCP-14.zip,1,CP,1529,4167,42,3,\"[4165, 4166, 4167]\"\r\nCP-2.zip,1,CP,11,3167,283,7,\"[3161, 3162, 3163, 3164, 3165, 3166, 3167]\"\r\nNCP-20.zip,2,NCP,549,2256,36,2,\"[2255, 2256]\"\r\nNCP-3.zip,2,NCP,1292,2733,66,1,[2733]\r\nNormal-13.zip,0,Normal,2047,502,93,1,[502]\r\nNCP-20.zip,2,NCP,549,2255,83,2,\"[2255, 2256]\"\r\nCP-15.zip,1,CP,1563,4246,122,3,\"[4245, 4246, 4247]\"\r\nNCP-25.zip,2,NCP,3956,5469,49,1,[5469]\r\nNCP-22.zip,2,NCP,833,2346,484,1,[2346]\r\nCP-12.zip,1,CP,1487,4061,163,3,\"[4061, 4062, 4063]\"\r\nCP-7.zip,1,CP,1306,3642,52,3,\"[3642, 3643, 3644]\"\r\nNCP-17.zip,2,NCP,47,1228,58,2,\"[1227, 1228]\"\r\nCP-8.zip,1,CP,1338,3716,67,2,\"[3716, 3717]\"\r\nNormal-25.zip,0,Normal,3711,5341,27,1,[5341]\r\nNCP-16.zip,2,NCP,452,2059,63,1,[2059]\r\nNormal-23.zip,0,Normal,2604,114,36,1,[114]\r\nNCP-28.zip,2,NCP,849,2368,224,1,[2368]\r\nNCP-29.zip,2,NCP,886,2424,52,1,[2424]\r\nNCP-28.zip,2,NCP,875,2408,218,1,[2408]\r\nNCP-20.zip,2,NCP,573,2304,151,2,\"[2304, 
2305]\"\r\nNCP-22.zip,2,NCP,83,1300,70,2,\"[1299, 1300]\"\r\nNormal-14.zip,0,Normal,2056,511,84,1,[511]\r\nNormal-7.zip,0,Normal,1844,299,93,1,[299]\r\nCP-13.zip,1,CP,1494,4083,154,3,\"[4083, 4084, 4085]\"\r\nCP-5.zip,1,CP,1201,3419,171,1,[3419]\r\nNCP-23.zip,2,NCP,897,2438,40,1,[2438]\r\nNormal-27.zip,0,Normal,3914,5456,55,2,\"[5456, 5457]\"\r\nCP-9.zip,1,CP,1354,3751,181,3,\"[3751, 3752, 3753]\"\r\nNCP-29.zip,2,NCP,899,2440,34,2,\"[2440, 2441]\"\r\nCP-10.zip,1,CP,1414,3891,151,3,\"[3891, 3892, 3893]\"\r\nCP-14.zip,1,CP,1543,4202,57,3,\"[4200, 4201, 4202]\"\r\nNormal-25.zip,0,Normal,3837,5349,208,1,[5349]\r\nNCP-10.zip,2,NCP,272,1691,64,2,\"[1690, 1691]\"\r\nNormal-9.zip,0,Normal,1905,360,93,1,[360]\r\nCP-8.zip,1,CP,1340,3721,64,2,\"[3720, 3721]\"\r\nNCP-5.zip,2,NCP,19,1170,146,2,\"[1170, 1171]\"\r\nNormal-2.zip,0,Normal,1738,1041,75,1,[1041]\r\nNCP-2.zip,2,NCP,108,1354,58,2,\"[1353, 1354]\"\r\nNormal-25.zip,0,Normal,3844,5356,201,1,[5356]\r\nCP-20.zip,1,CP,2459,2945,108,1,[2945]\r\nCP-10.zip,1,CP,1414,3892,63,3,\"[3891, 3892, 3893]\"\r\nNormal-18.zip,0,Normal,2201,656,66,1,[656]\r\nNCP-21.zip,2,NCP,78,1289,166,2,\"[1289, 1290]\"\r\nCP-18.zip,1,CP,1776,3539,76,6,\"[3534, 3535, 3536, 3537, 3538, 3539]\"\r\nNCP-1.zip,2,NCP,1010,2572,126,2,\"[2572, 2573]\"\r\nCP-11.zip,1,CP,1441,3951,203,3,\"[3951, 3952, 3953]\"\r\nCP-13.zip,1,CP,1512,4125,50,2,\"[4125, 4126]\"\r\nCP-30.zip,1,CP,3934,5640,53,3,\"[5638, 5639, 5640]\"\r\nNCP-4.zip,2,NCP,143,1430,128,2,\"[1430, 1431]\"\r\nNormal-17.zip,0,Normal,2166,621,93,1,[621]\r\nNCP-22.zip,2,NCP,83,1299,167,2,\"[1299, 1300]\"\r\nCP-29.zip,1,CP,3804,5748,29,1,[5748]\r\nCP-22.zip,1,CP,624,2986,90,1,[2986]\r\nNCP-7.zip,2,NCP,231,1607,58,2,\"[1606, 1607]\"\r\nNCP-8.zip,2,NCP,258,1663,57,2,\"[1662, 1663]\"\r\nNormal-10.zip,0,Normal,1956,411,89,1,[411]\r\nNCP-4.zip,2,NCP,165,1475,55,2,\"[1474, 1475]\"\r\nNormal-2.zip,0,Normal,1753,1091,60,7,\"[1086, 1087, 1088, 1089, 1090, 1091, 
1092]\"\r\nCP-6.zip,1,CP,1247,3465,218,1,[3465]\r\nCP-17.zip,1,CP,1644,4332,23,1,[4332]\r\nNCP-5.zip,2,NCP,188,1520,134,2,\"[1520, 1521]\"\r\nCP-13.zip,1,CP,1509,4118,233,3,\"[4118, 4119, 4120]\"\r\nCP-19.zip,1,CP,2434,2899,102,3,\"[2898, 2899, 2900]\"\r\nNormal-27.zip,0,Normal,3914,5457,55,2,\"[5456, 5457]\"\r\nNCP-3.zip,2,NCP,133,1410,100,2,\"[1410, 1411]\"\r\nCP-24.zip,1,CP,690,3052,134,1,[3052]\r\nNCP-6.zip,2,NCP,208,1560,134,2,\"[1560, 1561]\"\r\nNormal-26.zip,0,Normal,3872,5384,29,1,[5384]\r\nCP-7.zip,1,CP,1258,3476,202,1,[3476]\r\nNCP-4.zip,2,NCP,154,1453,47,2,\"[1452, 1453]\"\r\nCP-8.zip,1,CP,1335,3709,207,3,\"[3709, 3710, 3711]\"\r\nCP-7.zip,1,CP,1305,3641,50,2,\"[3640, 3641]\"\r\nCP-25.zip,1,CP,716,3078,640,1,[3078]\r\nNormal-2.zip,0,Normal,1761,1125,45,5,\"[1125, 1126, 1127, 1128, 1129]\"\r\nNCP-14.zip,2,NCP,38,1209,57,2,\"[1208, 1209]\"\r\nNormal-1.zip,0,Normal,1685,878,65,4,\"[877, 878, 879, 880]\"\r\nNCP-17.zip,2,NCP,467,2090,58,2,\"[2089, 2090]\"\r\nCP-14.zip,1,CP,1539,4189,54,3,\"[4188, 4189, 4190]\"\r\nNCP-16.zip,2,NCP,454,2063,58,2,\"[2062, 2063]\"\r\nCP-13.zip,1,CP,1491,4076,48,3,\"[4074, 4075, 4076]\"\r\nNormal-4.zip,0,Normal,794,229,341,1,[229]\r\nNCP-19.zip,2,NCP,521,2199,58,2,\"[2198, 2199]\"\r\nCP-7.zip,1,CP,1311,3656,67,3,\"[3655, 3656, 3657]\"\r\nNormal-22.zip,0,Normal,2584,94,44,1,[94]\r\nCP-23.zip,1,CP,678,3040,46,1,[3040]\r\nCP-14.zip,1,CP,1539,4190,54,3,\"[4188, 4189, 4190]\"\r\nCP-30.zip,1,CP,3937,5644,55,2,\"[5643, 5644]\"\r\nNCP-15.zip,2,NCP,427,2007,132,2,\"[2007, 2008]\"\r\nNCP-28.zip,2,NCP,843,2358,279,1,[2358]\r\nNCP-14.zip,2,NCP,375,1903,49,3,\"[1901, 1902, 1903]\"\r\nNCP-11.zip,2,NCP,306,1759,153,2,\"[1759, 1760]\"\r\nNCP-16.zip,2,NCP,44,1221,124,2,\"[1221, 1222]\"\r\nNCP-8.zip,2,NCP,256,1659,58,2,\"[1658, 1659]\"\r\nCP-8.zip,1,CP,1338,3717,67,2,\"[3716, 3717]\"\r\nCP-18.zip,1,CP,1780,3553,67,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 
3566]\"\r\nCP-7.zip,1,CP,1267,3485,151,1,[3485]\r\nCP-13.zip,1,CP,1509,4119,118,3,\"[4118, 4119, 4120]\"\r\nNormal-3.zip,0,Normal,1766,1151,62,3,\"[1149, 1150, 1151]\"\r\nCP-10.zip,1,CP,1405,3873,60,2,\"[3872, 3873]\"\r\nCP-1.zip,1,CP,1079,3124,63,1,[3124]\r\nCP-18.zip,1,CP,1780,3559,69,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nNormal-7.zip,0,Normal,1852,307,94,1,[307]\r\nCP-5.zip,1,CP,1195,3413,247,1,[3413]\r\nNCP-20.zip,2,NCP,556,2270,53,2,\"[2269, 2270]\"\r\nNCP-2.zip,2,NCP,108,1353,139,2,\"[1353, 1354]\"\r\nNCP-16.zip,2,NCP,445,2045,58,2,\"[2044, 2045]\"\r\nCP-13.zip,1,CP,1512,4126,50,2,\"[4125, 4126]\"\r\nNCP-21.zip,2,NCP,64,1262,55,2,\"[1261, 1262]\"\r\nCP-5.zip,1,CP,1211,3429,143,1,[3429]\r\nNCP-1.zip,2,NCP,1042,2614,143,2,\"[2613, 2614]\"\r\nNCP-21.zip,2,NCP,73,1280,55,3,\"[1278, 1279, 1280]\"\r\nCP-9.zip,1,CP,1364,3776,133,3,\"[3776, 3777, 3778]\"\r\nNCP-21.zip,2,NCP,58,1249,131,2,\"[1249, 1250]\"\r\nCP-20.zip,1,CP,2668,3250,44,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nNCP-21.zip,2,NCP,73,1279,57,3,\"[1278, 1279, 1280]\"\r\nCP-26.zip,1,CP,3733,5674,159,3,\"[5673, 5674, 5675]\"\r\nNormal-19.zip,0,Normal,2247,702,86,1,[702]\r\nNCP-28.zip,2,NCP,867,2394,161,1,[2394]\r\nCP-22.zip,1,CP,633,2995,114,1,[2995]\r\nCP-9.zip,1,CP,1371,3796,60,3,\"[3794, 3795, 3796]\"\r\nNCP-22.zip,2,NCP,86,1305,117,2,\"[1305, 1306]\"\r\nNCP-14.zip,2,NCP,40,1213,63,2,\"[1212, 1213]\"\r\nNormal-26.zip,0,Normal,3892,5415,72,1,[5415]\r\nCP-7.zip,1,CP,1306,3644,237,3,\"[3642, 3643, 3644]\"\r\nCP-24.zip,1,CP,702,3064,78,1,[3064]\r\nNCP-26.zip,2,NCP,3975,5483,44,1,[5483]\r\nCP-4.zip,1,CP,1164,3382,193,1,[3382]\r\nNormal-11.zip,0,Normal,1960,415,98,1,[415]\r\nCP-5.zip,1,CP,1203,3421,231,1,[3421]\r\nCP-19.zip,1,CP,2434,2900,104,3,\"[2898, 2899, 2900]\"\r\nNCP-29.zip,2,NCP,890,2429,203,1,[2429]\r\nNCP-16.zip,2,NCP,448,2050,139,2,\"[2050, 2051]\"\r\nCP-18.zip,1,CP,1780,3555,60,14,\"[3553, 3554, 3555, 3556, 
3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nCP-12.zip,1,CP,1457,3991,69,1,[3991]\r\nNormal-3.zip,0,Normal,756,191,106,1,[191]\r\nNCP-29.zip,2,NCP,900,2442,506,1,[2442]\r\nNCP-17.zip,2,NCP,476,2107,127,2,\"[2107, 2108]\"\r\nCP-28.zip,1,CP,3794,5738,26,1,[5738]\r\nCP-23.zip,1,CP,669,3031,70,1,[3031]\r\nNormal-9.zip,0,Normal,1911,366,96,1,[366]\r\nNormal-9.zip,0,Normal,1919,374,99,1,[374]\r\nNCP-12.zip,2,NCP,335,1818,129,2,\"[1818, 1819]\"\r\nCP-18.zip,1,CP,1651,4339,31,1,[4339]\r\nNormal-4.zip,0,Normal,798,233,122,1,[233]\r\nNCP-18.zip,2,NCP,508,2173,61,2,\"[2172, 2173]\"\r\nNCP-21.zip,2,NCP,67,1266,168,2,\"[1266, 1267]\"\r\nNCP-6.zip,2,NCP,214,1573,60,2,\"[1572, 1573]\"\r\nCP-10.zip,1,CP,1405,3872,60,2,\"[3872, 3873]\"\r\nNCP-6.zip,2,NCP,208,1561,56,2,\"[1560, 1561]\"\r\nNCP-14.zip,2,NCP,373,1898,52,2,\"[1897, 1898]\"\r\nNCP-3.zip,2,NCP,1281,2722,65,1,[2722]\r\nCP-24.zip,1,CP,707,3069,72,1,[3069]\r\nNCP-28.zip,2,NCP,831,2344,278,1,[2344]\r\nNormal-17.zip,0,Normal,2179,634,101,1,[634]\r\nNCP-21.zip,2,NCP,60,1253,141,2,\"[1253, 1254]\"\r\nNCP-8.zip,2,NCP,259,1665,65,2,\"[1664, 1665]\"\r\nNCP-11.zip,2,NCP,311,1770,55,2,\"[1769, 1770]\"\r\nNCP-27.zip,2,NCP,1050,2623,46,2,\"[2623, 2624]\"\r\nNCP-18.zip,2,NCP,490,2137,62,2,\"[2136, 2137]\"\r\nNormal-27.zip,0,Normal,3900,5431,64,2,\"[5431, 5432]\"\r\nNormal-15.zip,0,Normal,2110,565,83,1,[565]\r\nNCP-13.zip,2,NCP,368,1887,129,2,\"[1887, 1888]\"\r\nNCP-27.zip,2,NCP,817,2326,120,1,[2326]\r\nCP-15.zip,1,CP,1567,4255,59,2,\"[4254, 4255]\"\r\nNCP-5.zip,2,NCP,178,1500,124,2,\"[1500, 1501]\"\r\nNCP-13.zip,2,NCP,345,1841,147,2,\"[1841, 1842]\"\r\nNormal-2.zip,0,Normal,1761,1128,60,5,\"[1125, 1126, 1127, 1128, 1129]\"\r\nCP-8.zip,1,CP,1343,3727,56,2,\"[3726, 3727]\"\r\nNCP-30.zip,2,NCP,936,2478,21,1,[2478]\r\nNCP-11.zip,2,NCP,306,1760,64,2,\"[1759, 1760]\"\r\nNCP-17.zip,2,NCP,487,2129,167,2,\"[2129, 2130]\"\r\nCP-30.zip,1,CP,3930,5629,62,2,\"[5628, 
5629]\"\r\nNCP-9.zip,2,NCP,2692,2700,48,1,[2700]\r\nNCP-20.zip,2,NCP,556,2269,125,2,\"[2269, 2270]\"\r\nCP-18.zip,1,CP,1775,3531,58,4,\"[3530, 3531, 3532, 3533]\"\r\nNCP-23.zip,2,NCP,896,2437,39,1,[2437]\r\nCP-21.zip,1,CP,5,3509,275,1,[3509]\r\nNormal-19.zip,0,Normal,2217,672,71,1,[672]\r\nNCP-1.zip,2,NCP,1010,2573,126,2,\"[2572, 2573]\"\r\nNCP-1.zip,2,NCP,100,1337,139,2,\"[1337, 1338]\"\r\nNCP-26.zip,2,NCP,3998,5495,41,1,[5495]\r\nCP-25.zip,1,CP,711,3073,112,1,[3073]\r\nCP-24.zip,1,CP,699,3061,64,1,[3061]\r\nCP-4.zip,1,CP,1173,3391,201,1,[3391]\r\nCP-27.zip,1,CP,3740,5684,23,1,[5684]\r\nCP-16.zip,1,CP,1590,4278,20,1,[4278]\r\nNormal-2.zip,0,Normal,1762,1130,70,2,\"[1130, 1131]\"\r\nNormal-1.zip,0,Normal,1679,833,66,6,\"[833, 834, 835, 836, 837, 838]\"\r\nNCP-29.zip,2,NCP,928,2470,25,1,[2470]\r\nCP-18.zip,1,CP,1775,3533,57,4,\"[3530, 3531, 3532, 3533]\"\r\nNormal-3.zip,0,Normal,766,201,94,1,[201]\r\nNormal-11.zip,0,Normal,1964,419,100,1,[419]\r\nNCP-9.zip,2,NCP,2690,2657,48,1,[2657]\r\nNCP-21.zip,2,NCP,78,1290,69,2,\"[1289, 1290]\"\r\nNormal-16.zip,0,Normal,2147,602,95,1,[602]\r\nNCP-19.zip,2,NCP,544,2246,62,2,\"[2245, 2246]\"\r\nNormal-27.zip,0,Normal,3900,5432,64,2,\"[5431, 5432]\"\r\nNormal-8.zip,0,Normal,1860,315,92,1,[315]\r\nCP-21.zip,1,CP,601,2963,104,1,[2963]\r\nCP-2.zip,1,CP,11,3164,287,7,\"[3161, 3162, 3163, 3164, 3165, 3166, 3167]\"\r\nCP-15.zip,1,CP,1563,4245,241,3,\"[4245, 4246, 4247]\"\r\nCP-19.zip,1,CP,1789,3205,59,4,\"[3204, 3205, 3206, 3207]\"\r\nCP-4.zip,1,CP,1176,3394,161,1,[3394]\r\nCP-10.zip,1,CP,1397,3855,60,2,\"[3854, 3855]\"\r\nCP-16.zip,1,CP,1594,4282,26,1,[4282]\r\nCP-1.zip,1,CP,1077,3121,74,2,\"[3121, 3122]\"\r\nCP-29.zip,1,CP,3819,5763,31,1,[5763]\r\nCP-12.zip,1,CP,1468,4016,54,3,\"[4015, 4016, 4017]\"\r\nCP-3.zip,1,CP,1139,3357,332,1,[3357]\r\nNormal-14.zip,0,Normal,2070,525,104,1,[525]\r\nNormal-1.zip,0,Normal,1672,798,78,8,\"[796, 797, 798, 799, 800, 801, 802, 803]\"\r\nCP-11.zip,1,CP,1435,3939,46,2,\"[3938, 
3939]\"\r\nCP-30.zip,1,CP,4019,5568,38,1,[5568]\r\nCP-18.zip,1,CP,1777,3540,67,5,\"[3540, 3541, 3542, 3543, 3544]\"\r\nCP-23.zip,1,CP,666,3028,192,1,[3028]\r\nNormal-1.zip,0,Normal,1703,959,70,2,\"[959, 960]\"\r\nCP-3.zip,1,CP,1133,3351,213,1,[3351]\r\nCP-13.zip,1,CP,1504,4107,64,1,[4107]\r\nNormal-3.zip,0,Normal,745,180,105,1,[180]\r\nNormal-26.zip,0,Normal,3869,5381,27,1,[5381]\r\nCP-18.zip,1,CP,1774,3528,58,8,\"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]\"\r\nNormal-21.zip,0,Normal,2301,756,88,1,[756]\r\nCP-18.zip,1,CP,1771,3519,51,4,\"[3518, 3519, 3520, 3521]\"\r\nCP-22.zip,1,CP,643,3005,126,1,[3005]\r\nCP-26.zip,1,CP,3723,5658,43,1,[5658]\r\nNormal-8.zip,0,Normal,1884,339,82,1,[339]\r\nCP-15.zip,1,CP,1586,4274,23,1,[4274]\r\nCP-8.zip,1,CP,1349,3743,58,3,\"[3742, 3743, 3744]\"\r\nNormal-22.zip,0,Normal,2586,96,30,1,[96]\r\nNormal-4.zip,0,Normal,785,220,292,1,[220]\r\nCP-19.zip,1,CP,2428,2887,124,1,[2887]\r\nNCP-13.zip,2,NCP,352,1856,58,2,\"[1855, 1856]\"\r\nNCP-2.zip,2,NCP,109,1355,143,2,\"[1355, 1356]\"\r\nCP-13.zip,1,CP,1493,4080,125,3,\"[4080, 4081, 4082]\"\r\nCP-4.zip,1,CP,1191,3409,220,1,[3409]\r\nCP-17.zip,1,CP,1642,4330,25,1,[4330]\r\nCP-7.zip,1,CP,1304,3635,232,8,\"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]\"\r\nNCP-27.zip,2,NCP,1058,2635,46,1,[2635]\r\nNormal-14.zip,0,Normal,2071,526,103,1,[526]\r\nCP-26.zip,1,CP,3719,5650,55,3,\"[5649, 5650, 5651]\"\r\nNormal-24.zip,0,Normal,2663,173,48,1,[173]\r\nNCP-3.zip,2,NCP,1298,2739,60,1,[2739]\r\nCP-19.zip,1,CP,2430,2891,102,2,\"[2891, 2892]\"\r\nCP-12.zip,1,CP,1458,3993,69,3,\"[3992, 3993, 3994]\"\r\nNormal-1.zip,0,Normal,1677,823,64,4,\"[823, 824, 825, 826]\"\r\nCP-12.zip,1,CP,1469,4018,47,2,\"[4018, 4019]\"\r\nCP-7.zip,1,CP,1268,3486,336,1,[3486]\r\nNormal-18.zip,0,Normal,2203,658,75,1,[658]\r\nCP-21.zip,1,CP,593,2955,100,1,[2955]\r\nNormal-16.zip,0,Normal,2143,598,87,1,[598]\r\nNCP-20.zip,2,NCP,552,2261,146,2,\"[2261, 2262]\"\r\nNCP-11.zip,2,NCP,309,1766,69,2,\"[1766, 
1765]\"\r\nNCP-19.zip,2,NCP,520,2197,55,2,\"[2196, 2197]\"\r\nCP-14.zip,1,CP,1550,4217,64,2,\"[4217, 4218]\"\r\nNCP-26.zip,2,NCP,3976,5484,32,1,[5484]\r\nNCP-31.zip,2,NCP,998,2555,44,1,[2555]\r\nNCP-2.zip,2,NCP,107,1351,146,2,\"[1351, 1352]\"\r\nNormal-16.zip,0,Normal,2136,591,83,1,[591]\r\nCP-12.zip,1,CP,1463,4006,49,2,\"[4005, 4006]\"\r\nNCP-4.zip,2,NCP,156,1457,58,2,\"[1456, 1457]\"\r\nNCP-1.zip,2,NCP,1002,2561,58,1,[2561]\r\nNormal-1.zip,0,Normal,1672,801,78,8,\"[796, 797, 798, 799, 800, 801, 802, 803]\"\r\nNormal-14.zip,0,Normal,2078,533,73,1,[533]\r\nNCP-5.zip,2,NCP,185,1514,121,2,\"[1514, 1515]\"\r\nCP-14.zip,1,CP,1530,4168,60,1,[4168]\r\nNCP-15.zip,2,NCP,413,1976,128,4,\"[1975, 1976, 1977, 1979]\"\r\nCP-5.zip,1,CP,1224,3442,204,1,[3442]\r\nCP-5.zip,1,CP,1215,3433,165,1,[3433]\r\nNormal-26.zip,0,Normal,3886,5399,76,1,[5399]\r\nNormal-24.zip,0,Normal,2640,150,41,1,[150]\r\nNCP-28.zip,2,NCP,836,2351,52,1,[2351]\r\nNCP-4.zip,2,NCP,146,1436,123,2,\"[1436, 1437]\"\r\nNormal-17.zip,0,Normal,2155,610,89,1,[610]\r\nCP-30.zip,1,CP,3939,5547,38,1,[5547]\r\nCP-19.zip,1,CP,1784,3590,112,4,\"[3590, 3591, 3592, 3593]\"\r\nCP-10.zip,1,CP,1399,3859,45,2,\"[3858, 3859]\"\r\nNCP-19.zip,2,NCP,519,2194,126,2,\"[2194, 2195]\"\r\nNCP-11.zip,2,NCP,297,1739,144,2,\"[1739, 1741]\"\r\nNCP-22.zip,2,NCP,88,1309,170,2,\"[1309, 1310]\"\r\nCP-18.zip,1,CP,1778,3547,65,6,\"[3545, 3546, 3547, 3548, 3549, 3550]\"\r\nNCP-30.zip,2,NCP,968,2511,61,1,[2511]\r\nCP-9.zip,1,CP,1360,3769,67,3,\"[3767, 3768, 3769]\"\r\nCP-26.zip,1,CP,3638,5597,285,1,[5597]\r\nNCP-13.zip,2,NCP,353,1857,167,2,\"[1857, 1858]\"\r\nCP-30.zip,1,CP,3932,5634,71,2,\"[5634, 5635]\"\r\nNCP-21.zip,2,NCP,62,1257,144,2,\"[1257, 1258]\"\r\nCP-2.zip,1,CP,1127,3345,278,1,[3345]\r\nNCP-12.zip,2,NCP,337,1823,58,2,\"[1822, 1823]\"\r\nNCP-14.zip,2,NCP,390,1931,53,2,\"[1930, 1931]\"\r\nNCP-15.zip,2,NCP,417,1988,58,2,\"[1987, 1988]\"\r\nCP-24.zip,1,CP,689,3051,58,1,[3051]\r\nCP-9.zip,1,CP,1377,3808,58,2,\"[3808, 
3809]\"\r\nCP-13.zip,1,CP,1505,4110,54,3,\"[4108, 4109, 4110]\"\r\nCP-13.zip,1,CP,1492,4078,58,3,\"[4077, 4078, 4079]\"\r\nNCP-4.zip,2,NCP,159,1463,61,2,\"[1462, 1463]\"\r\nNCP-6.zip,2,NCP,220,1585,67,2,\"[1584, 1585]\"\r\nNCP-29.zip,2,NCP,884,2421,23,1,[2421]\r\nNormal-3.zip,0,Normal,757,192,110,1,[192]\r\nCP-21.zip,1,CP,4,3505,298,4,\"[3505, 3506, 3507, 3508]\"\r\nCP-16.zip,1,CP,1608,4296,23,1,[4296]\r\nCP-4.zip,1,CP,1169,3387,171,1,[3387]\r\nNormal-4.zip,0,Normal,797,232,112,1,[232]\r\nNCP-19.zip,2,NCP,540,2238,54,2,\"[2237, 2238]\"\r\nNormal-14.zip,0,Normal,2068,523,81,1,[523]\r\nNormal-11.zip,0,Normal,1985,440,96,1,[440]\r\nCP-9.zip,1,CP,1353,3748,140,3,\"[3748, 3749, 3750]\"\r\nNCP-6.zip,2,NCP,224,1592,136,2,\"[1592, 1593]\"\r\nCP-10.zip,1,CP,1397,3854,60,2,\"[3854, 3855]\"\r\nNCP-12.zip,2,NCP,318,1784,63,2,\"[1783, 1784]\"\r\nNCP-21.zip,2,NCP,59,1251,122,2,\"[1251, 1252]\"\r\nNormal-17.zip,0,Normal,2184,639,86,1,[639]\r\nNCP-18.zip,2,NCP,493,2143,56,2,\"[2142, 2143]\"\r\nNCP-25.zip,2,NCP,3954,5467,42,1,[5467]\r\nNormal-2.zip,0,Normal,1763,1137,70,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nCP-23.zip,1,CP,675,3037,124,1,[3037]\r\nCP-9.zip,1,CP,1365,3780,60,3,\"[3779, 3780, 3781]\"\r\nCP-6.zip,1,CP,1256,3474,140,1,[3474]\r\nNCP-16.zip,2,NCP,441,2037,49,2,\"[2036, 2037]\"\r\nNCP-7.zip,2,NCP,2484,2643,46,1,[2643]\r\nCP-20.zip,1,CP,2771,3302,37,1,[3302]\r\nNCP-10.zip,2,NCP,2714,2707,53,1,[2707]\r\nNormal-4.zip,0,Normal,772,207,363,1,[207]\r\nNCP-16.zip,2,NCP,440,2035,53,2,\"[2034, 2035]\"\r\nCP-17.zip,1,CP,1646,4334,26,1,[4334]\r\nNCP-11.zip,2,NCP,284,1713,139,2,\"[1713, 1714]\"\r\nCP-23.zip,1,CP,656,3018,575,1,[3018]\r\nCP-2.zip,1,CP,1104,3322,164,1,[3322]\r\nNCP-22.zip,2,NCP,85,1303,139,2,\"[1303, 1304]\"\r\nCP-30.zip,1,CP,3933,5637,38,2,\"[5636, 5637]\"\r\nNormal-7.zip,0,Normal,1839,294,94,1,[294]\r\nNCP-6.zip,2,NCP,223,1590,132,2,\"[1590, 1591]\"\r\nCP-2.zip,1,CP,1119,3337,157,1,[3337]\r\nCP-11.zip,1,CP,1431,3931,61,2,\"[3930, 
3931]\"\r\nCP-7.zip,1,CP,1304,3634,47,8,\"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]\"\r\nNCP-11.zip,2,NCP,299,1745,58,2,\"[1744, 1745]\"\r\nNCP-15.zip,2,NCP,405,1960,60,2,\"[1959, 1960]\"\r\nNCP-20.zip,2,NCP,574,2307,58,2,\"[2306, 2307]\"\r\nCP-10.zip,1,CP,1412,3887,66,2,\"[3887, 3888]\"\r\nNCP-4.zip,2,NCP,167,1479,60,2,\"[1478, 1479]\"\r\nNCP-4.zip,2,NCP,157,1459,49,2,\"[1458, 1459]\"\r\nNCP-13.zip,2,NCP,349,1849,135,2,\"[1849, 1850]\"\r\nCP-18.zip,1,CP,1771,3520,51,4,\"[3518, 3519, 3520, 3521]\"\r\nNCP-14.zip,2,NCP,372,1895,109,2,\"[1895, 1896]\"\r\nNCP-18.zip,2,NCP,503,2162,146,2,\"[2162, 2163]\"\r\nNCP-6.zip,2,NCP,199,1543,58,2,\"[1542, 1543]\"\r\nCP-18.zip,1,CP,1662,4350,19,1,[4350]\r\nCP-9.zip,1,CP,1377,3809,57,2,\"[3808, 3809]\"\r\nNormal-1.zip,0,Normal,1727,1009,63,4,\"[1009, 1010, 1011, 1012]\"\r\nNCP-20.zip,2,NCP,566,2290,160,2,\"[2290, 2291]\"\r\nCP-29.zip,1,CP,3821,5765,29,1,[5765]\r\nNCP-5.zip,2,NCP,190,1525,64,2,\"[1524, 1525]\"\r\nNormal-2.zip,0,Normal,1746,1064,68,2,\"[1063, 1064]\"\r\nCP-27.zip,1,CP,3744,5688,17,1,[5688]\r\nCP-2.zip,1,CP,1111,3329,204,1,[3329]\r\nNormal-10.zip,0,Normal,1948,403,98,1,[403]\r\nNCP-12.zip,2,NCP,338,1824,150,2,\"[1824, 1825]\"\r\nNCP-13.zip,2,NCP,348,1847,112,2,\"[1847, 1848]\"\r\nCP-24.zip,1,CP,700,3062,86,1,[3062]\r\nCP-18.zip,1,CP,1655,4343,23,1,[4343]\r\nCP-27.zip,1,CP,3736,5680,16,1,[5680]\r\nNormal-24.zip,0,Normal,2654,164,31,1,[164]\r\nNCP-13.zip,2,NCP,359,1869,145,2,\"[1869, 1870]\"\r\nNCP-16.zip,2,NCP,437,2027,142,2,\"[2027, 2028]\"\r\nCP-27.zip,1,CP,3741,5685,17,1,[5685]\r\nCP-24.zip,1,CP,693,3055,273,1,[3055]\r\nCP-24.zip,1,CP,682,3044,149,1,[3044]\r\nNormal-17.zip,0,Normal,2175,630,80,1,[630]\r\nNCP-6.zip,2,NCP,223,1591,56,2,\"[1590, 1591]\"\r\nNCP-2.zip,2,NCP,1051,2626,178,2,\"[2625, 2626]\"\r\nCP-11.zip,1,CP,1454,3982,125,3,\"[3982, 3983, 
3984]\"\r\nNormal-20.zip,0,Normal,2253,708,70,1,[708]\r\nNormal-20.zip,0,Normal,2252,707,84,1,[707]\r\nNormal-21.zip,0,Normal,2308,763,85,1,[763]\r\nNCP-18.zip,2,NCP,516,2189,57,2,\"[2188, 2189]\"\r\nNCP-12.zip,2,NCP,313,1774,62,2,\"[1773, 1774]\"\r\nCP-2.zip,1,CP,1126,3344,204,1,[3344]\r\nNormal-20.zip,0,Normal,2257,712,83,1,[712]\r\nNCP-6.zip,2,NCP,203,1551,59,2,\"[1550, 1551]\"\r\nCP-13.zip,1,CP,1503,4106,64,3,\"[4104, 4105, 4106]\"\r\nNormal-20.zip,0,Normal,2280,735,82,1,[735]\r\nCP-19.zip,1,CP,2443,2915,112,3,\"[2915, 2916, 2917]\"\r\nCP-20.zip,1,CP,2451,2930,136,1,[2930]\r\nCP-1.zip,1,CP,1093,3311,173,1,[3311]\r\nCP-13.zip,1,CP,1518,4138,160,3,\"[4138, 4139, 4140]\"\r\nCP-20.zip,1,CP,2773,3304,30,1,[3304]\r\nNCP-15.zip,2,NCP,414,1981,51,2,\"[1980, 1981]\"\r\nNCP-23.zip,2,NCP,96,1328,145,2,\"[1328, 1329]\"\r\nCP-11.zip,1,CP,1422,3909,59,3,\"[3908, 3909, 3910]\"\r\nNormal-20.zip,0,Normal,2258,713,74,1,[713]\r\nNCP-29.zip,2,NCP,882,2417,52,2,\"[2417, 2418]\"\r\nNormal-2.zip,0,Normal,1737,1038,79,4,\"[1037, 1038, 1039, 1040]\"\r\nNormal-13.zip,0,Normal,2025,480,101,1,[480]\r\nNCP-5.zip,2,NCP,173,1490,139,2,\"[1490, 1491]\"\r\nCP-6.zip,1,CP,1257,3475,155,1,[3475]\r\nNCP-23.zip,2,NCP,952,2495,379,1,[2495]\r\nNormal-1.zip,0,Normal,1700,954,64,2,\"[953, 954]\"\r\nNCP-17.zip,2,NCP,465,2085,31,3,\"[2084, 2085, 2086]\"\r\nNormal-16.zip,0,Normal,2122,577,85,1,[577]\r\nCP-13.zip,1,CP,1502,4102,73,2,\"[4102, 4103]\"\r\nNormal-17.zip,0,Normal,2153,608,82,1,[608]\r\nNormal-24.zip,0,Normal,2650,160,40,1,[160]\r\nNCP-27.zip,2,NCP,1031,2602,231,2,\"[2601, 2602]\"\r\nNCP-14.zip,2,NCP,393,1937,62,2,\"[1936, 1937]\"\r\nCP-5.zip,1,CP,12,3169,233,2,\"[3168, 3169]\"\r\nNormal-11.zip,0,Normal,1986,441,88,1,[441]\r\nCP-19.zip,1,CP,2433,2897,108,1,[2897]\r\nNCP-4.zip,2,NCP,151,1447,54,2,\"[1446, 1447]\"\r\nNCP-13.zip,2,NCP,370,1891,128,2,\"[1891, 1892]\"\r\nNormal-17.zip,0,Normal,2168,623,89,1,[623]\r\nNCP-29.zip,2,NCP,880,2415,312,1,[2415]\r\nNCP-12.zip,2,NCP,338,1825,63,2,\"[1824, 
1825]\"\r\nNormal-23.zip,0,Normal,2634,144,37,1,[144]\r\nNCP-14.zip,2,NCP,396,1942,170,2,\"[1942, 1943]\"\r\nNCP-16.zip,2,NCP,439,2032,162,2,\"[2032, 2033]\"\r\nNCP-8.zip,2,NCP,266,1678,137,2,\"[1678, 1679]\"\r\nCP-11.zip,1,CP,1423,3911,204,3,\"[3911, 3912, 3913]\"\r\nCP-11.zip,1,CP,1454,3984,53,3,\"[3982, 3983, 3984]\"\r\nCP-28.zip,1,CP,3792,5736,20,1,[5736]\r\nNormal-1.zip,0,Normal,1727,1011,66,4,\"[1009, 1010, 1011, 1012]\"\r\nNormal-19.zip,0,Normal,2234,689,89,1,[689]\r\nNCP-13.zip,2,NCP,35,1203,58,2,\"[1202, 1203]\"\r\nNCP-18.zip,2,NCP,51,1236,59,2,\"[1235, 1236]\"\r\nNCP-2.zip,2,NCP,113,1368,58,2,\"[1367, 1368]\"\r\nNormal-2.zip,0,Normal,1757,1107,68,4,\"[1105, 1106, 1107, 1108]\"\r\nNCP-12.zip,2,NCP,319,1785,158,2,\"[1785, 1787]\"\r\nNormal-22.zip,0,Normal,2322,777,88,1,[777]\r\nCP-21.zip,1,CP,584,2946,116,1,[2946]\r\nCP-9.zip,1,CP,1365,3781,60,3,\"[3779, 3780, 3781]\"\r\nNCP-12.zip,2,NCP,322,1792,120,2,\"[1792, 1793]\"\r\nNormal-2.zip,0,Normal,1763,1140,75,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nNCP-21.zip,2,NCP,59,1252,52,2,\"[1251, 1252]\"\r\nNCP-5.zip,2,NCP,170,1485,59,2,\"[1484, 1485]\"\r\nNCP-21.zip,2,NCP,72,1276,129,2,\"[1276, 1277]\"\r\nNCP-22.zip,2,NCP,887,2425,38,1,[2425]\r\nCP-2.zip,1,CP,1117,3335,155,1,[3335]\r\nNormal-2.zip,0,Normal,1763,1134,70,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nCP-18.zip,1,CP,1778,3550,64,6,\"[3545, 3546, 3547, 3548, 3549, 3550]\"\r\nCP-23.zip,1,CP,664,3026,78,1,[3026]\r\nCP-23.zip,1,CP,668,3030,102,1,[3030]\r\nNCP-13.zip,2,NCP,355,1862,53,2,\"[1861, 1862]\"\r\nNCP-13.zip,2,NCP,358,1867,160,2,\"[1867, 1868]\"\r\nCP-14.zip,1,CP,1550,4218,64,2,\"[4217, 4218]\"\r\nCP-26.zip,1,CP,3729,5667,207,3,\"[5665, 5666, 5667]\"\r\nCP-21.zip,1,CP,603,2965,88,1,[2965]\r\nNCP-13.zip,2,NCP,370,1892,54,2,\"[1891, 1892]\"\r\nNCP-13.zip,2,NCP,35,1202,139,2,\"[1202, 
1203]\"\r\nCP-3.zip,1,CP,1155,3373,171,1,[3373]\r\nNormal-10.zip,0,Normal,1927,382,99,1,[382]\r\nCP-15.zip,1,CP,1574,4262,26,1,[4262]\r\nCP-13.zip,1,CP,1498,4096,60,2,\"[4095, 4096]\"\r\nNCP-6.zip,2,NCP,205,1555,53,2,\"[1554, 1555]\"\r\nNCP-11.zip,2,NCP,301,1748,147,2,\"[1748, 1749]\"\r\nNCP-11.zip,2,NCP,303,1752,139,2,\"[1752, 1753]\"\r\nCP-12.zip,1,CP,1468,4017,54,3,\"[4015, 4016, 4017]\"\r\nNormal-14.zip,0,Normal,2081,536,93,1,[536]\r\nNormal-2.zip,0,Normal,1763,1141,75,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nNCP-22.zip,2,NCP,859,2380,299,2,\"[2380, 2381]\"\r\nNormal-26.zip,0,Normal,3885,5398,63,1,[5398]\r\nCP-13.zip,1,CP,1505,4109,54,3,\"[4108, 4109, 4110]\"\r\nNCP-1.zip,2,NCP,103,1343,150,2,\"[1343, 1344]\"\r\nNCP-14.zip,2,NCP,396,1943,71,2,\"[1942, 1943]\"\r\nNCP-22.zip,2,NCP,871,2402,293,2,\"[2401, 2402]\"\r\nNormal-10.zip,0,Normal,1951,406,105,1,[406]\r\nCP-11.zip,1,CP,1434,3936,63,2,\"[3936, 3937]\"\r\nCP-26.zip,1,CP,3724,5659,51,1,[5659]\r\nCP-12.zip,1,CP,1471,4022,56,2,\"[4022, 4023]\"\r\nNormal-21.zip,0,Normal,2304,759,110,1,[759]\r\nCP-28.zip,1,CP,3777,5721,26,1,[5721]\r\nNCP-28.zip,2,NCP,837,2352,57,1,[2352]\r\nNormal-2.zip,0,Normal,1763,1133,72,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nNormal-8.zip,0,Normal,1873,328,104,1,[328]\r\nCP-12.zip,1,CP,1458,3992,165,3,\"[3992, 3993, 3994]\"\r\nNCP-7.zip,2,NCP,230,1604,139,2,\"[1604, 1605]\"\r\nCP-30.zip,1,CP,4042,5591,37,1,[5591]\r\nNormal-4.zip,0,Normal,774,209,134,1,[209]\r\nNormal-19.zip,0,Normal,2228,683,85,1,[683]\r\nNormal-18.zip,0,Normal,2206,661,77,1,[661]\r\nCP-17.zip,1,CP,1628,4316,23,1,[4316]\r\nNormal-11.zip,0,Normal,1969,424,90,1,[424]\r\nNormal-20.zip,0,Normal,2259,714,97,1,[714]\r\nCP-17.zip,1,CP,1640,4328,25,1,[4328]\r\nNCP-8.zip,2,NCP,254,1654,139,2,\"[1654, 1655]\"\r\nNormal-16.zip,0,Normal,2140,595,88,1,[595]\r\nCP-6.zip,1,CP,1249,3467,144,1,[3467]\r\nNCP-23.zip,2,NCP,92,1321,37,2,\"[1320, 
1321]\"\r\nCP-18.zip,1,CP,1657,4345,24,1,[4345]\r\nNCP-17.zip,2,NCP,484,2124,58,2,\"[2123, 2124]\"\r\nNormal-2.zip,0,Normal,1743,1057,73,2,\"[1056, 1057]\"\r\nCP-18.zip,1,CP,1778,3545,66,6,\"[3545, 3546, 3547, 3548, 3549, 3550]\"\r\nNCP-30.zip,2,NCP,966,2509,279,1,[2509]\r\nCP-9.zip,1,CP,1376,3807,60,2,\"[3806, 3807]\"\r\nNormal-1.zip,0,Normal,1716,987,71,2,\"[987, 988]\"\r\nCP-7.zip,1,CP,1302,3602,42,4,\"[3602, 3603, 3604, 3605]\"\r\nNCP-18.zip,2,NCP,50,1233,141,2,\"[1233, 1234]\"\r\nCP-32.zip,1,CP,1781,3572,65,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nNCP-5.zip,2,NCP,192,1528,135,2,\"[1528, 1529]\"\r\nNCP-7.zip,2,NCP,2489,2646,40,1,[2646]\r\nCP-11.zip,1,CP,1434,3937,63,2,\"[3936, 3937]\"\r\nCP-23.zip,1,CP,645,3007,124,1,[3007]\r\nNormal-10.zip,0,Normal,1941,396,91,1,[396]\r\nNormal-12.zip,0,Normal,2001,456,86,1,[456]\r\nNormal-3.zip,0,Normal,761,196,120,1,[196]\r\nCP-7.zip,1,CP,1265,3483,166,1,[3483]\r\nNCP-3.zip,2,NCP,1287,2728,66,1,[2728]\r\nNCP-28.zip,2,NCP,835,2350,52,2,\"[2349, 2350]\"\r\nNCP-19.zip,2,NCP,543,2243,128,2,\"[2243, 2244]\"\r\nCP-21.zip,1,CP,4,3507,259,4,\"[3505, 3506, 3507, 3508]\"\r\nCP-17.zip,1,CP,1633,4321,26,1,[4321]\r\nNCP-20.zip,2,NCP,565,2289,57,2,\"[2288, 2289]\"\r\nNCP-22.zip,2,NCP,878,2412,46,2,\"[2412, 2413]\"\r\nCP-14.zip,1,CP,1520,4144,57,3,\"[4143, 4144, 4145]\"\r\nNormal-23.zip,0,Normal,2620,130,36,1,[130]\r\nNCP-23.zip,2,NCP,958,2501,133,1,[2501]\r\nCP-13.zip,1,CP,1513,4128,60,2,\"[4127, 4128]\"\r\nNCP-24.zip,2,NCP,98,1332,139,2,\"[1332, 1333]\"\r\nCP-9.zip,1,CP,1375,3804,60,2,\"[3804, 3805]\"\r\nNCP-2.zip,2,NCP,1051,2625,88,2,\"[2625, 2626]\"\r\nNCP-31.zip,2,NCP,999,2556,41,1,[2556]\r\nCP-18.zip,1,CP,1781,3575,78,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nNCP-10.zip,2,NCP,278,1703,57,2,\"[1702, 1703]\"\r\nNCP-12.zip,2,NCP,313,1773,147,2,\"[1773, 1774]\"\r\nNCP-14.zip,2,NCP,381,1915,60,2,\"[1914, 1915]\"\r\nNCP-11.zip,2,NCP,295,1735,236,2,\"[1735, 
1736]\"\r\nCP-11.zip,1,CP,1440,3948,196,3,\"[3948, 3949, 3950]\"\r\nCP-19.zip,1,CP,1795,3597,41,2,\"[3596, 3597]\"\r\nCP-12.zip,1,CP,1467,4013,60,2,\"[4013, 4014]\"\r\nNCP-12.zip,2,NCP,322,1793,51,2,\"[1792, 1793]\"\r\nCP-9.zip,1,CP,1353,3750,59,3,\"[3748, 3749, 3750]\"\r\nCP-19.zip,1,CP,1784,3591,50,4,\"[3590, 3591, 3592, 3593]\"\r\nNCP-9.zip,2,NCP,2699,2665,51,1,[2665]\r\nNCP-12.zip,2,NCP,331,1810,158,2,\"[1810, 1811]\"\r\nNCP-12.zip,2,NCP,334,1817,59,2,\"[1816, 1817]\"\r\nNCP-1.zip,2,NCP,1009,2571,29,2,\"[2570, 2571]\"\r\nCP-30.zip,1,CP,4041,5590,31,1,[5590]\r\nCP-24.zip,1,CP,705,3067,168,1,[3067]\r\nNormal-24.zip,0,Normal,2665,175,33,1,[175]\r\nNCP-12.zip,2,NCP,332,1813,70,2,\"[1812, 1813]\"\r\nCP-11.zip,1,CP,1444,3962,58,3,\"[3960, 3961, 3962]\"\r\nCP-22.zip,1,CP,614,2976,100,1,[2976]\r\nNormal-23.zip,0,Normal,2630,140,38,1,[140]\r\nNormal-8.zip,0,Normal,1876,331,97,1,[331]\r\nNCP-1.zip,2,NCP,1001,2559,141,1,[2559]\r\nNCP-22.zip,2,NCP,845,2361,148,4,\"[2360, 2361, 2362, 2363]\"\r\nCP-26.zip,1,CP,3646,5606,36,1,[5606]\r\nNormal-9.zip,0,Normal,1907,362,92,1,[362]\r\nNormal-1.zip,0,Normal,1672,800,78,8,\"[796, 797, 798, 799, 800, 801, 802, 803]\"\r\nNCP-12.zip,2,NCP,333,1815,68,2,\"[1814, 1815]\"\r\nCP-17.zip,1,CP,1634,4322,23,1,[4322]\r\nNormal-12.zip,0,Normal,2009,464,93,1,[464]\r\nCP-26.zip,1,CP,3731,5670,215,1,[5670]\r\nNormal-25.zip,0,Normal,3714,5344,22,1,[5344]\r\nNormal-19.zip,0,Normal,2231,686,85,1,[686]\r\nNCP-23.zip,2,NCP,940,2483,22,1,[2483]\r\nNormal-25.zip,0,Normal,3851,5363,201,1,[5363]\r\nNCP-6.zip,2,NCP,209,1562,139,2,\"[1562, 1563]\"\r\nNCP-13.zip,2,NCP,347,1846,53,2,\"[1845, 1846]\"\r\nNCP-11.zip,2,NCP,312,1772,62,2,\"[1771, 1772]\"\r\nCP-5.zip,1,CP,1196,3414,186,1,[3414]\r\nNCP-21.zip,2,NCP,74,1282,54,2,\"[1281, 1282]\"\r\nCP-23.zip,1,CP,662,3024,114,1,[3024]\r\nNCP-7.zip,2,NCP,23,1177,151,2,\"[1177, 
1178]\"\r\nCP-16.zip,1,CP,1591,4279,23,1,[4279]\r\nNormal-12.zip,0,Normal,1995,450,95,1,[450]\r\nNormal-20.zip,0,Normal,2264,719,82,1,[719]\r\nNCP-30.zip,2,NCP,948,2491,365,1,[2491]\r\nNormal-12.zip,0,Normal,1998,453,99,1,[453]\r\nNCP-19.zip,2,NCP,522,2201,58,2,\"[2200, 2201]\"\r\nCP-13.zip,1,CP,1510,4121,60,2,\"[4121, 4122]\"\r\nNCP-15.zip,2,NCP,406,1962,61,2,\"[1961, 1962]\"\r\nNCP-4.zip,2,NCP,162,1468,148,2,\"[1468, 1469]\"\r\nCP-11.zip,1,CP,1431,3930,61,2,\"[3930, 3931]\"\r\nCP-15.zip,1,CP,1569,4257,20,1,[4257]\r\nCP-9.zip,1,CP,1379,3813,52,2,\"[3812, 3813]\"\r\nNCP-30.zip,2,NCP,981,2525,40,2,\"[2525, 2526]\"\r\nNCP-8.zip,2,NCP,2679,2650,42,1,[2650]\r\nNCP-25.zip,2,NCP,3951,5465,43,1,[5465]\r\nNCP-7.zip,2,NCP,2460,2684,36,1,[2684]\r\nCP-25.zip,1,CP,734,3096,106,1,[3096]\r\nNCP-6.zip,2,NCP,209,1563,58,2,\"[1562, 1563]\"\r\nNormal-22.zip,0,Normal,2593,103,38,1,[103]\r\nNCP-16.zip,2,NCP,438,2029,149,2,\"[2029, 2030]\"\r\nCP-7.zip,1,CP,1304,3638,43,8,\"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]\"\r\nNormal-8.zip,0,Normal,1885,340,101,1,[340]\r\nNCP-17.zip,2,NCP,484,2123,137,2,\"[2123, 2124]\"\r\nNCP-20.zip,2,NCP,565,2288,135,2,\"[2288, 2289]\"\r\nNCP-5.zip,2,NCP,185,1515,51,2,\"[1514, 1515]\"\r\nNCP-29.zip,2,NCP,877,2411,65,1,[2411]\r\nNCP-6.zip,2,NCP,216,1577,58,2,\"[1576, 1577]\"\r\nNormal-24.zip,0,Normal,2658,168,37,1,[168]\r\nCP-28.zip,1,CP,3779,5723,26,1,[5723]\r\nNormal-15.zip,0,Normal,2090,545,83,1,[545]\r\nNormal-2.zip,0,Normal,1750,1077,69,3,\"[1074, 1077, 1078]\"\r\nNCP-24.zip,2,NCP,98,1333,58,2,\"[1332, 1333]\"\r\nCP-5.zip,1,CP,1199,3417,180,1,[3417]\r\nCP-3.zip,1,CP,1146,3364,161,1,[3364]\r\nCP-11.zip,1,CP,1449,3971,50,2,\"[3971, 3972]\"\r\nNormal-3.zip,0,Normal,1767,1154,66,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nNormal-22.zip,0,Normal,2585,95,41,1,[95]\r\nCP-29.zip,1,CP,3816,5760,29,1,[5760]\r\nNCP-21.zip,2,NCP,62,1258,60,2,\"[1257, 
1258]\"\r\nNCP-2.zip,2,NCP,1056,2632,473,1,[2632]\r\nNCP-19.zip,2,NCP,525,2206,144,2,\"[2206, 2207]\"\r\nNormal-22.zip,0,Normal,2600,110,41,1,[110]\r\nCP-3.zip,1,CP,1161,3379,310,1,[3379]\r\nNCP-12.zip,2,NCP,316,1779,139,2,\"[1779, 1780]\"\r\nNCP-28.zip,2,NCP,868,2396,200,2,\"[2395, 2396]\"\r\nCP-7.zip,1,CP,1301,3600,52,4,\"[3598, 3599, 3600, 3601]\"\r\nNCP-11.zip,2,NCP,301,1749,62,2,\"[1748, 1749]\"\r\nNormal-9.zip,0,Normal,1917,372,96,1,[372]\r\nNCP-20.zip,2,NCP,571,2300,163,2,\"[2300, 2301]\"\r\nNormal-3.zip,0,Normal,1767,1152,68,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nNormal-1.zip,0,Normal,1716,988,71,2,\"[987, 988]\"\r\nNCP-28.zip,2,NCP,842,2357,42,1,[2357]\r\nNCP-27.zip,2,NCP,309,1765,162,2,\"[1766, 1765]\"\r\nCP-12.zip,1,CP,1479,4040,60,3,\"[4039, 4040, 4041]\"\r\nNCP-6.zip,2,NCP,22,1175,163,2,\"[1175, 1176]\"\r\nNCP-28.zip,2,NCP,868,2395,51,2,\"[2395, 2396]\"\r\nCP-14.zip,1,CP,1532,4171,50,2,\"[4171, 4172]\"\r\nNormal-11.zip,0,Normal,1984,439,86,1,[439]\r\nNormal-24.zip,0,Normal,2643,153,39,1,[153]\r\nCP-20.zip,1,CP,2765,3296,42,1,[3296]\r\nNormal-2.zip,0,Normal,1763,1132,72,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nNCP-2.zip,2,NCP,109,1356,60,2,\"[1355, 1356]\"\r\nNCP-7.zip,2,NCP,241,1628,55,2,\"[1627, 1628]\"\r\nNormal-22.zip,0,Normal,2587,97,44,1,[97]\r\nCP-20.zip,1,CP,2753,3284,37,1,[3284]\r\nNormal-1.zip,0,Normal,1670,790,63,6,\"[787, 788, 789, 790, 791, 792]\"\r\nNormal-15.zip,0,Normal,2103,558,88,1,[558]\r\nCP-13.zip,1,CP,1503,4104,64,3,\"[4104, 4105, 4106]\"\r\nNormal-21.zip,0,Normal,2313,768,94,1,[768]\r\nCP-9.zip,1,CP,1382,3818,200,3,\"[3818, 3819, 3820]\"\r\nNormal-2.zip,0,Normal,1756,1102,64,4,\"[1101, 1102, 1103, 1104]\"\r\nNCP-12.zip,2,NCP,334,1816,140,2,\"[1816, 1817]\"\r\nCP-13.zip,1,CP,1518,4140,67,3,\"[4138, 4139, 4140]\"\r\nCP-13.zip,1,CP,1492,4077,139,3,\"[4077, 4078, 
4079]\"\r\nNormal-11.zip,0,Normal,1982,437,99,1,[437]\r\nNCP-6.zip,2,NCP,213,1570,159,2,\"[1570, 1571]\"\r\nCP-18.zip,1,CP,1779,3551,59,2,\"[3551, 3552]\"\r\nNCP-12.zip,2,NCP,321,1790,122,2,\"[1790, 1791]\"\r\nNCP-4.zip,2,NCP,159,1462,144,2,\"[1462, 1463]\"\r\nCP-24.zip,1,CP,684,3046,161,1,[3046]\r\nCP-29.zip,1,CP,3828,5772,26,1,[5772]\r\nCP-12.zip,1,CP,1462,4004,51,3,\"[4002, 4003, 4004]\"\r\nNormal-1.zip,0,Normal,1707,969,65,2,\"[969, 970]\"\r\nCP-24.zip,1,CP,685,3047,168,1,[3047]\r\nNCP-16.zip,2,NCP,444,2043,61,2,\"[2042, 2043]\"\r\nCP-19.zip,1,CP,2430,2892,106,2,\"[2891, 2892]\"\r\nNormal-25.zip,0,Normal,3857,5369,222,1,[5369]\r\nCP-28.zip,1,CP,3774,5718,20,1,[5718]\r\nCP-21.zip,1,CP,591,2953,124,1,[2953]\r\nNormal-1.zip,0,Normal,1670,792,66,6,\"[787, 788, 789, 790, 791, 792]\"\r\nNCP-14.zip,2,NCP,387,1925,54,2,\"[1924, 1925]\"\r\nCP-10.zip,1,CP,14,3515,115,1,[3515]\r\nNCP-4.zip,2,NCP,16,1164,113,2,\"[1164, 1165]\"\r\nNormal-17.zip,0,Normal,2162,617,96,1,[617]\r\nCP-13.zip,1,CP,1513,4127,60,2,\"[4127, 4128]\"\r\nNCP-11.zip,2,NCP,300,1746,139,2,\"[1746, 1747]\"\r\nNCP-21.zip,2,NCP,577,2312,61,2,\"[2311, 2312]\"\r\nNormal-8.zip,0,Normal,1875,330,93,1,[330]\r\nNormal-27.zip,0,Normal,3906,5439,62,1,[5439]\r\nNCP-7.zip,2,NCP,249,1645,58,2,\"[1644, 1645]\"\r\nNCP-20.zip,2,NCP,552,2262,61,2,\"[2261, 2262]\"\r\nNCP-9.zip,2,NCP,2701,2667,56,1,[2667]\r\nNCP-15.zip,2,NCP,417,1987,139,2,\"[1987, 1988]\"\r\nNCP-9.zip,2,NCP,2705,2671,56,1,[2671]\r\nNormal-3.zip,0,Normal,1767,1160,71,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nCP-15.zip,1,CP,1585,4273,23,1,[4273]\r\nCP-27.zip,1,CP,3742,5686,17,1,[5686]\r\nCP-14.zip,1,CP,1521,4146,57,2,\"[4146, 4147]\"\r\nNormal-1.zip,0,Normal,1703,960,70,2,\"[959, 960]\"\r\nCP-21.zip,1,CP,6,3510,36,1,[3510]\r\nNCP-19.zip,2,NCP,54,1242,62,2,\"[1241, 1242]\"\r\nNCP-5.zip,2,NCP,17,1166,143,2,\"[1166, 1167]\"\r\nNCP-15.zip,2,NCP,413,1977,47,4,\"[1975, 1976, 1977, 
1979]\"\r\nNCP-22.zip,2,NCP,845,2360,53,4,\"[2360, 2361, 2362, 2363]\"\r\nNCP-2.zip,2,NCP,120,1381,139,2,\"[1381, 1382]\"\r\nCP-5.zip,1,CP,1207,3425,189,1,[3425]\r\nCP-27.zip,1,CP,3758,5702,23,1,[5702]\r\nCP-16.zip,1,CP,1592,4280,25,1,[4280]\r\nCP-21.zip,1,CP,4,3506,275,4,\"[3505, 3506, 3507, 3508]\"\r\nNCP-21.zip,2,NCP,72,1277,55,2,\"[1276, 1277]\"\r\nNCP-17.zip,2,NCP,475,2105,156,2,\"[2105, 2106]\"\r\nNCP-13.zip,2,NCP,358,1868,67,2,\"[1867, 1868]\"\r\nNormal-3.zip,0,Normal,764,199,130,1,[199]\r\nCP-9.zip,1,CP,1358,3763,63,3,\"[3761, 3762, 3763]\"\r\nNCP-4.zip,2,NCP,169,1483,56,2,\"[1482, 1483]\"\r\nNormal-1.zip,0,Normal,1707,970,65,2,\"[969, 970]\"\r\nNCP-18.zip,2,NCP,502,2160,140,2,\"[2160, 2161]\"\r\nCP-18.zip,1,CP,1781,3568,67,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nNCP-10.zip,2,NCP,2727,2683,44,1,[2683]\r\nCP-26.zip,1,CP,3719,5651,277,3,\"[5649, 5650, 5651]\"\r\nCP-11.zip,1,CP,1422,3910,58,3,\"[3908, 3909, 3910]\"\r\nNCP-4.zip,2,NCP,168,1480,139,2,\"[1480, 1481]\"\r\nCP-8.zip,1,CP,1329,3695,89,3,\"[3695, 3696, 3697]\"\r\nCP-12.zip,1,CP,1463,4005,49,2,\"[4005, 4006]\"\r\nNormal-27.zip,0,Normal,3915,5458,70,1,[5458]\r\nNormal-18.zip,0,Normal,2209,664,82,1,[664]\r\nCP-13.zip,1,CP,1492,4079,58,3,\"[4077, 4078, 4079]\"\r\nCP-30.zip,1,CP,3830,5774,29,1,[5774]\r\nCP-8.zip,1,CP,1329,3696,45,3,\"[3695, 3696, 3697]\"\r\nNormal-16.zip,0,Normal,2139,594,87,1,[594]\r\nNCP-14.zip,2,NCP,393,1936,149,2,\"[1936, 1937]\"\r\nCP-21.zip,1,CP,4,3508,290,4,\"[3505, 3506, 3507, 3508]\"\r\nNormal-2.zip,0,Normal,1737,1037,79,4,\"[1037, 1038, 1039, 1040]\"\r\nNCP-25.zip,2,NCP,3708,5535,59,1,[5535]\r\nCP-7.zip,1,CP,1301,3601,276,4,\"[3598, 3599, 3600, 3601]\"\r\nNCP-7.zip,2,NCP,249,1644,139,2,\"[1644, 1645]\"\r\nNCP-12.zip,2,NCP,339,1827,51,2,\"[1826, 1827]\"\r\nNCP-2.zip,2,NCP,1275,2716,68,1,[2716]\r\nNCP-13.zip,2,NCP,354,1860,73,2,\"[1859, 1860]\"\r\nNormal-2.zip,0,Normal,1757,1105,71,4,\"[1105, 1106, 1107, 
1108]\"\r\nNCP-27.zip,2,NCP,1016,2582,108,3,\"[2580, 2581, 2582]\"\r\nCP-18.zip,1,CP,1777,3541,62,5,\"[3540, 3541, 3542, 3543, 3544]\"\r\nNCP-1.zip,2,NCP,1008,2569,387,1,[2569]\r\nCP-7.zip,1,CP,1315,3665,59,2,\"[3665, 3666]\"\r\nCP-27.zip,1,CP,3737,5681,17,1,[5681]\r\nNormal-9.zip,0,Normal,1914,369,88,1,[369]\r\nNormal-1.zip,0,Normal,1672,802,75,8,\"[796, 797, 798, 799, 800, 801, 802, 803]\"\r\nNCP-8.zip,2,NCP,25,1181,129,2,\"[1181, 1183]\"\r\nCP-19.zip,1,CP,1789,3207,64,4,\"[3204, 3205, 3206, 3207]\"\r\nCP-11.zip,1,CP,1444,3960,139,3,\"[3960, 3961, 3962]\"\r\nNCP-4.zip,2,NCP,145,1435,58,2,\"[1434, 1435]\"\r\nCP-23.zip,1,CP,659,3021,594,1,[3021]\r\nNormal-25.zip,0,Normal,3716,5346,31,1,[5346]\r\nNormal-10.zip,0,Normal,1936,391,82,1,[391]\r\nNCP-22.zip,2,NCP,821,2331,30,1,[2331]\r\nCP-13.zip,1,CP,1505,4108,54,3,\"[4108, 4109, 4110]\"\r\nNCP-15.zip,2,NCP,411,1972,62,2,\"[1971, 1972]\"\r\nCP-7.zip,1,CP,1304,3633,18,8,\"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]\"\r\nNCP-27.zip,2,NCP,1048,2621,44,2,\"[2620, 2621]\"\r\nCP-21.zip,1,CP,595,2957,306,1,[2957]\r\nNCP-22.zip,2,NCP,861,2384,197,1,[2384]\r\nCP-7.zip,1,CP,1302,3604,39,4,\"[3602, 3603, 3604, 3605]\"\r\nNCP-17.zip,2,NCP,472,2099,151,2,\"[2099, 2100]\"\r\nNCP-8.zip,2,NCP,26,1185,36,2,\"[1184, 1185]\"\r\nNormal-27.zip,0,Normal,3903,5435,75,1,[5435]\r\nNormal-25.zip,0,Normal,3840,5352,210,1,[5352]\r\nNCP-8.zip,2,NCP,266,1679,58,2,\"[1678, 1679]\"\r\nNormal-16.zip,0,Normal,2120,575,84,1,[575]\r\nNormal-16.zip,0,Normal,2128,583,76,1,[583]\r\nCP-11.zip,1,CP,1449,3972,50,2,\"[3971, 3972]\"\r\nCP-7.zip,1,CP,1304,3636,47,8,\"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]\"\r\nNormal-22.zip,0,Normal,2597,107,41,1,[107]\r\nNCP-10.zip,2,NCP,2726,2682,50,1,[2682]\r\nNormal-7.zip,0,Normal,1849,304,87,1,[304]\r\nNormal-13.zip,0,Normal,2040,495,95,1,[495]\r\nNormal-16.zip,0,Normal,2125,580,83,1,[580]\r\nCP-25.zip,1,CP,740,3102,193,1,[3102]\r\nNCP-22.zip,2,NCP,871,2401,281,2,\"[2401, 
2402]\"\r\nNCP-9.zip,2,NCP,2704,2670,56,1,[2670]\r\nNCP-12.zip,2,NCP,33,1198,147,2,\"[1198, 1199]\"\r\nCP-18.zip,1,CP,1663,4351,26,1,[4351]\r\nNormal-3.zip,0,Normal,1767,1157,28,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nNormal-2.zip,0,Normal,1735,1031,76,2,\"[1030, 1031]\"\r\nNormal-10.zip,0,Normal,1938,393,66,1,[393]\r\nNCP-24.zip,2,NCP,975,2518,484,1,[2518]\r\nCP-18.zip,1,CP,1774,3523,65,8,\"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]\"\r\nNCP-14.zip,2,NCP,381,1914,143,2,\"[1914, 1915]\"\r\nNCP-12.zip,2,NCP,33,1199,62,2,\"[1198, 1199]\"\r\nNCP-13.zip,2,NCP,352,1855,138,2,\"[1855, 1856]\"\r\nNCP-12.zip,2,NCP,333,1814,162,2,\"[1814, 1815]\"\r\nNCP-23.zip,2,NCP,904,2446,667,1,[2446]\r\nNCP-24.zip,2,NCP,985,2531,508,1,[2531]\r\nNCP-6.zip,2,NCP,228,1600,161,2,\"[1600, 1601]\"\r\nNCP-15.zip,2,NCP,414,1980,121,2,\"[1980, 1981]\"\r\nNCP-1.zip,2,NCP,103,1344,63,2,\"[1343, 1344]\"\r\nNormal-3.zip,0,Normal,1767,1155,66,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nCP-8.zip,1,CP,1349,3744,58,3,\"[3742, 3743, 3744]\"\r\nNCP-8.zip,2,NCP,261,1669,65,2,\"[1668, 1669]\"\r\nNormal-21.zip,0,Normal,2300,755,98,1,[755]\r\nNCP-13.zip,2,NCP,354,1859,177,2,\"[1859, 1860]\"\r\nCP-23.zip,1,CP,665,3027,116,1,[3027]\r\nCP-15.zip,1,CP,1561,4242,49,2,\"[4241, 4242]\"\r\nCP-9.zip,1,CP,1376,3806,60,2,\"[3806, 3807]\"\r\nNormal-1.zip,0,Normal,1727,1012,66,4,\"[1009, 1010, 1011, 1012]\"\r\nNCP-28.zip,2,NCP,835,2349,46,2,\"[2349, 2350]\"\r\nCP-8.zip,1,CP,1349,3742,142,3,\"[3742, 3743, 3744]\"\r\nNormal-20.zip,0,Normal,2277,732,95,1,[732]\r\nNCP-28.zip,2,NCP,876,2409,52,1,[2409]\r\nNormal-15.zip,0,Normal,2101,556,85,1,[556]\r\nCP-11.zip,1,CP,1444,3961,58,3,\"[3960, 3961, 3962]\"\r\nNCP-2.zip,2,NCP,1276,2717,61,1,[2717]\r\nNormal-3.zip,0,Normal,1767,1153,68,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 
1163]\"\r\nNormal-13.zip,0,Normal,2051,506,86,1,[506]\r\nNormal-2.zip,0,Normal,1734,1029,66,2,\"[1028, 1029]\"\r\nNormal-26.zip,0,Normal,3871,5383,22,1,[5383]\r\nNCP-1.zip,2,NCP,1009,2570,39,2,\"[2570, 2571]\"\r\nNormal-2.zip,0,Normal,1763,1139,65,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nNCP-13.zip,2,NCP,359,1870,61,2,\"[1869, 1870]\"\r\nNormal-19.zip,0,Normal,2220,675,78,1,[675]\r\nCP-9.zip,1,CP,1382,3819,60,3,\"[3818, 3819, 3820]\"\r\nCP-20.zip,1,CP,2752,3283,26,1,[3283]\r\nCP-13.zip,1,CP,1510,4122,60,2,\"[4121, 4122]\"\r\nNCP-16.zip,2,NCP,440,2034,125,2,\"[2034, 2035]\"\r\nCP-12.zip,1,CP,1458,3994,69,3,\"[3992, 3993, 3994]\"\r\nNCP-11.zip,2,NCP,284,1714,58,2,\"[1713, 1714]\"\r\nNCP-11.zip,2,NCP,303,1753,58,2,\"[1752, 1753]\"\r\nNCP-6.zip,2,NCP,205,1554,126,2,\"[1554, 1555]\"\r\nCP-14.zip,1,CP,1535,4179,53,2,\"[4178, 4179]\"\r\nNormal-27.zip,0,Normal,3910,5446,66,2,\"[5445, 5446]\"\r\nNormal-3.zip,0,Normal,742,177,107,1,[177]\r\nNormal-22.zip,0,Normal,2589,99,37,1,[99]\r\nNCP-22.zip,2,NCP,88,1310,71,2,\"[1309, 1310]\"\r\nCP-14.zip,1,CP,1521,4147,57,2,\"[4146, 4147]\"\r\nCP-26.zip,1,CP,3729,5666,179,3,\"[5665, 5666, 5667]\"\r\nCP-28.zip,1,CP,3793,5737,29,1,[5737]\r\nNormal-3.zip,0,Normal,767,202,358,1,[202]\r\nNCP-5.zip,2,NCP,198,1540,144,2,\"[1540, 1541]\"\r\nCP-27.zip,1,CP,3738,5682,19,1,[5682]\r\nCP-27.zip,1,CP,3750,5694,28,1,[5694]\r\nCP-10.zip,1,CP,1416,3898,58,2,\"[3897, 3898]\"\r\nCP-8.zip,1,CP,1322,3680,56,2,\"[3680, 3681]\"\r\nNormal-23.zip,0,Normal,2607,117,38,1,[117]\r\nNCP-3.zip,2,NCP,138,1420,124,2,\"[1420, 1421]\"\r\nCP-11.zip,1,CP,1425,3916,185,3,\"[3916, 3917, 3918]\"\r\nCP-15.zip,1,CP,1581,4269,19,1,[4269]\r\nCP-24.zip,1,CP,706,3068,124,1,[3068]\r\nCP-18.zip,1,CP,1666,4354,23,1,[4354]\r\nNCP-4.zip,2,NCP,161,1466,135,2,\"[1466, 1467]\"\r\nNormal-7.zip,0,Normal,1847,302,102,1,[302]\r\nCP-19.zip,1,CP,1784,3593,69,4,\"[3590, 3591, 3592, 
3593]\"\r\nCP-21.zip,1,CP,605,2967,157,1,[2967]\r\nCP-5.zip,1,CP,12,3168,291,2,\"[3168, 3169]\"\r\nNormal-9.zip,0,Normal,1909,364,102,1,[364]\r\nNCP-22.zip,2,NCP,850,2369,52,1,[2369]\r\nCP-24.zip,1,CP,687,3049,135,1,[3049]\r\nNCP-1.zip,2,NCP,1033,2604,39,1,[2604]\r\nNormal-2.zip,0,Normal,1750,1074,65,3,\"[1074, 1077, 1078]\"\r\nCP-9.zip,1,CP,1365,3779,200,3,\"[3779, 3780, 3781]\"\r\nNCP-18.zip,2,NCP,502,2161,59,2,\"[2160, 2161]\"\r\nNormal-3.zip,0,Normal,1767,1162,76,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nNormal-1.zip,0,Normal,1672,799,78,8,\"[796, 797, 798, 799, 800, 801, 802, 803]\"\r\nNormal-3.zip,0,Normal,747,182,100,1,[182]\r\nNCP-12.zip,2,NCP,319,1787,66,2,\"[1785, 1787]\"\r\nNCP-15.zip,2,NCP,405,1959,143,2,\"[1959, 1960]\"\r\nCP-18.zip,1,CP,1781,3574,64,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nCP-21.zip,1,CP,600,2962,202,1,[2962]\r\nCP-12.zip,1,CP,1479,4039,60,3,\"[4039, 4040, 4041]\"\r\nNCP-27.zip,2,NCP,827,2340,173,1,[2340]\r\nNCP-24.zip,2,NCP,983,2528,67,1,[2528]\r\nCP-11.zip,1,CP,1424,3915,60,2,\"[3914, 3915]\"\r\nCP-2.zip,1,CP,1105,3323,220,1,[3323]\r\nCP-10.zip,1,CP,1412,3888,66,2,\"[3887, 3888]\"\r\nNCP-18.zip,2,NCP,495,2147,65,2,\"[2146, 2147]\"\r\nNCP-3.zip,2,NCP,134,1412,128,2,\"[1412, 1413]\"\r\nNormal-10.zip,0,Normal,1940,395,74,1,[395]\r\nNormal-17.zip,0,Normal,2163,618,89,1,[618]\r\nCP-9.zip,1,CP,1358,3761,249,3,\"[3761, 3762, 3763]\"\r\nCP-23.zip,1,CP,658,3020,273,1,[3020]\r\nNCP-12.zip,2,NCP,341,1830,129,3,\"[1830, 1832, 1834]\"\r\nCP-14.zip,1,CP,1520,4145,57,3,\"[4143, 4144, 4145]\"\r\nCP-19.zip,1,CP,1783,3588,62,2,\"[3588, 3589]\"\r\nNormal-3.zip,0,Normal,1767,1158,66,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nCP-7.zip,1,CP,1301,3598,55,4,\"[3598, 3599, 3600, 3601]\"\r\nNormal-6.zip,0,Normal,1810,265,85,1,[265]\r\nNCP-12.zip,2,NCP,321,1791,51,2,\"[1790, 1791]\"\r\nNCP-12.zip,2,NCP,341,1834,54,3,\"[1830, 1832, 
1834]\"\r\nCP-11.zip,1,CP,1435,3938,46,2,\"[3938, 3939]\"\r\nNormal-26.zip,0,Normal,3876,5388,30,1,[5388]\r\nNormal-16.zip,0,Normal,2123,578,90,1,[578]\r\nNormal-6.zip,0,Normal,1816,271,76,1,[271]\r\nNCP-26.zip,2,NCP,3992,5516,48,1,[5516]\r\nCP-18.zip,1,CP,1777,3544,66,5,\"[3540, 3541, 3542, 3543, 3544]\"\r\nNCP-5.zip,2,NCP,173,1491,58,2,\"[1490, 1491]\"\r\nNCP-11.zip,2,NCP,312,1771,148,2,\"[1771, 1772]\"\r\nNCP-19.zip,2,NCP,525,2207,61,2,\"[2206, 2207]\"\r\nNormal-3.zip,0,Normal,752,187,103,1,[187]\r\nNCP-7.zip,2,NCP,23,1178,63,2,\"[1177, 1178]\"\r\nCP-27.zip,1,CP,3762,5706,26,1,[5706]\r\nCP-18.zip,1,CP,1659,4347,26,1,[4347]\r\nCP-20.zip,1,CP,2667,3248,46,3,\"[3246, 3247, 3248]\"\r\nNormal-24.zip,0,Normal,2653,163,39,1,[163]\r\nNormal-4.zip,0,Normal,801,236,107,1,[236]\r\nNormal-20.zip,0,Normal,2272,727,79,1,[727]\r\nNCP-30.zip,2,NCP,988,2539,56,2,\"[2538, 2539]\"\r\nCP-18.zip,1,CP,1774,3527,58,8,\"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]\"\r\nNormal-17.zip,0,Normal,2165,620,95,1,[620]\r\nCP-12.zip,1,CP,1479,4041,60,3,\"[4039, 4040, 4041]\"\r\nNormal-21.zip,0,Normal,2299,754,90,1,[754]\r\nCP-22.zip,1,CP,637,2999,118,1,[2999]\r\nNCP-6.zip,2,NCP,217,1578,139,2,\"[1578, 1579]\"\r\nCP-30.zip,1,CP,3919,5544,73,4,\"[5543, 5544, 5545, 5546]\"\r\nCP-13.zip,1,CP,1511,4123,57,2,\"[4123, 4124]\"\r\nNormal-13.zip,0,Normal,2035,490,82,1,[490]\r\nCP-10.zip,1,CP,1417,3899,59,1,[3899]\r\nNCP-8.zip,2,NCP,261,1668,155,2,\"[1668, 1669]\"\r\nCP-20.zip,1,CP,2667,3247,92,3,\"[3246, 3247, 3248]\"\r\nCP-26.zip,1,CP,3636,5595,290,1,[5595]\r\nNormal-2.zip,0,Normal,1763,1136,70,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nNormal-9.zip,0,Normal,1913,368,88,1,[368]\r\nCP-9.zip,1,CP,1375,3805,58,2,\"[3804, 3805]\"\r\nCP-16.zip,1,CP,1606,4294,26,1,[4294]\r\nCP-18.zip,1,CP,1777,3543,68,5,\"[3540, 3541, 3542, 3543, 3544]\"\r\nNormal-21.zip,0,Normal,2287,742,77,1,[742]\r\nCP-11.zip,1,CP,1422,3908,140,3,\"[3908, 3909, 
3910]\"\r\nNCP-22.zip,2,NCP,859,2381,268,2,\"[2380, 2381]\"\r\nNormal-24.zip,0,Normal,2645,155,38,1,[155]\r\nCP-7.zip,1,CP,1302,3605,201,4,\"[3602, 3603, 3604, 3605]\"\r\nCP-23.zip,1,CP,646,3008,128,1,[3008]\r\nCP-11.zip,1,CP,1425,3918,49,3,\"[3916, 3917, 3918]\"\r\nCP-18.zip,1,CP,1781,3569,65,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nNCP-16.zip,2,NCP,436,2025,146,2,\"[2025, 2026]\"\r\nNCP-18.zip,2,NCP,503,2163,61,2,\"[2162, 2163]\"\r\nNCP-4.zip,2,NCP,167,1478,143,2,\"[1478, 1479]\"\r\nNormal-26.zip,0,Normal,3880,5392,32,1,[5392]\r\nNCP-25.zip,2,NCP,3709,5536,65,1,[5536]\r\nNormal-2.zip,0,Normal,1734,1028,66,2,\"[1028, 1029]\"\r\nNormal-17.zip,0,Normal,2169,624,92,1,[624]\r\nNCP-20.zip,2,NCP,546,2249,134,2,\"[2249, 2250]\"\r\nNCP-4.zip,2,NCP,146,1437,52,2,\"[1436, 1437]\"\r\nNCP-26.zip,2,NCP,3995,5493,47,1,[5493]\r\nCP-20.zip,1,CP,2763,3294,119,1,[3294]\r\nNCP-13.zip,2,NCP,349,1850,57,2,\"[1849, 1850]\"\r\nCP-26.zip,1,CP,3644,5604,284,1,[5604]\r\nCP-8.zip,1,CP,1327,3690,253,3,\"[3690, 3691, 3692]\"\r\nCP-20.zip,1,CP,2770,3301,38,1,[3301]\r\nCP-12.zip,1,CP,1471,4023,55,2,\"[4022, 4023]\"\r\nNormal-27.zip,0,Normal,3912,5453,68,1,[5453]\r\nNCP-23.zip,2,NCP,93,1322,157,2,\"[1322, 1323]\"\r\nCP-18.zip,1,CP,1781,3576,64,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nNCP-13.zip,2,NCP,347,1845,126,2,\"[1845, 1846]\"\r\nCP-20.zip,1,CP,2454,2935,120,2,\"[2935, 2936]\"\r\nNormal-1.zip,0,Normal,1670,788,58,6,\"[787, 788, 789, 790, 791, 792]\"\r\nNormal-8.zip,0,Normal,1880,335,83,1,[335]\r\nNormal-10.zip,0,Normal,1937,392,90,1,[392]\r\nCP-20.zip,1,CP,2768,3299,38,1,[3299]\r\nNormal-18.zip,0,Normal,2212,667,89,1,[667]\r\nNormal-1.zip,0,Normal,1677,826,65,4,\"[823, 824, 825, 826]\"\r\nCP-26.zip,1,CP,3721,5654,43,2,\"[5654, 5655]\"\r\nNCP-16.zip,2,NCP,439,2033,66,2,\"[2032, 2033]\"\r\nNormal-13.zip,0,Normal,2031,486,81,1,[486]\r\nCP-19.zip,1,CP,1783,3589,62,2,\"[3588, 
3589]\"\r\nCP-2.zip,1,CP,1121,3339,156,1,[3339]\r\nCP-22.zip,1,CP,612,2974,84,1,[2974]\r\nNormal-26.zip,0,Normal,3867,5379,29,1,[5379]\r\nNCP-1.zip,2,NCP,102,1342,56,2,\"[1341, 1342]\"\r\nNCP-18.zip,2,NCP,493,2142,133,2,\"[2142, 2143]\"\r\nNCP-12.zip,2,NCP,339,1826,120,2,\"[1826, 1827]\"\r\nNormal-14.zip,0,Normal,2085,540,95,1,[540]\r\nNCP-27.zip,2,NCP,238,1622,57,2,\"[1621, 1622]\"\r\nNormal-2.zip,0,Normal,1737,1039,80,4,\"[1037, 1038, 1039, 1040]\"\r\nCP-30.zip,1,CP,3919,5546,70,4,\"[5543, 5544, 5545, 5546]\"\r\nNCP-1.zip,2,NCP,1012,2576,249,1,[2576]\r\nNCP-17.zip,2,NCP,463,2080,144,2,\"[2080, 2081]\"\r\nNCP-2.zip,2,NCP,127,1400,58,2,\"[1399, 1400]\"\r\nNormal-21.zip,0,Normal,2291,746,96,1,[746]\r\nNCP-8.zip,2,NCP,25,1183,45,2,\"[1181, 1183]\"\r\nCP-9.zip,1,CP,1382,3820,60,3,\"[3818, 3819, 3820]\"\r\nNCP-30.zip,2,NCP,967,2510,168,1,[2510]\r\nNormal-27.zip,0,Normal,3910,5445,66,2,\"[5445, 5446]\"\r\nNCP-4.zip,2,NCP,156,1456,138,2,\"[1456, 1457]\"\r\nCP-12.zip,1,CP,1464,4007,63,2,\"[4007, 4008]\"\r\nNCP-4.zip,2,NCP,162,1469,62,2,\"[1468, 1469]\"\r\nCP-13.zip,1,CP,1493,4081,53,3,\"[4080, 4081, 4082]\"\r\nCP-16.zip,1,CP,1602,4290,17,1,[4290]\r\nNCP-6.zip,2,NCP,216,1576,139,2,\"[1576, 1577]\"\r\nCP-25.zip,1,CP,723,3085,104,1,[3085]\r\nNCP-15.zip,2,NCP,411,1971,149,2,\"[1971, 1972]\"\r\nNCP-15.zip,2,NCP,425,2003,139,2,\"[2003, 2004]\"\r\nCP-24.zip,1,CP,688,3050,127,1,[3050]\r\nNormal-13.zip,0,Normal,2033,488,77,1,[488]\r\nNCP-23.zip,2,NCP,96,1329,61,2,\"[1328, 1329]\"\r\nNormal-5.zip,0,Normal,803,238,343,1,[238]\r\nCP-16.zip,1,CP,1595,4283,23,1,[4283]\r\nNCP-27.zip,2,NCP,238,1621,134,2,\"[1621, 1622]\"\r\nNCP-19.zip,2,NCP,529,2214,141,3,\"[2214, 2215, 2217]\"\r\nCP-25.zip,1,CP,710,3072,78,1,[3072]\r\nNormal-19.zip,0,Normal,2243,698,86,1,[698]\r\nCP-11.zip,1,CP,1440,3949,51,3,\"[3948, 3949, 3950]\"\r\nCP-7.zip,1,CP,1260,3478,235,1,[3478]\r\nNormal-1.zip,0,Normal,1672,797,76,8,\"[796, 797, 798, 799, 800, 801, 802, 803]\"\r\nCP-26.zip,1,CP,3719,5649,52,3,\"[5649, 5650, 
5651]\"\r\nNCP-23.zip,2,NCP,969,2512,68,1,[2512]\r\nNCP-5.zip,2,NCP,186,1516,113,2,\"[1516, 1517]\"\r\nCP-13.zip,1,CP,1507,4114,62,2,\"[4113, 4114]\"\r\nCP-19.zip,1,CP,2443,2916,310,3,\"[2915, 2916, 2917]\"\r\nCP-13.zip,1,CP,1503,4105,64,3,\"[4104, 4105, 4106]\"\r\nNormal-10.zip,0,Normal,1934,389,85,1,[389]\r\nCP-20.zip,1,CP,2760,3291,281,1,[3291]\r\nNormal-19.zip,0,Normal,2242,697,86,1,[697]\r\nNCP-22.zip,2,NCP,864,2388,214,2,\"[2388, 2389]\"\r\nNCP-14.zip,2,NCP,377,1906,147,2,\"[1906, 1907]\"\r\nCP-29.zip,1,CP,3818,5762,29,1,[5762]\r\nCP-23.zip,1,CP,676,3038,291,1,[3038]\r\nNCP-14.zip,2,NCP,389,1928,150,2,\"[1928, 1929]\"\r\nCP-27.zip,1,CP,3761,5705,16,1,[5705]\r\nNCP-27.zip,2,NCP,1016,2581,179,3,\"[2580, 2581, 2582]\"\r\nNormal-22.zip,0,Normal,2321,776,90,1,[776]\r\nCP-7.zip,1,CP,1304,3639,212,8,\"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]\"\r\nNCP-16.zip,2,NCP,438,2030,62,2,\"[2029, 2030]\"\r\nNCP-2.zip,2,NCP,107,1352,61,2,\"[1351, 1352]\"\r\nNCP-11.zip,2,NCP,295,1736,97,2,\"[1735, 1736]\"\r\nCP-2.zip,1,CP,1122,3340,229,1,[3340]\r\nNormal-25.zip,0,Normal,3849,5361,205,1,[5361]\r\nCP-4.zip,1,CP,1189,3407,284,1,[3407]\r\nNCP-4.zip,2,NCP,152,1449,61,2,\"[1448, 1449]\"\r\nNormal-13.zip,0,Normal,2044,499,103,1,[499]\r\nNormal-2.zip,0,Normal,1756,1103,65,4,\"[1101, 1102, 1103, 1104]\"\r\nCP-9.zip,1,CP,1379,3812,52,2,\"[3812, 3813]\"\r\nCP-20.zip,1,CP,2454,2936,116,2,\"[2935, 2936]\"\r\nNCP-3.zip,2,NCP,1294,2735,62,1,[2735]\r\nCP-6.zip,1,CP,1230,3448,37,1,[3448]\r\nNormal-5.zip,0,Normal,815,250,120,1,[250]\r\nCP-13.zip,1,CP,1488,4066,66,3,\"[4064, 4065, 4066]\"\r\nNCP-7.zip,2,NCP,241,1627,131,2,\"[1627, 1628]\"\r\nNCP-6.zip,2,NCP,220,1584,160,2,\"[1584, 1585]\"\r\nNCP-30.zip,2,NCP,982,2527,242,1,[2527]\r\nNormal-2.zip,0,Normal,1735,1030,76,2,\"[1030, 1031]\"\r\nCP-18.zip,1,CP,1781,3573,65,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nCP-26.zip,1,CP,3642,5601,29,1,[5601]\r\nNCP-5.zip,2,NCP,186,1517,48,2,\"[1516, 
1517]\"\r\nNormal-7.zip,0,Normal,1846,301,105,1,[301]\r\nCP-6.zip,1,CP,1252,3470,180,1,[3470]\r\nNCP-8.zip,2,NCP,254,1655,58,2,\"[1654, 1655]\"\r\nNCP-17.zip,2,NCP,460,2075,45,2,\"[2074, 2075]\"\r\nNCP-3.zip,2,NCP,138,1421,52,2,\"[1420, 1421]\"\r\nCP-29.zip,1,CP,3798,5742,21,1,[5742]\r\nNCP-14.zip,2,NCP,389,1929,63,2,\"[1928, 1929]\"\r\nNCP-22.zip,2,NCP,858,2379,52,1,[2379]\r\nNCP-10.zip,2,NCP,2721,2677,37,1,[2677]\r\nNCP-29.zip,2,NCP,882,2418,257,2,\"[2417, 2418]\"\r\nNCP-18.zip,2,NCP,495,2146,156,2,\"[2146, 2147]\"\r\nNormal-18.zip,0,Normal,2210,665,88,1,[665]\r\nCP-7.zip,1,CP,1304,3632,18,8,\"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]\"\r\nNCP-18.zip,2,NCP,512,2180,149,2,\"[2180, 2181]\"\r\nNormal-1.zip,0,Normal,1672,803,75,8,\"[796, 797, 798, 799, 800, 801, 802, 803]\"\r\nCP-21.zip,1,CP,2774,3305,31,1,[3305]\r\nCP-9.zip,1,CP,1372,3797,193,3,\"[3797, 3798, 3799]\"\r\nCP-22.zip,1,CP,615,2977,104,1,[2977]\r\nCP-12.zip,1,CP,1469,4019,47,2,\"[4018, 4019]\"\r\nCP-18.zip,1,CP,1774,3522,65,8,\"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]\"\r\nNCP-17.zip,2,NCP,472,2100,63,2,\"[2099, 2100]\"\r\nNormal-14.zip,0,Normal,2069,524,81,1,[524]\r\nCP-18.zip,1,CP,1774,3529,58,8,\"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]\"\r\nNCP-27.zip,2,NCP,1031,2601,216,2,\"[2601, 2602]\"\r\nNCP-22.zip,2,NCP,857,2378,53,1,[2378]\r\nNormal-3.zip,0,Normal,1767,1156,139,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nNormal-24.zip,0,Normal,2641,151,41,1,[151]\r\nNCP-9.zip,2,NCP,2696,2662,44,1,[2662]\r\nCP-17.zip,1,CP,1620,4308,24,1,[4308]\r\nNCP-4.zip,2,NCP,149,1443,66,2,\"[1442, 1443]\"\r\nCP-13.zip,1,CP,1488,4064,158,3,\"[4064, 4065, 4066]\"\r\nNormal-22.zip,0,Normal,2315,770,82,1,[770]\r\nNCP-12.zip,2,NCP,316,1780,58,2,\"[1779, 1780]\"\r\nCP-9.zip,1,CP,1360,3767,67,3,\"[3767, 3768, 3769]\"\r\nNCP-18.zip,2,NCP,512,2181,62,2,\"[2180, 2181]\"\r\nNCP-20.zip,2,NCP,547,2252,66,2,\"[2251, 
2252]\"\r\nNormal-10.zip,0,Normal,1942,397,81,1,[397]\r\nNCP-5.zip,2,NCP,198,1541,60,2,\"[1540, 1541]\"\r\nNCP-6.zip,2,NCP,199,1542,138,2,\"[1542, 1543]\"\r\nCP-17.zip,1,CP,1631,4319,23,1,[4319]\r\nNCP-13.zip,2,NCP,353,1858,69,2,\"[1857, 1858]\"\r\nNCP-17.zip,2,NCP,463,2081,60,2,\"[2080, 2081]\"\r\nNCP-1.zip,2,NCP,1019,2585,363,1,[2585]\r\nNCP-22.zip,2,NCP,845,2362,48,4,\"[2360, 2361, 2362, 2363]\"\r\nNCP-15.zip,2,NCP,425,2004,58,2,\"[2003, 2004]\"\r\nNCP-28.zip,2,NCP,873,2405,52,2,\"[2405, 2406]\"\r\nNCP-4.zip,2,NCP,152,1448,145,2,\"[1448, 1449]\"\r\nNCP-19.zip,2,NCP,543,2244,54,2,\"[2243, 2244]\"\r\nNormal-14.zip,0,Normal,2062,517,84,1,[517]\r\nNCP-17.zip,2,NCP,465,2086,61,3,\"[2084, 2085, 2086]\"\r\nNormal-25.zip,0,Normal,3717,5347,25,1,[5347]\r\nCP-4.zip,1,CP,1178,3396,133,1,[3396]\r\nCP-22.zip,1,CP,620,2982,64,1,[2982]\r\nNormal-1.zip,0,Normal,1677,825,65,4,\"[823, 824, 825, 826]\"\r\nNormal-9.zip,0,Normal,1908,363,81,1,[363]\r\nCP-30.zip,1,CP,3940,5646,33,1,[5646]\r\nNCP-30.zip,2,NCP,942,2485,45,1,[2485]\r\nCP-18.zip,1,CP,1781,3578,62,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nCP-9.zip,1,CP,1358,3762,126,3,\"[3761, 3762, 3763]\"\r\nCP-27.zip,1,CP,3764,5708,23,1,[5708]\r\nNCP-8.zip,2,NCP,2673,2692,48,1,[2692]\r\nNCP-19.zip,2,NCP,534,2226,49,2,\"[2225, 2226]\"\r\nCP-11.zip,1,CP,1440,3950,51,3,\"[3948, 3949, 3950]\"\r\nNCP-17.zip,2,NCP,465,2084,145,3,\"[2084, 2085, 2086]\"\r\nNCP-19.zip,2,NCP,522,2200,137,2,\"[2200, 2201]\"\r\nCP-12.zip,1,CP,1468,4015,54,3,\"[4015, 4016, 4017]\"\r\nCP-13.zip,1,CP,1498,4095,60,2,\"[4095, 4096]\"\r\nCP-18.zip,1,CP,1778,3548,65,6,\"[3545, 3546, 3547, 3548, 3549, 3550]\"\r\nNormal-1.zip,0,Normal,1670,791,66,6,\"[787, 788, 789, 790, 791, 
792]\"\r\nCP-16.zip,1,CP,1611,4299,19,1,[4299]\r\nNormal-14.zip,0,Normal,2080,535,100,1,[535]\r\nNCP-25.zip,2,NCP,3968,5477,44,1,[5477]\r\nNormal-3.zip,0,Normal,755,190,107,1,[190]\r\nNormal-16.zip,0,Normal,2151,606,93,1,[606]\r\nNCP-4.zip,2,NCP,168,1481,58,2,\"[1480, 1481]\"\r\nNormal-21.zip,0,Normal,2289,744,77,1,[744]\r\nNCP-6.zip,2,NCP,224,1593,57,2,\"[1592, 1593]\"\r\nCP-13.zip,1,CP,1502,4103,73,2,\"[4102, 4103]\"\r\nNCP-22.zip,2,NCP,865,2390,34,2,\"[2390, 2391]\"\r\nCP-28.zip,1,CP,3787,5731,27,1,[5731]\r\nNCP-5.zip,2,NCP,170,1484,141,2,\"[1484, 1485]\"\r\nNormal-20.zip,0,Normal,2271,726,81,1,[726]\r\nNCP-7.zip,2,NCP,2485,2644,46,1,[2644]\r\nNCP-17.zip,2,NCP,475,2106,63,2,\"[2105, 2106]\"\r\nNCP-21.zip,2,NCP,74,1281,127,2,\"[1281, 1282]\"\r\nCP-13.zip,1,CP,1507,4113,62,2,\"[4113, 4114]\"\r\nCP-18.zip,1,CP,1781,3570,62,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nCP-12.zip,1,CP,1462,4002,193,3,\"[4002, 4003, 4004]\"\r\nNormal-1.zip,0,Normal,1672,796,76,8,\"[796, 797, 798, 799, 800, 801, 802, 803]\"\r\nNCP-12.zip,2,NCP,337,1822,139,2,\"[1822, 1823]\"\r\nCP-9.zip,1,CP,1353,3749,60,3,\"[3748, 3749, 3750]\"\r\nNormal-15.zip,0,Normal,2087,542,83,1,[542]\r\nNCP-12.zip,2,NCP,331,1811,66,2,\"[1810, 1811]\"\r\nCP-22.zip,1,CP,617,2979,110,1,[2979]\r\nCP-18.zip,1,CP,1771,3518,51,4,\"[3518, 3519, 3520, 3521]\"\r\nCP-26.zip,1,CP,3730,5668,212,2,\"[5668, 5669]\"\r\nNormal-24.zip,0,Normal,2660,170,38,1,[170]\r\nNormal-11.zip,0,Normal,1967,422,97,1,[422]\r\nNCP-4.zip,2,NCP,149,1442,159,2,\"[1442, 1443]\"\r\nCP-30.zip,1,CP,3834,5778,26,1,[5778]\r\nNCP-19.zip,2,NCP,540,2237,127,2,\"[2237, 2238]\"\r\nNormal-26.zip,0,Normal,3862,5374,188,1,[5374]\r\nNormal-7.zip,0,Normal,1842,297,77,1,[297]\r\nNormal-26.zip,0,Normal,3868,5380,30,1,[5380]\r\nNormal-12.zip,0,Normal,2003,458,85,1,[458]\r\nNCP-5.zip,2,NCP,17,1167,58,2,\"[1166, 1167]\"\r\nNCP-2.zip,2,NCP,117,1375,130,2,\"[1375, 1376]\"\r\nCP-13.zip,1,CP,1511,4124,57,2,\"[4123, 
4124]\"\r\nCP-18.zip,1,CP,1778,3546,66,6,\"[3545, 3546, 3547, 3548, 3549, 3550]\"\r\nNCP-19.zip,2,NCP,529,2217,58,3,\"[2214, 2215, 2217]\"\r\nCP-14.zip,1,CP,1520,4143,57,3,\"[4143, 4144, 4145]\"\r\nNormal-16.zip,0,Normal,2131,586,95,1,[586]\r\nNCP-28.zip,2,NCP,873,2406,228,2,\"[2405, 2406]\"\r\nNCP-3.zip,2,NCP,137,1418,126,2,\"[1418, 1419]\"\r\nNCP-10.zip,2,NCP,279,1705,58,2,\"[1704, 1705]\"\r\nCP-28.zip,1,CP,3796,5740,28,1,[5740]\r\nNCP-19.zip,2,NCP,54,1241,147,2,\"[1241, 1242]\"\r\nCP-28.zip,1,CP,3768,5712,19,1,[5712]\r\nNCP-2.zip,2,NCP,120,1382,58,2,\"[1381, 1382]\"\r\nCP-16.zip,1,CP,1603,4291,22,1,[4291]\r\nCP-2.zip,1,CP,1118,3336,173,1,[3336]\r\nNCP-30.zip,2,NCP,939,2482,49,1,[2482]\r\nNormal-8.zip,0,Normal,1874,329,90,1,[329]\r\nNormal-3.zip,0,Normal,746,181,110,1,[181]\r\nCP-21.zip,1,CP,608,2970,86,1,[2970]\r\nNormal-22.zip,0,Normal,2601,111,37,1,[111]\r\nNCP-4.zip,2,NCP,16,1165,48,2,\"[1164, 1165]\"\r\nNCP-1.zip,2,NCP,1036,2607,441,1,[2607]\r\nNCP-19.zip,2,NCP,528,2213,59,2,\"[2212, 2213]\"\r\nNCP-6.zip,2,NCP,217,1579,58,2,\"[1578, 1579]\"\r\nCP-10.zip,1,CP,1416,3897,58,2,\"[3897, 3898]\"\r\nCP-30.zip,1,CP,4043,5592,41,1,[5592]\r\nCP-30.zip,1,CP,3933,5636,69,2,\"[5636, 5637]\"\r\nCP-20.zip,1,CP,2667,3246,24,3,\"[3246, 3247, 3248]\"\r\nNormal-1.zip,0,Normal,1677,824,64,4,\"[823, 824, 825, 826]\"\r\nCP-18.zip,1,CP,1779,3552,59,2,\"[3551, 3552]\"\r\nNormal-25.zip,0,Normal,3855,5367,209,1,[5367]\r\nCP-24.zip,1,CP,691,3053,72,1,[3053]\r\nCP-6.zip,1,CP,1239,3457,134,1,[3457]\r\nCP-21.zip,1,CP,602,2964,84,1,[2964]\r\nNCP-1.zip,2,NCP,105,1348,61,2,\"[1347, 1348]\"\r\nCP-3.zip,1,CP,1151,3369,158,1,[3369]\r\nNCP-15.zip,2,NCP,413,1975,110,4,\"[1975, 1976, 1977, 1979]\"\r\nCP-8.zip,1,CP,1327,3691,64,3,\"[3690, 3691, 3692]\"\r\nCP-6.zip,1,CP,1237,3455,178,1,[3455]\r\nNormal-11.zip,0,Normal,1959,414,97,1,[414]\r\nNormal-25.zip,0,Normal,3713,5343,27,1,[5343]\r\nCP-21.zip,1,CP,597,2959,305,1,[2959]\r\nCP-9.zip,1,CP,1356,3757,60,2,\"[3756, 
3757]\"\r\nNCP-7.zip,2,NCP,2483,2686,40,1,[2686]\r\nNCP-27.zip,2,NCP,1048,2620,58,2,\"[2620, 2621]\"\r\nNormal-3.zip,0,Normal,1767,1159,28,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nCP-5.zip,1,CP,1219,3437,179,1,[3437]\r\nNCP-4.zip,2,NCP,145,1434,139,2,\"[1434, 1435]\"\r\nCP-15.zip,1,CP,1575,4263,20,1,[4263]\r\nNCP-18.zip,2,NCP,516,2188,135,2,\"[2188, 2189]\"\r\nCP-9.zip,1,CP,1360,3768,67,3,\"[3767, 3768, 3769]\"\r\nCP-13.zip,1,CP,1488,4065,66,3,\"[4064, 4065, 4066]\"\r\nCP-1.zip,1,CP,1077,3122,74,2,\"[3121, 3122]\"\r\nNormal-14.zip,0,Normal,2084,539,92,1,[539]\r\nNormal-3.zip,0,Normal,1767,1163,76,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nNormal-2.zip,0,Normal,1746,1063,68,2,\"[1063, 1064]\"\r\nNCP-12.zip,2,NCP,332,1812,167,2,\"[1812, 1813]\"\r\nNormal-12.zip,0,Normal,1990,445,97,1,[445]\r\nCP-7.zip,1,CP,1301,3599,294,4,\"[3598, 3599, 3600, 3601]\"\r\nCP-1.zip,1,CP,1070,3112,104,1,[3112]\r\nCP-13.zip,1,CP,1493,4082,53,3,\"[4080, 4081, 4082]\"\r\nNCP-19.zip,2,NCP,520,2196,129,2,\"[2196, 2197]\"\r\nNCP-3.zip,2,NCP,137,1419,53,2,\"[1418, 1419]\"\r\nNCP-30.zip,2,NCP,937,2479,22,1,[2479]\r\nNCP-22.zip,2,NCP,865,2391,260,2,\"[2390, 2391]\"\r\nNCP-7.zip,2,NCP,230,1605,58,2,\"[1604, 1605]\"\r\nCP-7.zip,1,CP,1302,3603,207,4,\"[3602, 3603, 3604, 3605]\"\r\nCP-16.zip,1,CP,1588,4276,20,1,[4276]\r\nNormal-18.zip,0,Normal,2195,650,79,1,[650]\r\nNormal-17.zip,0,Normal,2173,628,96,1,[628]\r\nNCP-22.zip,2,NCP,878,2413,117,2,\"[2412, 2413]\"\r\nNormal-18.zip,0,Normal,2188,643,88,1,[643]\r\nCP-18.zip,1,CP,1774,3526,66,8,\"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]\"\r\nNormal-6.zip,0,Normal,1815,270,91,1,[270]\r\nCP-5.zip,1,CP,1208,3426,321,1,[3426]\r\nNCP-6.zip,2,NCP,22,1176,68,2,\"[1175, 1176]\"\r\nNCP-15.zip,2,NCP,413,1979,54,4,\"[1975, 1976, 1977, 1979]\"\r\nCP-18.zip,1,CP,1771,3521,51,4,\"[3518, 3519, 3520, 
3521]\"\r\nCP-4.zip,1,CP,1172,3390,195,1,[3390]\r\nCP-26.zip,1,CP,3721,5655,206,2,\"[5654, 5655]\"\r\nCP-27.zip,1,CP,3754,5698,21,1,[5698]\r\nCP-19.zip,1,CP,1784,3592,69,4,\"[3590, 3591, 3592, 3593]\"\r\nCP-9.zip,1,CP,1372,3799,49,3,\"[3797, 3798, 3799]\"\r\nNCP-2.zip,2,NCP,113,1367,137,2,\"[1367, 1368]\"\r\nNormal-22.zip,0,Normal,2318,773,105,1,[773]\r\nCP-18.zip,1,CP,1770,3517,57,1,[3517]\r\nNormal-21.zip,0,Normal,2293,748,88,1,[748]\r\nNormal-22.zip,0,Normal,2595,105,43,1,[105]\r\nNCP-18.zip,2,NCP,50,1234,59,2,\"[1233, 1234]\"\r\nNormal-2.zip,0,Normal,1757,1106,71,4,\"[1105, 1106, 1107, 1108]\"\r\nCP-8.zip,1,CP,1327,3692,64,3,\"[3690, 3691, 3692]\"\r\nCP-18.zip,1,CP,1781,3577,62,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nCP-30.zip,1,CP,3932,5635,67,2,\"[5634, 5635]\"\r\nNCP-20.zip,2,NCP,566,2291,67,2,\"[2290, 2291]\"\r\nNCP-10.zip,2,NCP,2715,2708,51,1,[2708]\r\nCP-23.zip,1,CP,660,3022,82,1,[3022]\r\nNormal-9.zip,0,Normal,1916,371,106,1,[371]\r\nCP-20.zip,1,CP,2757,3288,211,1,[3288]\r\nNormal-7.zip,0,Normal,1845,300,99,1,[300]\r\nNormal-13.zip,0,Normal,2050,505,74,1,[505]\r\nCP-1.zip,1,CP,1092,3310,216,1,[3310]\r\nNormal-2.zip,0,Normal,1763,1135,70,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nNormal-9.zip,0,Normal,1898,353,72,1,[353]\r\nNCP-21.zip,2,NCP,576,2310,124,1,[2310]\r\nNormal-1.zip,0,Normal,1701,956,70,2,\"[955, 956]\"\r\nNormal-17.zip,0,Normal,2178,633,85,1,[633]\r\nCP-8.zip,1,CP,1322,3681,56,2,\"[3680, 3681]\"\r\nNormal-6.zip,0,Normal,1802,257,107,1,[257]\r\nNCP-20.zip,2,NCP,547,2251,159,2,\"[2251, 2252]\"\r\nNCP-3.zip,2,NCP,1285,2726,66,1,[2726]\r\nNormal-7.zip,0,Normal,1828,283,96,1,[283]\r\nNCP-20.zip,2,NCP,546,2250,57,2,\"[2249, 2250]\"\r\nNormal-2.zip,0,Normal,1750,1078,69,3,\"[1074, 1077, 1078]\"\r\nNormal-9.zip,0,Normal,1892,347,77,1,[347]\r\nNCP-19.zip,2,NCP,534,2225,115,2,\"[2225, 2226]\"\r\nCP-29.zip,1,CP,3806,5750,20,1,[5750]\r\nNCP-13.zip,2,NCP,355,1861,125,2,\"[1861, 
1862]\"\r\nNormal-6.zip,0,Normal,1813,268,80,1,[268]\r\nNormal-2.zip,0,Normal,1756,1101,66,4,\"[1101, 1102, 1103, 1104]\"\r\nCP-20.zip,1,CP,2759,3290,36,1,[3290]\r\nNormal-17.zip,0,Normal,2183,638,110,1,[638]\r\nNCP-6.zip,2,NCP,228,1601,67,2,\"[1600, 1601]\"\r\nNCP-5.zip,2,NCP,197,1539,53,2,\"[1538, 1539]\"\r\nCP-28.zip,1,CP,3766,5710,24,1,[5710]\r\nCP-10.zip,1,CP,1399,3858,45,2,\"[3858, 3859]\"\r\nNormal-14.zip,0,Normal,2074,529,82,1,[529]\r\nNormal-2.zip,0,Normal,1733,1026,71,2,\"[1026, 1027]\"\r\nNCP-11.zip,2,NCP,300,1747,58,2,\"[1746, 1747]\"\r\nCP-17.zip,1,CP,1650,4338,31,1,[4338]\r\nCP-20.zip,1,CP,2455,2937,116,1,[2937]\r\nNormal-20.zip,0,Normal,2279,734,78,1,[734]\r\nCP-8.zip,1,CP,1329,3697,45,3,\"[3695, 3696, 3697]\"\r\nNCP-16.zip,2,NCP,444,2042,146,2,\"[2042, 2043]\"\r\nNormal-12.zip,0,Normal,1999,454,78,1,[454]\r\nCP-17.zip,1,CP,1624,4312,20,1,[4312]\r\nNCP-10.zip,2,NCP,2720,2676,45,1,[2676]\r\nCP-2.zip,1,CP,1107,3325,183,1,[3325]\r\nCP-18.zip,1,CP,1777,3542,62,5,\"[3540, 3541, 3542, 3543, 3544]\"\r\nNCP-15.zip,2,NCP,403,1955,110,2,\"[1955, 1956]\"\r\nNCP-3.zip,2,NCP,134,1413,54,2,\"[1412, 1413]\"\r\nCP-13.zip,1,CP,1500,4099,97,1,[4099]\r\nCP-25.zip,1,CP,712,3074,118,1,[3074]\r\nCP-23.zip,1,CP,648,3010,104,1,[3010]\r\nCP-19.zip,1,CP,2443,2917,98,3,\"[2915, 2916, 2917]\"\r\nNCP-16.zip,2,NCP,441,2036,115,2,\"[2036, 2037]\"\r\nNormal-23.zip,0,Normal,2628,138,34,1,[138]\r\nCP-19.zip,1,CP,1795,3596,41,2,\"[3596, 3597]\"\r\nNCP-27.zip,2,NCP,1016,2580,20,3,\"[2580, 2581, 2582]\"\r\nNormal-24.zip,0,Normal,2659,169,39,1,[169]\r\nCP-17.zip,1,CP,1619,4307,29,1,[4307]\r\nNormal-26.zip,0,Normal,3861,5373,211,1,[5373]\r\nNCP-19.zip,2,NCP,519,2195,53,2,\"[2194, 2195]\"\r\nNCP-6.zip,2,NCP,213,1571,66,2,\"[1570, 1571]\"\r\nNormal-25.zip,0,Normal,3860,5372,212,1,[5372]\r\nNCP-5.zip,2,NCP,192,1529,57,2,\"[1528, 
1529]\"\r\nCP-3.zip,1,CP,1153,3371,179,1,[3371]\r\nCP-3.zip,1,CP,1159,3377,287,1,[3377]\r\nNCP-30.zip,2,NCP,931,2473,21,1,[2473]\r\nCP-6.zip,1,CP,1255,3473,107,1,[3473]\r\nNCP-4.zip,2,NCP,169,1482,133,2,\"[1482, 1483]\"\r\nNCP-12.zip,2,NCP,340,1828,128,2,\"[1828, 1829]\"\r\nCP-26.zip,1,CP,3729,5665,36,3,\"[5665, 5666, 5667]\"\r\nNormal-11.zip,0,Normal,1976,431,74,1,[431]\r\nCP-9.zip,1,CP,1372,3798,49,3,\"[3797, 3798, 3799]\"\r\nNCP-4.zip,2,NCP,161,1467,57,2,\"[1466, 1467]\"\r\nCP-22.zip,1,CP,613,2975,78,1,[2975]\r\nNCP-17.zip,2,NCP,460,2074,106,2,\"[2074, 2075]\"\r\nNCP-21.zip,2,NCP,577,2311,145,2,\"[2311, 2312]\"\r\nCP-25.zip,1,CP,741,3103,523,1,[3103]\r\nCP-14.zip,1,CP,1532,4172,50,2,\"[4171, 4172]\"\r\nNCP-11.zip,2,NCP,299,1744,139,2,\"[1744, 1745]\"\r\nNCP-14.zip,2,NCP,372,1896,45,2,\"[1895, 1896]\"\r\nCP-9.zip,1,CP,1356,3756,60,2,\"[3756, 3757]\"\r\nNormal-11.zip,0,Normal,1968,423,96,1,[423]\r\nCP-14.zip,1,CP,1525,4156,60,2,\"[4155, 4156]\"\r\nCP-22.zip,1,CP,618,2980,166,1,[2980]\r\nCP-17.zip,1,CP,1639,4327,26,1,[4327]\r\nNormal-19.zip,0,Normal,2245,700,83,1,[700]\r\nCP-13.zip,1,CP,1518,4139,67,3,\"[4138, 4139, 4140]\"\r\nNCP-11.zip,2,NCP,29,1190,132,2,\"[1190, 1191]\"\r\nCP-16.zip,1,CP,1615,4303,29,1,[4303]\r\nCP-29.zip,1,CP,3823,5767,26,1,[5767]\r\nNCP-20.zip,2,NCP,574,2306,139,2,\"[2306, 2307]\"\r\nNCP-12.zip,2,NCP,340,1829,54,2,\"[1828, 1829]\"\r\nNormal-21.zip,0,Normal,2285,740,68,1,[740]\r\nNCP-16.zip,2,NCP,455,2065,56,2,\"[2064, 2065]\"\r\nNCP-16.zip,2,NCP,436,2026,61,2,\"[2025, 2026]\"\r\nNCP-14.zip,2,NCP,383,1918,139,2,\"[1918, 1919]\"\r\nNCP-30.zip,2,NCP,988,2538,287,2,\"[2538, 2539]\"\r\nNCP-7.zip,2,NCP,247,1641,66,2,\"[1640, 1641]\"\r\nCP-15.zip,1,CP,1571,4259,16,1,[4259]\r\nNormal-16.zip,0,Normal,2137,592,94,1,[592]\r\nCP-7.zip,1,CP,1304,3637,218,8,\"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 
3639]\"\r\nCP-6.zip,1,CP,1235,3453,155,1,[3453]\r\nNormal-4.zip,0,Normal,776,211,353,1,[211]\r\nNormal-18.zip,0,Normal,2189,644,82,1,[644]\r\nNormal-6.zip,0,Normal,1799,254,97,1,[254]\r\nNormal-15.zip,0,Normal,2113,568,93,1,[568]\r\nCP-3.zip,1,CP,1131,3349,157,1,[3349]\r\nNormal-6.zip,0,Normal,1819,274,91,1,[274]\r\nCP-18.zip,1,CP,1781,3571,62,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nNCP-16.zip,2,NCP,455,2064,132,2,\"[2064, 2065]\"\r\nNormal-8.zip,0,Normal,1888,343,99,1,[343]\r\nNCP-20.zip,2,NCP,571,2301,68,2,\"[2300, 2301]\"\r\nNCP-7.zip,2,NCP,247,1640,159,2,\"[1640, 1641]\"\r\nCP-3.zip,1,CP,1137,3355,147,1,[3355]\r\nCP-11.zip,1,CP,1423,3913,53,3,\"[3911, 3912, 3913]\"\r\nNCP-1.zip,2,NCP,105,1347,145,2,\"[1347, 1348]\"\r\nNCP-14.zip,2,NCP,377,1907,62,2,\"[1906, 1907]\"\r\nCP-14.zip,1,CP,1535,4178,53,2,\"[4178, 4179]\"\r\nNormal-9.zip,0,Normal,1900,355,93,1,[355]\r\nCP-2.zip,1,CP,1125,3343,115,1,[3343]\r\nCP-6.zip,1,CP,1243,3461,176,1,[3461]\r\nNCP-6.zip,2,NCP,203,1550,140,2,\"[1550, 1551]\"\r\nNormal-1.zip,0,Normal,1670,789,63,6,\"[787, 788, 789, 790, 791, 792]\"\r\nNCP-29.zip,2,NCP,909,2451,401,1,[2451]\r\nNCP-25.zip,2,NCP,3949,5463,35,1,[5463]\r\nNormal-26.zip,0,Normal,3879,5391,28,1,[5391]\r\nNCP-11.zip,2,NCP,29,1191,56,2,\"[1190, 1191]\"\r\nCP-3.zip,1,CP,1129,3347,158,1,[3347]\r\nNCP-15.zip,2,NCP,406,1961,146,2,\"[1961, 1962]\"\r\nNCP-4.zip,2,NCP,151,1446,129,2,\"[1446, 1447]\"\r\nCP-19.zip,1,CP,1789,3206,64,4,\"[3204, 3205, 3206, 3207]\"\r\nNCP-6.zip,2,NCP,227,1599,61,2,\"[1598, 1599]\"\r\nCP-12.zip,1,CP,1462,4003,51,3,\"[4002, 4003, 4004]\"\r\nCP-3.zip,1,CP,1147,3365,164,1,[3365]\r\nNormal-23.zip,0,Normal,2629,139,36,1,[139]\r\nNormal-1.zip,0,Normal,1700,953,64,2,\"[953, 954]\"\r\nCP-15.zip,1,CP,1561,4241,49,2,\"[4241, 4242]\"\r\nNCP-16.zip,2,NCP,437,2028,60,2,\"[2027, 
2028]\"\r\nCP-18.zip,1,CP,1654,4342,23,1,[4342]\r\nNormal-20.zip,0,Normal,2273,728,75,1,[728]\r\nNormal-14.zip,0,Normal,2067,522,94,1,[522]\r\nNCP-29.zip,2,NCP,911,2453,48,1,[2453]\r\nNormal-2.zip,0,Normal,1756,1104,65,4,\"[1101, 1102, 1103, 1104]\"\r\nNormal-11.zip,0,Normal,1989,444,105,1,[444]\r\nNCP-15.zip,2,NCP,403,1956,47,2,\"[1955, 1956]\"\r\nNCP-13.zip,2,NCP,348,1848,48,2,\"[1847, 1848]\"\r\nNCP-28.zip,2,NCP,844,2359,594,1,[2359]\r\nNCP-18.zip,2,NCP,51,1235,141,2,\"[1235, 1236]\"\r\nCP-28.zip,1,CP,3789,5733,26,1,[5733]\r\nNormal-2.zip,0,Normal,1763,1138,65,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nNCP-10.zip,2,NCP,278,1702,137,2,\"[1702, 1703]\"\r\nCP-28.zip,1,CP,3770,5714,23,1,[5714]\r\nNCP-23.zip,2,NCP,93,1323,66,2,\"[1322, 1323]\"\r\nNCP-14.zip,2,NCP,390,1930,126,2,\"[1930, 1931]\"\r\nNCP-8.zip,2,NCP,26,1184,82,2,\"[1184, 1185]\"\r\nNormal-2.zip,0,Normal,1763,1142,71,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nNCP-6.zip,2,NCP,201,1547,62,2,\"[1546, 1547]\"\r\nNormal-23.zip,0,Normal,2626,136,33,1,[136]\r\nNCP-25.zip,2,NCP,3707,5534,50,1,[5534]\r\nNormal-21.zip,0,Normal,2305,760,104,1,[760]\r\nNormal-6.zip,0,Normal,1818,273,87,1,[273]\r\nCP-22.zip,1,CP,641,3003,136,1,[3003]\r\nNormal-7.zip,0,Normal,1836,291,104,1,[291]\r\nNormal-27.zip,0,Normal,3894,5417,287,1,[5417]\r\nNCP-30.zip,2,NCP,981,2526,23,2,\"[2525, 2526]\"\r\nNCP-1.zip,2,NCP,102,1341,132,2,\"[1341, 1342]\"\r\nNCP-14.zip,2,NCP,387,1924,128,2,\"[1924, 1925]\"\r\nNCP-2.zip,2,NCP,117,1376,55,2,\"[1375, 1376]\"\r\nNCP-5.zip,2,NCP,190,1524,152,2,\"[1524, 1525]\"\r\nCP-26.zip,1,CP,3639,5598,241,1,[5598]\r\nNormal-1.zip,0,Normal,1670,787,58,6,\"[787, 788, 789, 790, 791, 792]\"\r\nNormal-2.zip,0,Normal,1757,1108,68,4,\"[1105, 1106, 1107, 
1108]\"\r\nNormal-13.zip,0,Normal,2043,498,84,1,[498]\r\nCP-2.zip,1,CP,1099,3317,198,1,[3317]\r\nCP-7.zip,1,CP,1318,3673,56,1,[3673]\r\nNormal-9.zip,0,Normal,1899,354,88,1,[354]\r\nCP-12.zip,1,CP,1467,4014,60,2,\"[4013, 4014]\"\r\nNCP-5.zip,2,NCP,197,1538,124,2,\"[1538, 1539]\"\r\nCP-26.zip,1,CP,3730,5669,202,2,\"[5668, 5669]\"\r\nNCP-22.zip,2,NCP,845,2363,428,4,\"[2360, 2361, 2362, 2363]\"\r\nNCP-2.zip,2,NCP,127,1399,139,2,\"[1399, 1400]\"\r\nNormal-26.zip,0,Normal,3893,5416,63,1,[5416]\r\nNCP-8.zip,2,NCP,2669,2689,37,1,[2689]\r\nCP-18.zip,1,CP,1778,3549,64,6,\"[3545, 3546, 3547, 3548, 3549, 3550]\"\r\nCP-25.zip,1,CP,722,3084,70,1,[3084]\r\nNCP-4.zip,2,NCP,157,1458,114,2,\"[1458, 1459]\"\r\nNCP-23.zip,2,NCP,92,1320,87,2,\"[1320, 1321]\"\r\nCP-11.zip,1,CP,1424,3914,60,2,\"[3914, 3915]\"\r\nNCP-19.zip,2,NCP,529,2215,33,3,\"[2214, 2215, 2217]\"\r\nCP-24.zip,1,CP,704,3066,417,1,[3066]\r\nNCP-6.zip,2,NCP,201,1546,149,2,\"[1546, 1547]\"\r\nNormal-17.zip,0,Normal,2177,632,88,1,[632]\r\nNCP-14.zip,2,NCP,383,1919,58,2,\"[1918, 1919]\"\r\nNormal-2.zip,0,Normal,1737,1040,80,4,\"[1037, 1038, 1039, 1040]\"\r\nNormal-26.zip,0,Normal,3881,5393,22,1,[5393]\r\nNormal-3.zip,0,Normal,1767,1161,71,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nCP-14.zip,1,CP,1525,4155,60,2,\"[4155, 4156]\"\r\nNCP-12.zip,2,NCP,341,1832,55,3,\"[1830, 1832, 1834]\"\r\nNormal-13.zip,0,Normal,2034,489,91,1,[489]\r\nNCP-26.zip,2,NCP,3978,5485,49,1,[5485]\r\nNCP-22.zip,2,NCP,864,2389,221,2,\"[2388, 2389]\"\r\nNCP-9.zip,2,NCP,2682,2652,47,1,[2652]\r\nNCP-7.zip,2,NCP,2461,2642,42,1,[2642]\r\nNormal-21.zip,0,Normal,2303,758,110,1,[758]\r\nNCP-8.zip,2,NCP,2670,2690,41,1,[2690]\r\nCP-7.zip,1,CP,1315,3666,59,2,\"[3665, 3666]\"\r\nCP-19.zip,1,CP,2449,2927,118,1,[2927]\r\nCP-19.zip,1,CP,1789,3204,59,4,\"[3204, 3205, 3206, 
3207]\"\r\nNormal-6.zip,0,Normal,1803,258,100,1,[258]\r\nNormal-1.zip,0,Normal,1675,812,73,1,[812]\r\nNCP-25.zip,2,NCP,3705,5532,63,1,[5532]\r\nNormal-1.zip,0,Normal,1727,1010,63,4,\"[1009, 1010, 1011, 1012]\"\r\nNCP-3.zip,2,NCP,1283,2724,70,1,[2724]\r\nCP-18.zip,1,CP,1774,3524,66,8,\"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]\"\r\nCP-18.zip,1,CP,1774,3525,66,8,\"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]\"\r\nCP-30.zip,1,CP,3919,5543,66,4,\"[5543, 5544, 5545, 5546]\"\r\nNCP-22.zip,2,NCP,85,1304,58,2,\"[1303, 1304]\"\r\nNormal-18.zip,0,Normal,2192,647,79,1,[647]\r\nCP-30.zip,1,CP,3935,5641,70,1,[5641]\r\nNCP-6.zip,2,NCP,227,1598,146,2,\"[1598, 1599]\"\r\nNormal-20.zip,0,Normal,2250,705,76,1,[705]\r\nCP-12.zip,1,CP,1464,4008,63,2,\"[4007, 4008]\"\r\nCP-29.zip,1,CP,3807,5751,20,1,[5751]\r\nNormal-12.zip,0,Normal,1993,448,97,1,[448]\r\nNCP-19.zip,2,NCP,528,2212,140,2,\"[2212, 2213]\"\r\nNCP-26.zip,2,NCP,3987,5511,60,1,[5511]\r\nNCP-25.zip,2,NCP,3969,5478,50,1,[5478]\r\nCP-17.zip,1,CP,1638,4326,25,1,[4326]\r\nCP-17.zip,1,CP,1643,4331,24,1,[4331]\r\nCP-17.zip,1,CP,1629,4317,23,1,[4317]\r\nCP-11.zip,1,CP,1423,3912,53,3,\"[3911, 3912, 3913]\"\r\nNormal-2.zip,0,Normal,1743,1056,73,2,\"[1056, 1057]\"\r\nNormal-9.zip,0,Normal,1915,370,91,1,[370]\r\nNormal-22.zip,0,Normal,2590,100,41,1,[100]\r\nNCP-11.zip,2,NCP,297,1741,60,2,\"[1739, 1741]\"\r\nCP-30.zip,1,CP,3919,5545,70,4,\"[5543, 5544, 5545, 5546]\"\r\nNCP-25.zip,2,NCP,3971,5480,50,1,[5480]\r\nCP-11.zip,1,CP,1454,3983,53,3,\"[3982, 3983, 3984]\"\r\nNormal-21.zip,0,Normal,2282,737,69,1,[737]\r\nNCP-12.zip,2,NCP,318,1783,150,2,\"[1783, 1784]\"\r\nNCP-10.zip,2,NCP,279,1704,139,2,\"[1704, 1705]\"\r\nCP-2.zip,1,CP,1108,3326,135,1,[3326]\r\nNormal-2.zip,0,Normal,1733,1027,71,2,\"[1026, 1027]\"\r\nCP-32.zip,1,CP,1781,3567,67,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nNCP-10.zip,2,NCP,2725,2681,51,1,[2681]\r\nCP-11.zip,1,CP,1425,3917,49,3,\"[3916, 3917, 
3918]\"\r\nNormal-1.zip,0,Normal,1701,955,70,2,\"[955, 956]\"\r\nCP-19.zip,1,CP,1787,3195,59,1,[3195]\r\n"
  },
  {
    "path": "Finetune/CC-CCII/csv/CC_CCII_fold0_valid.csv",
    "content": "zip_file,target,label,patient_id,scan_id,n_slice,scan_count,all_scan_ids\r\nNormal-12.zip,0,Normal,2015,470,94,1,[470]\r\nNCP-6.zip,2,NCP,206,1557,58,2,\"[1556, 1557]\"\r\nCP-1.zip,1,CP,1096,3314,196,1,[3314]\r\nNCP-16.zip,2,NCP,43,1220,65,2,\"[1219, 1220]\"\r\nNCP-18.zip,2,NCP,499,2155,58,2,\"[2154, 2155]\"\r\nCP-10.zip,1,CP,1409,3881,66,2,\"[3881, 3882]\"\r\nNormal-4.zip,0,Normal,777,212,83,1,[212]\r\nNCP-9.zip,2,NCP,2708,2701,59,1,[2701]\r\nCP-11.zip,1,CP,1432,3933,60,2,\"[3932, 3933]\"\r\nNCP-4.zip,2,NCP,141,1426,129,2,\"[1426, 1427]\"\r\nCP-23.zip,1,CP,673,3035,76,1,[3035]\r\nNCP-29.zip,2,NCP,879,2414,173,1,[2414]\r\nNCP-19.zip,2,NCP,536,2229,145,2,\"[2229, 2230]\"\r\nNCP-18.zip,2,NCP,504,2165,65,2,\"[2164, 2165]\"\r\nNormal-1.zip,0,Normal,1678,829,34,6,\"[827, 828, 829, 830, 831, 832]\"\r\nNCP-8.zip,2,NCP,264,1674,179,2,\"[1674, 1675]\"\r\nNCP-4.zip,2,NCP,155,1454,139,2,\"[1454, 1455]\"\r\nCP-11.zip,1,CP,1418,3900,180,3,\"[3900, 3901, 3902]\"\r\nNCP-5.zip,2,NCP,194,1532,133,2,\"[1532, 1533]\"\r\nNCP-13.zip,2,NCP,361,1873,143,2,\"[1873, 1874]\"\r\nNormal-1.zip,0,Normal,1710,976,78,2,\"[975, 976]\"\r\nNormal-15.zip,0,Normal,2091,546,106,1,[546]\r\nNCP-19.zip,2,NCP,518,2192,135,2,\"[2192, 2193]\"\r\nNormal-18.zip,0,Normal,2190,645,90,1,[645]\r\nNormal-12.zip,0,Normal,2013,468,87,1,[468]\r\nNCP-11.zip,2,NCP,302,1751,62,2,\"[1750, 1751]\"\r\nNormal-15.zip,0,Normal,2109,564,103,1,[564]\r\nNCP-8.zip,2,NCP,264,1675,75,2,\"[1674, 1675]\"\r\nCP-23.zip,1,CP,653,3015,285,1,[3015]\r\nNCP-7.zip,2,NCP,235,1615,139,2,\"[1615, 1616]\"\r\nCP-19.zip,1,CP,1786,3194,77,3,\"[3192, 3193, 3194]\"\r\nCP-1.zip,1,CP,0,3137,37,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nNCP-15.zip,2,NCP,423,1999,133,2,\"[1999, 2000]\"\r\nCP-6.zip,1,CP,1232,3450,91,1,[3450]\r\nCP-14.zip,1,CP,1526,4158,51,3,\"[4157, 4158, 4159]\"\r\nCP-4.zip,1,CP,1184,3402,193,1,[3402]\r\nNCP-17.zip,2,NCP,483,2122,56,2,\"[2121, 2122]\"\r\nCP-12.zip,1,CP,1459,3996,69,3,\"[3995, 
3996, 3997]\"\r\nCP-17.zip,1,CP,1637,4325,20,1,[4325]\r\nCP-10.zip,1,CP,1411,3885,66,2,\"[3885, 3886]\"\r\nNCP-9.zip,2,NCP,2707,2673,44,1,[2673]\r\nNCP-29.zip,2,NCP,892,2431,20,1,[2431]\r\nCP-26.zip,1,CP,3720,5653,243,2,\"[5652, 5653]\"\r\nNormal-13.zip,0,Normal,2023,478,96,1,[478]\r\nCP-11.zip,1,CP,1439,3947,62,2,\"[3946, 3947]\"\r\nNormal-6.zip,0,Normal,1801,256,89,1,[256]\r\nNCP-16.zip,2,NCP,442,2038,131,2,\"[2038, 2039]\"\r\nNormal-9.zip,0,Normal,1920,375,100,1,[375]\r\nCP-13.zip,1,CP,1489,4067,457,4,\"[4067, 4068, 4069, 4070]\"\r\nCP-9.zip,1,CP,1378,3811,50,2,\"[3810, 3811]\"\r\nNCP-12.zip,2,NCP,336,1821,50,2,\"[1820, 1821]\"\r\nNCP-3.zip,2,NCP,1295,2736,61,1,[2736]\r\nNormal-20.zip,0,Normal,2268,723,85,1,[723]\r\nNormal-20.zip,0,Normal,2281,736,84,1,[736]\r\nCP-1.zip,1,CP,1083,3128,71,2,\"[3128, 3129]\"\r\nCP-14.zip,1,CP,1545,4207,65,2,\"[4206, 4207]\"\r\nNormal-21.zip,0,Normal,2306,761,103,1,[761]\r\nNCP-13.zip,2,NCP,350,1852,47,2,\"[1851, 1852]\"\r\nCP-8.zip,1,CP,1326,3688,53,2,\"[3688, 3689]\"\r\nNCP-7.zip,2,NCP,236,1617,283,2,\"[1617, 1618]\"\r\nNormal-1.zip,0,Normal,1722,1001,73,2,\"[1001, 1002]\"\r\nNCP-5.zip,2,NCP,177,1498,139,2,\"[1498, 1499]\"\r\nNormal-1.zip,0,Normal,1708,971,74,2,\"[971, 972]\"\r\nNCP-8.zip,2,NCP,2680,2651,46,1,[2651]\r\nNCP-20.zip,2,NCP,570,2298,139,2,\"[2298, 2299]\"\r\nNormal-1.zip,0,Normal,1723,1004,77,2,\"[1003, 1004]\"\r\nNCP-10.zip,2,NCP,2723,2679,40,1,[2679]\r\nNormal-21.zip,0,Normal,2302,757,96,1,[757]\r\nNormal-18.zip,0,Normal,2199,654,85,1,[654]\r\nNormal-25.zip,0,Normal,3858,5370,234,1,[5370]\r\nNormal-21.zip,0,Normal,2286,741,84,1,[741]\r\nNormal-1.zip,0,Normal,1720,995,74,2,\"[995, 996]\"\r\nNormal-3.zip,0,Normal,769,204,138,1,[204]\r\nNCP-9.zip,2,NCP,2687,2654,51,1,[2654]\r\nNormal-16.zip,0,Normal,2124,579,101,1,[579]\r\nNCP-6.zip,2,NCP,206,1556,139,2,\"[1556, 1557]\"\r\nNormal-20.zip,0,Normal,2256,711,86,1,[711]\r\nCP-10.zip,1,CP,1411,3886,66,2,\"[3885, 3886]\"\r\nCP-11.zip,1,CP,1418,3901,54,3,\"[3900, 3901, 
3902]\"\r\nNCP-4.zip,2,NCP,155,1455,58,2,\"[1454, 1455]\"\r\nNCP-19.zip,2,NCP,536,2230,61,2,\"[2229, 2230]\"\r\nCP-13.zip,1,CP,1489,4068,229,4,\"[4067, 4068, 4069, 4070]\"\r\nNormal-1.zip,0,Normal,1722,1002,73,2,\"[1001, 1002]\"\r\nCP-14.zip,1,CP,1526,4157,124,3,\"[4157, 4158, 4159]\"\r\nCP-13.zip,1,CP,1489,4069,58,4,\"[4067, 4068, 4069, 4070]\"\r\nCP-1.zip,1,CP,0,3134,37,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nNCP-5.zip,2,NCP,177,1499,58,2,\"[1498, 1499]\"\r\nNCP-13.zip,2,NCP,350,1851,109,2,\"[1851, 1852]\"\r\nNormal-1.zip,0,Normal,1678,827,58,6,\"[827, 828, 829, 830, 831, 832]\"\r\nCP-1.zip,1,CP,1083,3129,71,2,\"[3128, 3129]\"\r\nCP-1.zip,1,CP,0,3140,269,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nCP-9.zip,1,CP,1378,3810,50,2,\"[3810, 3811]\"\r\nCP-8.zip,1,CP,1326,3689,53,2,\"[3688, 3689]\"\r\nCP-1.zip,1,CP,0,3133,290,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nNCP-4.zip,2,NCP,141,1427,54,2,\"[1426, 1427]\"\r\nNormal-1.zip,0,Normal,1723,1003,77,2,\"[1003, 1004]\"\r\nNCP-15.zip,2,NCP,423,2000,56,2,\"[1999, 2000]\"\r\nNCP-11.zip,2,NCP,302,1750,152,2,\"[1750, 1751]\"\r\nNCP-20.zip,2,NCP,570,2299,58,2,\"[2298, 2299]\"\r\nCP-12.zip,1,CP,1459,3995,164,3,\"[3995, 3996, 3997]\"\r\nNCP-16.zip,2,NCP,442,2039,53,2,\"[2038, 2039]\"\r\nCP-1.zip,1,CP,0,3136,290,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nCP-1.zip,1,CP,0,3135,269,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nCP-14.zip,1,CP,1526,4159,51,3,\"[4157, 4158, 4159]\"\r\nCP-1.zip,1,CP,0,3131,285,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nNCP-12.zip,2,NCP,336,1820,117,2,\"[1820, 1821]\"\r\nNCP-7.zip,2,NCP,235,1616,58,2,\"[1615, 1616]\"\r\nCP-11.zip,1,CP,1418,3902,54,3,\"[3900, 3901, 3902]\"\r\nNCP-7.zip,2,NCP,236,1618,119,2,\"[1617, 1618]\"\r\nCP-11.zip,1,CP,1439,3946,62,2,\"[3946, 3947]\"\r\nCP-1.zip,1,CP,0,3139,39,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 
3137, 3138, 3139, 3140]\"\r\nCP-1.zip,1,CP,0,3132,42,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nNormal-1.zip,0,Normal,1708,972,74,2,\"[971, 972]\"\r\nCP-12.zip,1,CP,1459,3997,69,3,\"[3995, 3996, 3997]\"\r\nNCP-18.zip,2,NCP,504,2164,155,2,\"[2164, 2165]\"\r\nNormal-1.zip,0,Normal,1720,996,74,2,\"[995, 996]\"\r\nCP-19.zip,1,CP,1786,3192,81,3,\"[3192, 3193, 3194]\"\r\nNormal-1.zip,0,Normal,1678,830,34,6,\"[827, 828, 829, 830, 831, 832]\"\r\nCP-26.zip,1,CP,3720,5652,48,2,\"[5652, 5653]\"\r\nCP-19.zip,1,CP,1786,3193,81,3,\"[3192, 3193, 3194]\"\r\nCP-13.zip,1,CP,1489,4070,58,4,\"[4067, 4068, 4069, 4070]\"\r\nNCP-13.zip,2,NCP,361,1874,60,2,\"[1873, 1874]\"\r\nNCP-17.zip,2,NCP,483,2121,137,2,\"[2121, 2122]\"\r\nNormal-1.zip,0,Normal,1678,832,62,6,\"[827, 828, 829, 830, 831, 832]\"\r\nNCP-16.zip,2,NCP,43,1219,156,2,\"[1219, 1220]\"\r\nNCP-18.zip,2,NCP,499,2154,139,2,\"[2154, 2155]\"\r\nCP-10.zip,1,CP,1409,3882,66,2,\"[3881, 3882]\"\r\nNCP-5.zip,2,NCP,194,1533,56,2,\"[1532, 1533]\"\r\nNCP-19.zip,2,NCP,518,2193,57,2,\"[2192, 2193]\"\r\nCP-11.zip,1,CP,1432,3932,60,2,\"[3932, 3933]\"\r\nNormal-1.zip,0,Normal,1678,828,58,6,\"[827, 828, 829, 830, 831, 832]\"\r\nNormal-1.zip,0,Normal,1678,831,62,6,\"[827, 828, 829, 830, 831, 832]\"\r\nCP-1.zip,1,CP,0,3138,245,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nNormal-1.zip,0,Normal,1710,975,78,2,\"[975, 976]\"\r\nCP-14.zip,1,CP,1545,4206,65,2,\"[4206, 4207]\"\r\nNCP-5.zip,2,NCP,18,1169,57,2,\"[1168, 1169]\"\r\nNormal-15.zip,0,Normal,2096,551,93,1,[551]\r\nCP-21.zip,1,CP,2776,3307,31,1,[3307]\r\nNCP-16.zip,2,NCP,449,2053,61,2,\"[2052, 2053]\"\r\nNCP-15.zip,2,NCP,404,1958,46,2,\"[1957, 1958]\"\r\nNCP-6.zip,2,NCP,210,1565,55,2,\"[1564, 1565]\"\r\nCP-3.zip,1,CP,1144,3362,159,1,[3362]\r\nNormal-8.zip,0,Normal,1879,334,88,1,[334]\r\nNormal-1.zip,0,Normal,1721,1000,75,4,\"[1000, 997, 998, 999]\"\r\nNCP-21.zip,2,NCP,583,2323,147,2,\"[2323, 
2324]\"\r\nNCP-1.zip,2,NCP,1039,2610,45,1,[2610]\r\nNormal-8.zip,0,Normal,1882,337,86,1,[337]\r\nNormal-21.zip,0,Normal,2307,762,80,1,[762]\r\nCP-14.zip,1,CP,1528,4163,61,2,\"[4163, 4164]\"\r\nCP-11.zip,1,CP,1443,3958,58,3,\"[3957, 3958, 3959]\"\r\nNCP-18.zip,2,NCP,496,2149,70,2,\"[2148, 2149]\"\r\nCP-7.zip,1,CP,1270,3489,204,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nNormal-7.zip,0,Normal,1834,289,82,1,[289]\r\nNCP-13.zip,2,NCP,351,1853,145,2,\"[1853, 1854]\"\r\nCP-18.zip,1,CP,1782,3584,71,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]\"\r\nNormal-1.zip,0,Normal,1676,816,65,7,\"[816, 817, 818, 819, 820, 821, 822]\"\r\nCP-11.zip,1,CP,1428,3923,221,3,\"[3923, 3924, 3925]\"\r\nCP-8.zip,1,CP,1330,3699,58,3,\"[3698, 3699, 3700]\"\r\nNormal-19.zip,0,Normal,2233,688,76,1,[688]\r\nNCP-18.zip,2,NCP,514,2184,160,2,\"[2184, 2185]\"\r\nNormal-6.zip,0,Normal,1804,259,102,1,[259]\r\nNormal-22.zip,0,Normal,2598,108,38,1,[108]\r\nCP-14.zip,1,CP,1534,4176,58,2,\"[4176, 4177]\"\r\nCP-5.zip,1,CP,1217,3435,320,1,[3435]\r\nNCP-14.zip,2,NCP,378,1908,168,2,\"[1908, 1909]\"\r\nCP-18.zip,1,CP,1782,3582,71,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]\"\r\nNCP-25.zip,2,NCP,3963,5474,56,1,[5474]\r\nNCP-22.zip,2,NCP,82,1298,55,2,\"[1297, 1298]\"\r\nNCP-2.zip,2,NCP,1274,2715,55,1,[2715]\r\nCP-22.zip,1,CP,619,2981,102,1,[2981]\r\nNormal-24.zip,0,Normal,2661,171,31,1,[171]\r\nCP-14.zip,1,CP,1540,4192,58,3,\"[4191, 4192, 4193]\"\r\nNCP-10.zip,2,NCP,2724,2680,43,1,[2680]\r\nNormal-2.zip,0,Normal,1742,1055,60,1,[1055]\r\nCP-12.zip,1,CP,1486,4060,63,2,\"[4059, 4060]\"\r\nNCP-19.zip,2,NCP,527,2211,48,2,\"[2210, 2211]\"\r\nCP-10.zip,1,CP,1393,3846,60,2,\"[3845, 3846]\"\r\nNormal-1.zip,0,Normal,1721,997,68,4,\"[1000, 997, 998, 999]\"\r\nNormal-25.zip,0,Normal,3839,5351,220,1,[5351]\r\nNormal-12.zip,0,Normal,1991,446,306,1,[446]\r\nCP-19.zip,1,CP,1794,3595,38,2,\"[3594, 
3595]\"\r\nNormal-1.zip,0,Normal,1669,785,54,5,\"[782, 783, 784, 785, 786]\"\r\nCP-18.zip,1,CP,1782,3580,71,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]\"\r\nNormal-11.zip,0,Normal,1963,418,95,1,[418]\r\nCP-11.zip,1,CP,1428,3924,56,3,\"[3923, 3924, 3925]\"\r\nNormal-9.zip,0,Normal,1918,373,85,1,[373]\r\nNormal-16.zip,0,Normal,2118,573,89,1,[573]\r\nNCP-4.zip,2,NCP,140,1424,128,2,\"[1424, 1425]\"\r\nNormal-16.zip,0,Normal,2142,597,84,1,[597]\r\nNCP-15.zip,2,NCP,410,1969,143,2,\"[1969, 1970]\"\r\nNormal-3.zip,0,Normal,749,184,89,1,[184]\r\nNormal-1.zip,0,Normal,1718,991,66,2,\"[991, 992]\"\r\nNCP-5.zip,2,NCP,176,1497,53,2,\"[1496, 1497]\"\r\nNCP-8.zip,2,NCP,265,1677,50,2,\"[1676, 1677]\"\r\nCP-7.zip,1,CP,1270,3495,148,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nNCP-26.zip,2,NCP,3982,5489,34,1,[5489]\r\nNCP-8.zip,2,NCP,2677,2695,51,1,[2695]\r\nNCP-13.zip,2,NCP,357,1866,63,2,\"[1865, 1866]\"\r\nNCP-13.zip,2,NCP,346,1843,139,2,\"[1843, 1844]\"\r\nNormal-1.zip,0,Normal,1676,820,72,7,\"[816, 817, 818, 819, 820, 821, 822]\"\r\nNCP-14.zip,2,NCP,379,1911,62,2,\"[1910, 1911]\"\r\nNCP-1.zip,2,NCP,104,1345,139,2,\"[1345, 1346]\"\r\nNCP-2.zip,2,NCP,116,1373,127,2,\"[1373, 1374]\"\r\nNCP-17.zip,2,NCP,466,2087,145,2,\"[2087, 2088]\"\r\nCP-11.zip,1,CP,1443,3957,139,3,\"[3957, 3958, 3959]\"\r\nNCP-5.zip,2,NCP,181,1507,58,2,\"[1506, 1507]\"\r\nNCP-18.zip,2,NCP,496,2148,168,2,\"[2148, 2149]\"\r\nNCP-8.zip,2,NCP,265,1676,119,2,\"[1676, 1677]\"\r\nNormal-1.zip,0,Normal,1669,782,62,5,\"[782, 783, 784, 785, 786]\"\r\nCP-7.zip,1,CP,1270,3501,420,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nNormal-1.zip,0,Normal,1676,822,69,7,\"[816, 817, 818, 819, 820, 821, 822]\"\r\nNCP-21.zip,2,NCP,583,2324,62,2,\"[2323, 2324]\"\r\nNCP-19.zip,2,NCP,527,2210,114,2,\"[2210, 2211]\"\r\nNCP-15.zip,2,NCP,404,1957,108,2,\"[1957, 1958]\"\r\nNCP-17.zip,2,NCP,466,2088,61,2,\"[2087, 
2088]\"\r\nNCP-4.zip,2,NCP,140,1425,54,2,\"[1424, 1425]\"\r\nNCP-13.zip,2,NCP,346,1844,58,2,\"[1843, 1844]\"\r\nCP-7.zip,1,CP,1270,3494,129,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nCP-7.zip,1,CP,1270,3497,133,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nCP-18.zip,1,CP,1782,3579,71,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]\"\r\nNormal-1.zip,0,Normal,1676,818,65,7,\"[816, 817, 818, 819, 820, 821, 822]\"\r\nCP-11.zip,1,CP,1428,3925,56,3,\"[3923, 3924, 3925]\"\r\nCP-7.zip,1,CP,1270,3488,287,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nCP-7.zip,1,CP,1270,3500,160,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nNormal-1.zip,0,Normal,1669,784,196,5,\"[782, 783, 784, 785, 786]\"\r\nNormal-1.zip,0,Normal,1669,783,62,5,\"[782, 783, 784, 785, 786]\"\r\nCP-18.zip,1,CP,1782,3586,69,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]\"\r\nNormal-1.zip,0,Normal,1721,998,68,4,\"[1000, 997, 998, 999]\"\r\nNormal-1.zip,0,Normal,1676,817,65,7,\"[816, 817, 818, 819, 820, 821, 822]\"\r\nCP-14.zip,1,CP,1540,4193,58,3,\"[4191, 4192, 4193]\"\r\nNormal-1.zip,0,Normal,1676,821,72,7,\"[816, 817, 818, 819, 820, 821, 822]\"\r\nNormal-1.zip,0,Normal,1676,819,65,7,\"[816, 817, 818, 819, 820, 821, 822]\"\r\nNCP-22.zip,2,NCP,82,1297,129,2,\"[1297, 1298]\"\r\nNormal-1.zip,0,Normal,1718,992,66,2,\"[991, 992]\"\r\nCP-7.zip,1,CP,1270,3496,154,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nNCP-5.zip,2,NCP,181,1506,139,2,\"[1506, 1507]\"\r\nCP-7.zip,1,CP,1270,3492,137,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nNCP-13.zip,2,NCP,357,1865,150,2,\"[1865, 1866]\"\r\nCP-11.zip,1,CP,1443,3959,58,3,\"[3957, 3958, 
3959]\"\r\nNormal-1.zip,0,Normal,1669,786,54,5,\"[782, 783, 784, 785, 786]\"\r\nNCP-6.zip,2,NCP,210,1564,131,2,\"[1564, 1565]\"\r\nCP-19.zip,1,CP,1794,3594,38,2,\"[3594, 3595]\"\r\nNCP-15.zip,2,NCP,410,1970,60,2,\"[1969, 1970]\"\r\nNCP-14.zip,2,NCP,379,1910,147,2,\"[1910, 1911]\"\r\nCP-7.zip,1,CP,1270,3491,142,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nNormal-1.zip,0,Normal,1721,999,75,4,\"[1000, 997, 998, 999]\"\r\nCP-14.zip,1,CP,1540,4191,221,3,\"[4191, 4192, 4193]\"\r\nCP-12.zip,1,CP,1486,4059,63,2,\"[4059, 4060]\"\r\nCP-14.zip,1,CP,1528,4164,61,2,\"[4163, 4164]\"\r\nNCP-16.zip,2,NCP,449,2052,145,2,\"[2052, 2053]\"\r\nNCP-13.zip,2,NCP,351,1854,61,2,\"[1853, 1854]\"\r\nCP-7.zip,1,CP,1270,3498,247,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nCP-8.zip,1,CP,1330,3698,58,3,\"[3698, 3699, 3700]\"\r\nNCP-2.zip,2,NCP,116,1374,54,2,\"[1373, 1374]\"\r\nNCP-18.zip,2,NCP,514,2185,67,2,\"[2184, 2185]\"\r\nCP-18.zip,1,CP,1782,3587,69,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]\"\r\nCP-8.zip,1,CP,1330,3700,58,3,\"[3698, 3699, 3700]\"\r\nNCP-14.zip,2,NCP,378,1909,69,2,\"[1908, 1909]\"\r\nNCP-1.zip,2,NCP,104,1346,58,2,\"[1345, 1346]\"\r\nCP-14.zip,1,CP,1534,4177,58,2,\"[4176, 4177]\"\r\nCP-7.zip,1,CP,1270,3490,237,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nCP-7.zip,1,CP,1270,3493,193,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nCP-18.zip,1,CP,1782,3583,71,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]\"\r\nCP-7.zip,1,CP,1270,3502,21,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nCP-7.zip,1,CP,1270,3499,363,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nNCP-5.zip,2,NCP,18,1168,135,2,\"[1168, 
1169]\"\r\nCP-18.zip,1,CP,1782,3585,71,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]\"\r\nNCP-5.zip,2,NCP,176,1496,126,2,\"[1496, 1497]\"\r\nCP-18.zip,1,CP,1782,3581,71,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]\"\r\nCP-10.zip,1,CP,1393,3845,60,2,\"[3845, 3846]\"\r\nNormal-2.zip,0,Normal,1740,1050,21,8,\"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]\"\r\nCP-10.zip,1,CP,1387,3830,51,2,\"[3829, 3830]\"\r\nNCP-10.zip,2,NCP,2719,2675,44,1,[2675]\r\nCP-1.zip,1,CP,1065,3104,58,1,[3104]\r\nCP-10.zip,1,CP,1392,3843,62,2,\"[3843, 3844]\"\r\nCP-13.zip,1,CP,1508,4117,57,3,\"[4115, 4116, 4117]\"\r\nNCP-22.zip,2,NCP,863,2387,282,2,\"[2386, 2387]\"\r\nNormal-3.zip,0,Normal,763,198,102,1,[198]\r\nNormal-23.zip,0,Normal,2635,145,27,1,[145]\r\nNCP-20.zip,2,NCP,572,2303,58,2,\"[2302, 2303]\"\r\nNormal-1.zip,0,Normal,1683,862,65,6,\"[861, 862, 864, 865, 868, 869]\"\r\nCP-10.zip,1,CP,1398,3856,44,2,\"[3856, 3857]\"\r\nCP-15.zip,1,CP,1566,4252,54,2,\"[4252, 4253]\"\r\nNCP-10.zip,2,NCP,280,1707,51,2,\"[1706, 1707]\"\r\nCP-19.zip,1,CP,1785,3187,67,6,\"[3186, 3187, 3188, 3189, 3190, 3191]\"\r\nCP-15.zip,1,CP,1570,4258,22,1,[4258]\r\nCP-10.zip,1,CP,1413,3890,66,2,\"[3889, 3890]\"\r\nCP-7.zip,1,CP,1303,3618,42,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-19.zip,1,CP,2435,2903,295,3,\"[2901, 2902, 2903]\"\r\nCP-6.zip,1,CP,1229,3447,144,1,[3447]\r\nCP-26.zip,1,CP,3718,5647,51,2,\"[5647, 5648]\"\r\nNCP-22.zip,2,NCP,860,2382,212,2,\"[2382, 2383]\"\r\nNCP-22.zip,2,NCP,883,2419,52,2,\"[2419, 2420]\"\r\nNormal-2.zip,0,Normal,1751,1079,61,6,\"[1079, 1080, 1081, 1082, 1083, 1084]\"\r\nCP-3.zip,1,CP,1148,3366,158,1,[3366]\r\nCP-5.zip,1,CP,1200,3418,309,1,[3418]\r\nNormal-25.zip,0,Normal,3852,5364,195,1,[5364]\r\nCP-1.zip,1,CP,1088,3221,54,4,\"[3220, 3221, 3222, 
3223]\"\r\nCP-21.zip,1,CP,585,2947,94,1,[2947]\r\nCP-18.zip,1,CP,1772,3178,72,6,\"[3176, 3177, 3178, 3179, 3180, 3181]\"\r\nNCP-20.zip,2,NCP,559,2275,127,2,\"[2275, 2276]\"\r\nNCP-18.zip,2,NCP,498,2153,58,2,\"[2152, 2153]\"\r\nNormal-27.zip,0,Normal,3911,5448,64,6,\"[5447, 5448, 5449, 5450, 5451, 5452]\"\r\nCP-1.zip,1,CP,1078,3123,68,1,[3123]\r\nCP-12.zip,1,CP,1473,4028,51,3,\"[4026, 4027, 4028]\"\r\nNormal-17.zip,0,Normal,2158,613,100,1,[613]\r\nNCP-7.zip,2,NCP,246,1639,58,2,\"[1638, 1639]\"\r\nNCP-17.zip,2,NCP,473,2102,61,2,\"[2101, 2102]\"\r\nNormal-2.zip,0,Normal,1732,1025,73,1,[1025]\r\nCP-15.zip,1,CP,1559,4237,53,2,\"[4237, 4238]\"\r\nCP-7.zip,1,CP,1259,3477,162,1,[3477]\r\nNCP-10.zip,2,NCP,271,1688,146,2,\"[1688, 1689]\"\r\nCP-7.zip,1,CP,1303,3627,252,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNCP-11.zip,2,NCP,286,1717,121,2,\"[1717, 1718]\"\r\nCP-14.zip,1,CP,1541,4194,142,3,\"[4194, 4195, 4196]\"\r\nNormal-19.zip,0,Normal,2223,678,95,1,[678]\r\nCP-18.zip,1,CP,1658,4346,29,1,[4346]\r\nNCP-22.zip,2,NCP,822,2333,31,2,\"[2332, 2333]\"\r\nNCP-28.zip,2,NCP,870,2400,47,2,\"[2399, 2400]\"\r\nCP-9.zip,1,CP,1373,3801,55,2,\"[3800, 3801]\"\r\nCP-12.zip,1,CP,1456,3990,52,3,\"[3988, 3989, 3990]\"\r\nNCP-21.zip,2,NCP,75,1284,54,2,\"[1283, 1284]\"\r\nCP-9.zip,1,CP,1367,3787,58,3,\"[3785, 3786, 3787]\"\r\nNCP-17.zip,2,NCP,469,2094,66,2,\"[2093, 2094]\"\r\nCP-1.zip,1,CP,1097,3315,119,1,[3315]\r\nNCP-8.zip,2,NCP,255,1656,139,2,\"[1656, 1657]\"\r\nCP-11.zip,1,CP,1438,3944,46,2,\"[3944, 3945]\"\r\nNCP-6.zip,2,NCP,211,1566,137,2,\"[1566, 1567]\"\r\nNormal-2.zip,0,Normal,1759,1115,64,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nNCP-25.zip,2,NCP,3966,5476,43,1,[5476]\r\nNCP-21.zip,2,NCP,575,2309,61,2,\"[2308, 2309]\"\r\nCP-14.zip,1,CP,1523,4150,65,2,\"[4150, 4151]\"\r\nCP-19.zip,1,CP,2447,2923,83,2,\"[2923, 
2924]\"\r\nCP-19.zip,1,CP,1788,3203,57,8,\"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]\"\r\nNormal-2.zip,0,Normal,1740,1045,102,8,\"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]\"\r\nNormal-1.zip,0,Normal,1681,845,69,1,[845]\r\nNCP-11.zip,2,NCP,310,1768,70,2,\"[1767, 1768]\"\r\nCP-5.zip,1,CP,1220,3438,200,1,[3438]\r\nNCP-22.zip,2,NCP,87,1307,145,2,\"[1307, 1308]\"\r\nNormal-4.zip,0,Normal,786,221,124,1,[221]\r\nNormal-20.zip,0,Normal,2270,725,86,1,[725]\r\nCP-16.zip,1,CP,1593,4281,22,1,[4281]\r\nNormal-18.zip,0,Normal,2200,655,94,1,[655]\r\nNCP-18.zip,2,NCP,515,2187,58,2,\"[2186, 2187]\"\r\nCP-28.zip,1,CP,3784,5728,29,1,[5728]\r\nNCP-5.zip,2,NCP,172,1488,139,2,\"[1488, 1489]\"\r\nCP-2.zip,1,CP,1109,3327,210,1,[3327]\r\nNCP-20.zip,2,NCP,551,2260,65,2,\"[2259, 2260]\"\r\nCP-19.zip,1,CP,2444,2918,124,2,\"[2918, 2919]\"\r\nNormal-2.zip,0,Normal,1760,1122,137,4,\"[1121, 1122, 1123, 1124]\"\r\nCP-12.zip,1,CP,1476,4033,106,2,\"[4033, 4034]\"\r\nCP-14.zip,1,CP,1538,4186,66,3,\"[4185, 4186, 4187]\"\r\nNCP-21.zip,2,NCP,61,1256,60,2,\"[1255, 1256]\"\r\nNormal-13.zip,0,Normal,2046,501,79,1,[501]\r\nCP-15.zip,1,CP,1565,4250,66,2,\"[4250, 4251]\"\r\nCP-10.zip,1,CP,1407,3876,58,2,\"[3876, 3877]\"\r\nCP-13.zip,1,CP,1508,4116,57,3,\"[4115, 4116, 4117]\"\r\nNormal-27.zip,0,Normal,3905,5437,288,2,\"[5437, 5438]\"\r\nNCP-13.zip,2,NCP,36,1204,141,2,\"[1204, 1205]\"\r\nNCP-30.zip,2,NCP,941,2484,169,1,[2484]\r\nNormal-2.zip,0,Normal,1758,1109,291,2,\"[1109, 1110]\"\r\nCP-8.zip,1,CP,1342,3723,139,3,\"[3723, 3724, 3725]\"\r\nCP-3.zip,1,CP,1132,3350,180,1,[3350]\r\nCP-18.zip,1,CP,1773,3184,67,4,\"[3182, 3183, 3184, 3185]\"\r\nNCP-17.zip,2,NCP,464,2083,60,2,\"[2082, 2083]\"\r\nNCP-16.zip,2,NCP,447,2048,139,2,\"[2048, 2049]\"\r\nNCP-3.zip,2,NCP,136,1416,126,2,\"[1416, 1417]\"\r\nNCP-18.zip,2,NCP,501,2158,146,2,\"[2158, 2159]\"\r\nCP-19.zip,1,CP,2439,2909,409,1,[2909]\r\nNCP-19.zip,2,NCP,538,2233,142,2,\"[2233, 2234]\"\r\nNormal-27.zip,0,Normal,3907,5440,63,2,\"[5440, 
5441]\"\r\nCP-18.zip,1,CP,1773,3182,61,4,\"[3182, 3183, 3184, 3185]\"\r\nCP-8.zip,1,CP,1320,3677,62,2,\"[3676, 3677]\"\r\nCP-9.zip,1,CP,1366,3782,138,3,\"[3782, 3783, 3784]\"\r\nCP-7.zip,1,CP,1309,3651,49,2,\"[3651, 3652]\"\r\nNCP-22.zip,2,NCP,863,2386,228,2,\"[2386, 2387]\"\r\nNCP-18.zip,2,NCP,492,2140,139,2,\"[2140, 2141]\"\r\nNCP-21.zip,2,NCP,69,1271,48,2,\"[1270, 1271]\"\r\nCP-13.zip,1,CP,1515,4131,137,3,\"[4131, 4132, 4133]\"\r\nNormal-11.zip,0,Normal,1980,435,83,1,[435]\r\nNormal-14.zip,0,Normal,2073,528,87,1,[528]\r\nCP-3.zip,1,CP,1149,3367,157,1,[3367]\r\nNCP-14.zip,2,NCP,376,1905,60,2,\"[1904, 1905]\"\r\nNCP-8.zip,2,NCP,253,1653,58,2,\"[1652, 1653]\"\r\nCP-10.zip,1,CP,1413,3889,67,2,\"[3889, 3890]\"\r\nNCP-27.zip,2,NCP,1061,2638,75,1,[2638]\r\nNormal-9.zip,0,Normal,1921,376,80,1,[376]\r\nNCP-16.zip,2,NCP,453,2061,51,2,\"[2060, 2061]\"\r\nNCP-10.zip,2,NCP,275,1697,64,2,\"[1696, 1697]\"\r\nCP-24.zip,1,CP,708,3070,80,1,[3070]\r\nNCP-20.zip,2,NCP,560,2277,124,2,\"[2277, 2279]\"\r\nNCP-6.zip,2,NCP,207,1558,109,2,\"[1558, 1559]\"\r\nNCP-2.zip,2,NCP,114,1370,53,2,\"[1369, 1370]\"\r\nCP-10.zip,1,CP,1407,3877,58,2,\"[3876, 3877]\"\r\nNormal-1.zip,0,Normal,1682,858,70,6,\"[847, 848, 852, 853, 857, 858]\"\r\nCP-14.zip,1,CP,1548,4214,51,2,\"[4213, 4214]\"\r\nNormal-2.zip,0,Normal,1760,1124,74,4,\"[1121, 1122, 1123, 1124]\"\r\nNCP-14.zip,2,NCP,374,1900,58,2,\"[1899, 1900]\"\r\nNCP-7.zip,2,NCP,2486,2645,50,1,[2645]\r\nNCP-19.zip,2,NCP,542,2242,55,2,\"[2241, 2242]\"\r\nNormal-25.zip,0,Normal,3836,5348,202,1,[5348]\r\nNormal-11.zip,0,Normal,1961,416,91,1,[416]\r\nNCP-27.zip,2,NCP,819,2329,33,1,[2329]\r\nNCP-5.zip,2,NCP,184,1512,112,2,\"[1512, 1513]\"\r\nNCP-15.zip,2,NCP,416,1984,139,2,\"[1984, 1986]\"\r\nNormal-2.zip,0,Normal,1740,1047,60,8,\"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]\"\r\nCP-14.zip,1,CP,1538,4187,65,3,\"[4185, 4186, 4187]\"\r\nCP-8.zip,1,CP,1351,3746,56,1,[3746]\r\nNCP-10.zip,2,NCP,281,1709,51,2,\"[1708, 
1709]\"\r\nCP-10.zip,1,CP,1415,3895,65,3,\"[3894, 3895, 3896]\"\r\nNormal-1.zip,0,Normal,1682,848,67,6,\"[847, 848, 852, 853, 857, 858]\"\r\nNCP-17.zip,2,NCP,485,2126,64,2,\"[2125, 2126]\"\r\nNCP-18.zip,2,NCP,501,2159,61,2,\"[2158, 2159]\"\r\nNormal-8.zip,0,Normal,1863,318,82,1,[318]\r\nCP-18.zip,1,CP,1772,3176,81,6,\"[3176, 3177, 3178, 3179, 3180, 3181]\"\r\nCP-26.zip,1,CP,3652,5551,53,2,\"[5551, 5552]\"\r\nNormal-5.zip,0,Normal,808,243,134,1,[243]\r\nCP-28.zip,1,CP,3771,5715,23,1,[5715]\r\nCP-19.zip,1,CP,1785,3188,67,6,\"[3186, 3187, 3188, 3189, 3190, 3191]\"\r\nCP-26.zip,1,CP,3637,5596,35,1,[5596]\r\nCP-12.zip,1,CP,1455,3987,58,3,\"[3985, 3986, 3987]\"\r\nCP-8.zip,1,CP,1336,3712,60,2,\"[3712, 3713]\"\r\nCP-7.zip,1,CP,1303,3624,224,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-30.zip,1,CP,4015,5564,226,1,[5564]\r\nNormal-8.zip,0,Normal,1883,338,91,1,[338]\r\nNormal-3.zip,0,Normal,1764,1145,62,4,\"[1143, 1144, 1145, 1146]\"\r\nNCP-15.zip,2,NCP,42,1218,61,2,\"[1216, 1218]\"\r\nNCP-7.zip,2,NCP,245,1636,149,2,\"[1636, 1637]\"\r\nNormal-14.zip,0,Normal,2066,521,74,1,[521]\r\nNormal-20.zip,0,Normal,2275,730,85,1,[730]\r\nNCP-8.zip,2,NCP,268,1682,126,2,\"[1682, 1683]\"\r\nCP-7.zip,1,CP,1307,3647,49,4,\"[3645, 3646, 3647, 3648]\"\r\nNormal-15.zip,0,Normal,2106,561,93,1,[561]\r\nCP-20.zip,1,CP,2772,3303,261,1,[3303]\r\nNCP-25.zip,2,NCP,3970,5479,48,1,[5479]\r\nCP-28.zip,1,CP,3772,5716,23,1,[5716]\r\nNormal-1.zip,0,Normal,1683,868,64,6,\"[861, 862, 864, 865, 868, 869]\"\r\nNCP-5.zip,2,NCP,175,1494,131,2,\"[1494, 1495]\"\r\nNCP-18.zip,2,NCP,507,2171,58,2,\"[2170, 2171]\"\r\nCP-7.zip,1,CP,1303,3611,257,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNCP-19.zip,2,NCP,537,2231,143,2,\"[2231, 
2232]\"\r\nNormal-1.zip,0,Normal,1728,1014,66,4,\"[1013, 1014, 1015, 1016]\"\r\nNormal-23.zip,0,Normal,2608,118,25,1,[118]\r\nNCP-23.zip,2,NCP,90,1317,43,2,\"[1316, 1317]\"\r\nNCP-2.zip,2,NCP,123,1388,62,2,\"[1387, 1388]\"\r\nNCP-18.zip,2,NCP,507,2170,138,2,\"[2170, 2171]\"\r\nNCP-14.zip,2,NCP,395,1940,171,2,\"[1940, 1941]\"\r\nNCP-23.zip,2,NCP,946,2489,26,1,[2489]\r\nCP-7.zip,1,CP,1308,3649,43,2,\"[3649, 3650]\"\r\nNCP-17.zip,2,NCP,462,2078,161,2,\"[2078, 2079]\"\r\nNormal-16.zip,0,Normal,2145,600,86,1,[600]\r\nNCP-20.zip,2,NCP,560,2279,51,2,\"[2277, 2279]\"\r\nCP-30.zip,1,CP,3931,5630,82,4,\"[5630, 5631, 5632, 5633]\"\r\nCP-13.zip,1,CP,1501,4101,55,2,\"[4100, 4101]\"\r\nCP-1.zip,1,CP,1,3144,248,5,\"[3143, 3144, 3145, 3146, 3147]\"\r\nCP-25.zip,1,CP,713,3075,120,1,[3075]\r\nCP-15.zip,1,CP,1562,4244,55,2,\"[4243, 4244]\"\r\nCP-26.zip,1,CP,3643,5602,298,2,\"[5602, 5603]\"\r\nCP-27.zip,1,CP,3748,5692,17,1,[5692]\r\nCP-7.zip,1,CP,1303,3610,51,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-14.zip,1,CP,1524,4152,229,3,\"[4152, 4153, 4154]\"\r\nNormal-6.zip,0,Normal,1800,255,92,1,[255]\r\nNormal-1.zip,0,Normal,1711,978,63,2,\"[977, 978]\"\r\nNormal-17.zip,0,Normal,2157,612,78,1,[612]\r\nCP-8.zip,1,CP,1334,3707,133,2,\"[3707, 3708]\"\r\nNCP-19.zip,2,NCP,545,2247,135,2,\"[2247, 2248]\"\r\nCP-28.zip,1,CP,3790,5734,23,1,[5734]\r\nNCP-21.zip,2,NCP,61,1255,142,2,\"[1255, 1256]\"\r\nNCP-30.zip,2,NCP,993,2546,203,1,[2546]\r\nNCP-9.zip,2,NCP,2689,2656,47,1,[2656]\r\nNormal-27.zip,0,Normal,3907,5441,66,2,\"[5440, 5441]\"\r\nCP-26.zip,1,CP,3652,5552,52,2,\"[5551, 5552]\"\r\nNCP-11.zip,2,NCP,287,1719,142,2,\"[1719, 1720]\"\r\nNCP-2.zip,2,NCP,114,1369,125,2,\"[1369, 1370]\"\r\nNCP-21.zip,2,NCP,581,2320,58,2,\"[2319, 2320]\"\r\nNormal-26.zip,0,Normal,3887,5404,78,3,\"[5400, 5401, 5404]\"\r\nNCP-12.zip,2,NCP,325,1799,50,2,\"[1798, 
1799]\"\r\nNCP-27.zip,2,NCP,1060,2637,81,1,[2637]\r\nCP-13.zip,1,CP,1516,4135,62,2,\"[4134, 4135]\"\r\nCP-15.zip,1,CP,1580,4268,21,1,[4268]\r\nNCP-15.zip,2,NCP,428,2009,125,2,\"[2009, 2010]\"\r\nNCP-19.zip,2,NCP,52,1237,135,2,\"[1237, 1238]\"\r\nNCP-9.zip,2,NCP,2691,2658,44,1,[2658]\r\nNCP-12.zip,2,NCP,34,1200,156,2,\"[1200, 1201]\"\r\nNCP-19.zip,2,NCP,539,2235,131,2,\"[2235, 2236]\"\r\nNormal-1.zip,0,Normal,1728,1015,72,4,\"[1013, 1014, 1015, 1016]\"\r\nNCP-6.zip,2,NCP,222,1588,122,2,\"[1588, 1589]\"\r\nNCP-10.zip,2,NCP,273,1693,54,2,\"[1692, 1693]\"\r\nCP-29.zip,1,CP,3822,5766,20,1,[5766]\r\nCP-10.zip,1,CP,1401,3864,51,3,\"[3862, 3863, 3864]\"\r\nNormal-13.zip,0,Normal,2030,485,66,1,[485]\r\nNCP-4.zip,2,NCP,164,1473,63,2,\"[1472, 1473]\"\r\nCP-21.zip,1,CP,3,3504,35,1,[3504]\r\nCP-9.zip,1,CP,1368,3788,69,2,\"[3788, 3789]\"\r\nNormal-1.zip,0,Normal,1683,865,72,6,\"[861, 862, 864, 865, 868, 869]\"\r\nNormal-1.zip,0,Normal,1704,963,69,4,\"[961, 962, 963, 964]\"\r\nCP-12.zip,1,CP,1466,4012,52,2,\"[4011, 4012]\"\r\nNormal-11.zip,0,Normal,1971,426,100,1,[426]\r\nNCP-16.zip,2,NCP,450,2055,34,2,\"[2054, 2055]\"\r\nNCP-30.zip,2,NCP,962,2505,38,1,[2505]\r\nNCP-8.zip,2,NCP,2675,2648,44,1,[2648]\r\nCP-7.zip,1,CP,1303,3630,49,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNCP-25.zip,2,NCP,3955,5468,46,1,[5468]\r\nNCP-18.zip,2,NCP,488,2131,139,2,\"[2131, 2133]\"\r\nCP-12.zip,1,CP,1484,4053,181,3,\"[4053, 4054, 4055]\"\r\nCP-9.zip,1,CP,1368,3789,69,2,\"[3788, 3789]\"\r\nNCP-10.zip,2,NCP,28,1188,145,2,\"[1188, 1189]\"\r\nCP-30.zip,1,CP,3931,5631,82,4,\"[5630, 5631, 5632, 5633]\"\r\nNCP-10.zip,2,NCP,277,1701,64,2,\"[1700, 1701]\"\r\nNCP-4.zip,2,NCP,148,1441,63,2,\"[1440, 1441]\"\r\nCP-12.zip,1,CP,1481,4044,139,3,\"[4044, 4045, 4046]\"\r\nNormal-21.zip,0,Normal,2288,743,96,1,[743]\r\nCP-30.zip,1,CP,4017,5566,41,1,[5566]\r\nCP-13.zip,1,CP,1499,4098,53,2,\"[4097, 
4098]\"\r\nCP-13.zip,1,CP,1516,4134,62,2,\"[4134, 4135]\"\r\nNormal-13.zip,0,Normal,2049,504,88,1,[504]\r\nCP-18.zip,1,CP,1772,3179,72,6,\"[3176, 3177, 3178, 3179, 3180, 3181]\"\r\nNCP-20.zip,2,NCP,57,1248,56,2,\"[1247, 1248]\"\r\nNormal-1.zip,0,Normal,1704,961,71,4,\"[961, 962, 963, 964]\"\r\nCP-9.zip,1,CP,1366,3783,57,3,\"[3782, 3783, 3784]\"\r\nCP-32.zip,1,CP,2464,3228,66,1,[3228]\r\nCP-15.zip,1,CP,1555,4228,62,2,\"[4228, 4229]\"\r\nNormal-3.zip,0,Normal,758,193,122,1,[193]\r\nNCP-12.zip,2,NCP,329,1806,157,2,\"[1806, 1807]\"\r\nCP-7.zip,1,CP,1307,3646,259,4,\"[3645, 3646, 3647, 3648]\"\r\nCP-26.zip,1,CP,3722,5657,205,2,\"[5656, 5657]\"\r\nNCP-14.zip,2,NCP,382,1916,139,2,\"[1916, 1917]\"\r\nCP-27.zip,1,CP,3752,5696,20,1,[5696]\r\nNormal-16.zip,0,Normal,2129,584,75,1,[584]\r\nNCP-13.zip,2,NCP,367,1885,158,2,\"[1885, 1886]\"\r\nNCP-6.zip,2,NCP,204,1553,58,2,\"[1552, 1553]\"\r\nCP-30.zip,1,CP,3918,5542,71,1,[5542]\r\nCP-10.zip,1,CP,1392,3844,62,2,\"[3843, 3844]\"\r\nNormal-11.zip,0,Normal,1979,434,87,1,[434]\r\nNormal-2.zip,0,Normal,1741,1053,61,2,\"[1053, 1054]\"\r\nNormal-10.zip,0,Normal,1945,400,87,1,[400]\r\nNormal-26.zip,0,Normal,3882,5394,27,1,[5394]\r\nCP-20.zip,1,CP,2456,2940,126,1,[2940]\r\nNCP-5.zip,2,NCP,184,1513,48,2,\"[1512, 1513]\"\r\nNCP-9.zip,2,NCP,2693,2659,49,1,[2659]\r\nCP-8.zip,1,CP,1348,3739,197,3,\"[3739, 3740, 3741]\"\r\nNormal-18.zip,0,Normal,2214,669,102,1,[669]\r\nCP-10.zip,1,CP,1415,3896,65,3,\"[3894, 3895, 3896]\"\r\nNCP-3.zip,2,NCP,1290,2731,66,1,[2731]\r\nNormal-2.zip,0,Normal,1759,1111,62,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nCP-15.zip,1,CP,1566,4253,54,2,\"[4252, 4253]\"\r\nNormal-27.zip,0,Normal,3911,5447,65,6,\"[5447, 5448, 5449, 5450, 5451, 5452]\"\r\nNCP-15.zip,2,NCP,401,1951,139,2,\"[1951, 1952]\"\r\nCP-7.zip,1,CP,1309,3652,49,2,\"[3651, 3652]\"\r\nNormal-4.zip,0,Normal,787,222,320,1,[222]\r\nNCP-20.zip,2,NCP,550,2258,60,2,\"[2257, 2258]\"\r\nNCP-5.zip,2,NCP,195,1534,143,2,\"[1534, 
1535]\"\r\nNCP-13.zip,2,NCP,367,1886,66,2,\"[1885, 1886]\"\r\nNCP-19.zip,2,NCP,530,2218,132,1,[2218]\r\nNormal-6.zip,0,Normal,1811,266,95,1,[266]\r\nNCP-30.zip,2,NCP,963,2506,21,1,[2506]\r\nNormal-2.zip,0,Normal,1759,1112,62,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nNCP-13.zip,2,NCP,369,1890,58,2,\"[1889, 1890]\"\r\nNCP-16.zip,2,NCP,457,2068,134,2,\"[2068, 2069]\"\r\nNCP-26.zip,2,NCP,3981,5488,45,1,[5488]\r\nNCP-22.zip,2,NCP,816,2325,50,1,[2325]\r\nNormal-1.zip,0,Normal,1730,1019,63,5,\"[1019, 1020, 1021, 1022, 1023]\"\r\nNCP-15.zip,2,NCP,419,1991,130,2,\"[1991, 1992]\"\r\nCP-30.zip,1,CP,4016,5565,37,1,[5565]\r\nCP-24.zip,1,CP,694,3056,135,1,[3056]\r\nNCP-17.zip,2,NCP,470,2095,154,2,\"[2095, 2096]\"\r\nNormal-4.zip,0,Normal,781,216,118,1,[216]\r\nNormal-27.zip,0,Normal,3911,5449,64,6,\"[5447, 5448, 5449, 5450, 5451, 5452]\"\r\nCP-13.zip,1,CP,1497,4093,68,3,\"[4092, 4093, 4094]\"\r\nNCP-26.zip,2,NCP,3991,5515,43,1,[5515]\r\nCP-8.zip,1,CP,1331,3701,62,2,\"[3701, 3702]\"\r\nNormal-9.zip,0,Normal,1910,365,91,1,[365]\r\nNCP-27.zip,2,NCP,820,2330,34,1,[2330]\r\nCP-7.zip,1,CP,13,3171,65,4,\"[3170, 3171, 3172, 3173]\"\r\nCP-20.zip,1,CP,2764,3295,39,1,[3295]\r\nNormal-1.zip,0,Normal,1714,984,71,3,\"[982, 983, 984]\"\r\nCP-13.zip,1,CP,1501,4100,55,2,\"[4100, 4101]\"\r\nNormal-15.zip,0,Normal,2117,572,87,1,[572]\r\nNCP-22.zip,2,NCP,87,1308,61,2,\"[1307, 1308]\"\r\nCP-30.zip,1,CP,3929,5627,70,2,\"[5626, 5627]\"\r\nNCP-4.zip,2,NCP,158,1461,52,2,\"[1460, 1461]\"\r\nCP-10.zip,1,CP,1389,3834,52,3,\"[3833, 3834, 3835]\"\r\nCP-13.zip,1,CP,1497,4094,68,3,\"[4092, 4093, 4094]\"\r\nCP-10.zip,1,CP,1415,3894,155,3,\"[3894, 3895, 3896]\"\r\nCP-30.zip,1,CP,4014,5563,35,1,[5563]\r\nNCP-17.zip,2,NCP,462,2079,67,2,\"[2078, 2079]\"\r\nCP-29.zip,1,CP,3803,5747,23,1,[5747]\r\nCP-1.zip,1,CP,1,3143,300,5,\"[3143, 3144, 3145, 3146, 3147]\"\r\nNCP-11.zip,2,NCP,305,1756,157,2,\"[1756, 1758]\"\r\nNormal-1.zip,0,Normal,1668,781,63,4,\"[778, 779, 780, 
781]\"\r\nCP-4.zip,1,CP,1174,3392,175,1,[3392]\r\nNormal-14.zip,0,Normal,2060,515,77,1,[515]\r\nNormal-22.zip,0,Normal,2602,112,32,1,[112]\r\nCP-14.zip,1,CP,1541,4196,58,3,\"[4194, 4195, 4196]\"\r\nNormal-12.zip,0,Normal,2019,474,87,1,[474]\r\nCP-25.zip,1,CP,733,3095,84,1,[3095]\r\nCP-13.zip,1,CP,1499,4097,53,2,\"[4097, 4098]\"\r\nNormal-1.zip,0,Normal,1683,861,65,6,\"[861, 862, 864, 865, 868, 869]\"\r\nCP-19.zip,1,CP,1788,3201,55,8,\"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]\"\r\nNormal-1.zip,0,Normal,1704,964,69,4,\"[961, 962, 963, 964]\"\r\nNCP-3.zip,2,NCP,1289,2730,62,1,[2730]\r\nNCP-20.zip,2,NCP,567,2292,148,2,\"[2292, 2293]\"\r\nCP-7.zip,1,CP,1303,3613,232,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNormal-13.zip,0,Normal,2027,482,89,1,[482]\r\nNormal-2.zip,0,Normal,1759,1114,59,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nNormal-27.zip,0,Normal,3897,5424,75,4,\"[5423, 5424, 5426, 5427]\"\r\nNormal-22.zip,0,Normal,2317,772,77,1,[772]\r\nNormal-2.zip,0,Normal,1758,1110,59,2,\"[1109, 1110]\"\r\nNCP-2.zip,2,NCP,121,1384,43,2,\"[1383, 1384]\"\r\nNCP-13.zip,2,NCP,356,1864,53,2,\"[1863, 1864]\"\r\nNormal-2.zip,0,Normal,1760,1121,85,4,\"[1121, 1122, 1123, 1124]\"\r\nCP-29.zip,1,CP,3825,5769,25,1,[5769]\r\nNCP-17.zip,2,NCP,46,1226,52,2,\"[1225, 1226]\"\r\nNCP-19.zip,2,NCP,53,1240,60,2,\"[1239, 1240]\"\r\nNCP-12.zip,2,NCP,314,1776,58,2,\"[1775, 1776]\"\r\nNormal-21.zip,0,Normal,2290,745,88,1,[745]\r\nNormal-2.zip,0,Normal,1760,1123,74,4,\"[1121, 1122, 1123, 1124]\"\r\nNormal-24.zip,0,Normal,2666,176,35,1,[176]\r\nCP-8.zip,1,CP,1346,3735,53,3,\"[3733, 3734, 3735]\"\r\nNormal-17.zip,0,Normal,2164,619,84,1,[619]\r\nNCP-8.zip,2,NCP,2672,2647,47,1,[2647]\r\nNCP-2.zip,2,NCP,1277,2718,57,1,[2718]\r\nCP-9.zip,1,CP,1370,3793,62,2,\"[3792, 3793]\"\r\nNCP-20.zip,2,NCP,551,2259,154,2,\"[2259, 
2260]\"\r\nNormal-10.zip,0,Normal,1928,383,87,1,[383]\r\nCP-21.zip,1,CP,598,2960,646,1,[2960]\r\nCP-20.zip,1,CP,2755,3286,34,1,[3286]\r\nNormal-16.zip,0,Normal,2141,596,100,1,[596]\r\nCP-14.zip,1,CP,1544,4205,50,3,\"[4203, 4204, 4205]\"\r\nNCP-9.zip,2,NCP,270,1687,62,2,\"[1686, 1687]\"\r\nCP-5.zip,1,CP,1222,3440,157,1,[3440]\r\nCP-19.zip,1,CP,1791,3210,100,4,\"[3210, 3211, 3212, 3213]\"\r\nNCP-16.zip,2,NCP,450,2054,78,2,\"[2054, 2055]\"\r\nNCP-20.zip,2,NCP,557,2272,56,2,\"[2271, 2272]\"\r\nNCP-3.zip,2,NCP,1284,2725,50,1,[2725]\r\nCP-5.zip,1,CP,1205,3423,146,1,[3423]\r\nCP-19.zip,1,CP,1785,3191,79,6,\"[3186, 3187, 3188, 3189, 3190, 3191]\"\r\nNCP-1.zip,2,NCP,1017,2583,452,1,[2583]\r\nNormal-2.zip,0,Normal,1736,1033,25,5,\"[1032, 1033, 1034, 1035, 1036]\"\r\nNCP-1.zip,2,NCP,1021,2589,183,4,\"[2587, 2588, 2589, 2590]\"\r\nNCP-7.zip,2,NCP,232,1608,146,2,\"[1608, 1609]\"\r\nNCP-23.zip,2,NCP,950,2493,34,1,[2493]\r\nCP-6.zip,1,CP,1246,3464,175,1,[3464]\r\nNormal-1.zip,0,Normal,1671,794,67,3,\"[793, 794, 795]\"\r\nCP-1.zip,1,CP,1095,3313,161,1,[3313]\r\nCP-25.zip,1,CP,714,3076,98,1,[3076]\r\nNCP-17.zip,2,NCP,479,2114,58,2,\"[2113, 2114]\"\r\nNCP-18.zip,2,NCP,515,2186,139,2,\"[2186, 2187]\"\r\nNormal-24.zip,0,Normal,2652,162,35,1,[162]\r\nNormal-2.zip,0,Normal,1752,1085,66,1,[1085]\r\nNormal-24.zip,0,Normal,2638,148,38,1,[148]\r\nNCP-8.zip,2,NCP,260,1666,163,2,\"[1666, 1667]\"\r\nNCP-18.zip,2,NCP,509,2175,58,2,\"[2174, 2175]\"\r\nNormal-14.zip,0,Normal,2079,534,92,1,[534]\r\nNormal-3.zip,0,Normal,751,186,119,1,[186]\r\nNCP-8.zip,2,NCP,263,1673,74,2,\"[1672, 1673]\"\r\nCP-22.zip,1,CP,626,2988,174,1,[2988]\r\nNormal-23.zip,0,Normal,2619,129,43,1,[129]\r\nCP-1.zip,1,CP,1069,3109,77,4,\"[3108, 3109, 3110, 3111]\"\r\nNCP-13.zip,2,NCP,360,1872,51,2,\"[1871, 1872]\"\r\nNCP-23.zip,2,NCP,915,2457,31,1,[2457]\r\nNormal-2.zip,0,Normal,1740,1048,60,8,\"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]\"\r\nNCP-3.zip,2,NCP,131,1407,117,2,\"[1407, 
1408]\"\r\nNCP-21.zip,2,NCP,79,1292,55,2,\"[1291, 1292]\"\r\nNormal-4.zip,0,Normal,779,214,290,1,[214]\r\nCP-27.zip,1,CP,3734,5676,32,3,\"[5676, 5677, 5678]\"\r\nNormal-15.zip,0,Normal,2104,559,101,1,[559]\r\nCP-5.zip,1,CP,1218,3436,213,1,[3436]\r\nNCP-3.zip,2,NCP,1291,2732,55,1,[2732]\r\nNCP-19.zip,2,NCP,537,2232,60,2,\"[2231, 2232]\"\r\nNCP-21.zip,2,NCP,71,1274,126,2,\"[1274, 1275]\"\r\nNCP-5.zip,2,NCP,195,1535,60,2,\"[1534, 1535]\"\r\nCP-9.zip,1,CP,1359,3766,46,3,\"[3764, 3765, 3766]\"\r\nNCP-2.zip,2,NCP,119,1380,62,2,\"[1379, 1380]\"\r\nNormal-19.zip,0,Normal,2241,696,86,1,[696]\r\nCP-7.zip,1,CP,1303,3626,51,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNormal-15.zip,0,Normal,2112,567,84,1,[567]\r\nNCP-20.zip,2,NCP,569,2296,142,2,\"[2296, 2297]\"\r\nCP-7.zip,1,CP,1303,3606,49,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNCP-9.zip,2,NCP,2700,2666,43,1,[2666]\r\nNCP-9.zip,2,NCP,2697,2663,46,1,[2663]\r\nCP-19.zip,1,CP,2435,2902,100,3,\"[2901, 2902, 2903]\"\r\nCP-29.zip,1,CP,3809,5753,19,1,[5753]\r\nNCP-10.zip,2,NCP,2718,2674,42,1,[2674]\r\nNormal-1.zip,0,Normal,1668,778,60,4,\"[778, 779, 780, 781]\"\r\nNCP-18.zip,2,NCP,509,2174,138,2,\"[2174, 2175]\"\r\nNCP-16.zip,2,NCP,456,2066,135,2,\"[2066, 2067]\"\r\nNCP-5.zip,2,NCP,187,1519,57,2,\"[1518, 1519]\"\r\nCP-7.zip,1,CP,1303,3612,49,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNCP-20.zip,2,NCP,57,1247,132,2,\"[1247, 1248]\"\r\nNormal-1.zip,0,Normal,1715,986,71,2,\"[985, 986]\"\r\nNormal-2.zip,0,Normal,1749,1069,61,4,\"[1069, 1070, 1071, 1072]\"\r\nNCP-24.zip,2,NCP,984,2530,241,2,\"[2529, 2530]\"\r\nNormal-1.zip,0,Normal,1682,847,67,6,\"[847, 848, 852, 853, 
857, 858]\"\r\nCP-7.zip,1,CP,1303,3619,213,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-1.zip,1,CP,1069,3110,77,4,\"[3108, 3109, 3110, 3111]\"\r\nNormal-12.zip,0,Normal,2017,472,99,1,[472]\r\nCP-10.zip,1,CP,1400,3861,54,2,\"[3860, 3861]\"\r\nNCP-22.zip,2,NCP,881,2416,225,1,[2416]\r\nCP-11.zip,1,CP,1420,3906,59,2,\"[3905, 3906]\"\r\nNCP-5.zip,2,NCP,172,1489,59,2,\"[1488, 1489]\"\r\nNCP-6.zip,2,NCP,20,1172,127,2,\"[1172, 1173]\"\r\nNCP-28.zip,2,NCP,846,2364,269,1,[2364]\r\nNormal-14.zip,0,Normal,2075,530,93,1,[530]\r\nCP-6.zip,1,CP,1238,3456,191,1,[3456]\r\nCP-7.zip,1,CP,1263,3481,120,1,[3481]\r\nCP-7.zip,1,CP,1303,3617,27,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-10.zip,1,CP,1398,3857,44,2,\"[3856, 3857]\"\r\nCP-1.zip,1,CP,1088,3220,54,4,\"[3220, 3221, 3222, 3223]\"\r\nCP-8.zip,1,CP,1320,3676,62,2,\"[3676, 3677]\"\r\nCP-7.zip,1,CP,1303,3608,55,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNCP-15.zip,2,NCP,426,2005,139,2,\"[2005, 2006]\"\r\nNCP-28.zip,2,NCP,869,2397,58,1,[2397]\r\nNCP-11.zip,2,NCP,288,1721,114,2,\"[1721, 1722]\"\r\nNCP-21.zip,2,NCP,581,2319,139,2,\"[2319, 2320]\"\r\nNormal-26.zip,0,Normal,3878,5390,24,1,[5390]\r\nNormal-13.zip,0,Normal,2041,496,95,1,[496]\r\nNormal-25.zip,0,Normal,3845,5357,182,1,[5357]\r\nNormal-22.zip,0,Normal,2599,109,39,1,[109]\r\nNormal-4.zip,0,Normal,789,224,120,1,[224]\r\nNormal-1.zip,0,Normal,1714,982,40,3,\"[982, 983, 984]\"\r\nNCP-16.zip,2,NCP,434,2022,51,2,\"[2021, 2022]\"\r\nNCP-28.zip,2,NCP,830,2343,120,1,[2343]\r\nNormal-1.zip,0,Normal,1704,962,71,4,\"[961, 962, 963, 964]\"\r\nNCP-5.zip,2,NCP,196,1537,55,2,\"[1536, 
1537]\"\r\nCP-8.zip,1,CP,1336,3713,60,2,\"[3712, 3713]\"\r\nNCP-29.zip,2,NCP,895,2436,140,2,\"[2435, 2436]\"\r\nNCP-29.zip,2,NCP,930,2472,23,1,[2472]\r\nCP-12.zip,1,CP,1482,4047,181,3,\"[4047, 4048, 4049]\"\r\nCP-10.zip,1,CP,1401,3862,201,3,\"[3862, 3863, 3864]\"\r\nNCP-5.zip,2,NCP,182,1509,55,2,\"[1508, 1509]\"\r\nCP-12.zip,1,CP,1483,4050,148,3,\"[4050, 4051, 4052]\"\r\nNCP-28.zip,2,NCP,870,2399,247,2,\"[2399, 2400]\"\r\nNormal-2.zip,0,Normal,1741,1054,61,2,\"[1053, 1054]\"\r\nCP-8.zip,1,CP,1324,3684,58,2,\"[3684, 3685]\"\r\nNCP-9.zip,2,NCP,2681,2696,58,1,[2696]\r\nCP-9.zip,1,CP,1367,3786,58,3,\"[3785, 3786, 3787]\"\r\nCP-19.zip,1,CP,1790,3209,69,2,\"[3208, 3209]\"\r\nCP-11.zip,1,CP,1430,3928,77,2,\"[3928, 3929]\"\r\nNormal-18.zip,0,Normal,2207,662,99,1,[662]\r\nNormal-11.zip,0,Normal,1972,427,97,1,[427]\r\nCP-5.zip,1,CP,1221,3439,295,1,[3439]\r\nNCP-15.zip,2,NCP,42,1216,146,2,\"[1216, 1218]\"\r\nCP-22.zip,1,CP,640,3002,136,1,[3002]\r\nNCP-7.zip,2,NCP,245,1637,62,2,\"[1636, 1637]\"\r\nNCP-6.zip,2,NCP,215,1574,155,2,\"[1574, 1575]\"\r\nNCP-29.zip,2,NCP,903,2445,87,1,[2445]\r\nNCP-7.zip,2,NCP,232,1609,61,2,\"[1608, 1609]\"\r\nNCP-2.zip,2,NCP,119,1379,147,2,\"[1379, 1380]\"\r\nNormal-2.zip,0,Normal,1739,1042,278,3,\"[1042, 1043, 1044]\"\r\nCP-28.zip,1,CP,3791,5735,26,1,[5735]\r\nNCP-27.zip,2,NCP,828,2341,45,1,[2341]\r\nNCP-12.zip,2,NCP,314,1775,139,2,\"[1775, 1776]\"\r\nNCP-6.zip,2,NCP,20,1173,54,2,\"[1172, 1173]\"\r\nCP-13.zip,1,CP,1490,4073,69,3,\"[4071, 4072, 4073]\"\r\nNCP-20.zip,2,NCP,569,2297,60,2,\"[2296, 2297]\"\r\nNormal-2.zip,0,Normal,1759,1113,59,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nNCP-30.zip,2,NCP,987,2536,71,2,\"[2536, 2537]\"\r\nCP-15.zip,1,CP,1579,4267,20,1,[4267]\r\nCP-19.zip,1,CP,1790,3208,69,2,\"[3208, 3209]\"\r\nNCP-22.zip,2,NCP,883,2420,200,2,\"[2419, 2420]\"\r\nNCP-20.zip,2,NCP,568,2295,61,2,\"[2294, 
2295]\"\r\nNormal-13.zip,0,Normal,2036,491,102,1,[491]\r\nNCP-26.zip,2,NCP,3973,5482,48,1,[5482]\r\nCP-7.zip,1,CP,1303,3609,271,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-27.zip,1,CP,3743,5687,22,1,[5687]\r\nNormal-11.zip,0,Normal,1981,436,91,1,[436]\r\nNCP-2.zip,2,NCP,125,1391,127,5,\"[1391, 1392, 1393, 1394, 1395]\"\r\nNCP-9.zip,2,NCP,270,1686,147,2,\"[1686, 1687]\"\r\nNormal-23.zip,0,Normal,2636,146,42,1,[146]\r\nNCP-3.zip,2,NCP,1286,2727,64,1,[2727]\r\nCP-10.zip,1,CP,1386,3828,66,2,\"[3827, 3828]\"\r\nNormal-19.zip,0,Normal,2230,685,91,1,[685]\r\nNormal-5.zip,0,Normal,805,240,327,1,[240]\r\nNormal-26.zip,0,Normal,3891,5412,62,2,\"[5411, 5412]\"\r\nNCP-27.zip,2,NCP,2671,2691,51,1,[2691]\r\nNCP-27.zip,2,NCP,1059,2636,52,1,[2636]\r\nCP-8.zip,1,CP,1344,3730,58,3,\"[3728, 3729, 3730]\"\r\nNormal-24.zip,0,Normal,2662,172,41,1,[172]\r\nNormal-3.zip,0,Normal,744,179,278,1,[179]\r\nCP-5.zip,1,CP,1202,3420,207,1,[3420]\r\nNCP-27.zip,2,NCP,1006,2567,19,2,\"[2566, 2567]\"\r\nNormal-19.zip,0,Normal,2248,703,87,1,[703]\r\nNormal-2.zip,0,Normal,1736,1034,25,5,\"[1032, 1033, 1034, 1035, 1036]\"\r\nNormal-8.zip,0,Normal,1867,322,87,1,[322]\r\nNormal-23.zip,0,Normal,2609,119,40,1,[119]\r\nCP-11.zip,1,CP,1453,3980,56,3,\"[3979, 3980, 3981]\"\r\nNormal-26.zip,0,Normal,3875,5387,24,1,[5387]\r\nCP-26.zip,1,CP,3647,5607,32,1,[5607]\r\nNormal-12.zip,0,Normal,2006,461,77,1,[461]\r\nNormal-6.zip,0,Normal,1827,282,99,1,[282]\r\nNCP-19.zip,2,NCP,533,2224,156,1,[2224]\r\nNCP-11.zip,2,NCP,287,1720,60,2,\"[1719, 1720]\"\r\nNCP-7.zip,2,NCP,2487,2687,38,1,[2687]\r\nCP-3.zip,1,CP,1160,3378,318,1,[3378]\r\nNormal-7.zip,0,Normal,1858,313,95,1,[313]\r\nCP-13.zip,1,CP,1514,4129,61,2,\"[4129, 4130]\"\r\nNCP-20.zip,2,NCP,561,2280,139,2,\"[2280, 2281]\"\r\nCP-14.zip,1,CP,1527,4161,58,3,\"[4160, 4161, 
4162]\"\r\nCP-25.zip,1,CP,721,3083,86,1,[3083]\r\nCP-13.zip,1,CP,1496,4091,55,2,\"[4090, 4091]\"\r\nNormal-1.zip,0,Normal,1728,1013,66,4,\"[1013, 1014, 1015, 1016]\"\r\nNCP-12.zip,2,NCP,317,1781,117,2,\"[1781, 1782]\"\r\nCP-19.zip,1,CP,2437,2906,132,3,\"[2905, 2906, 2907]\"\r\nNCP-5.zip,2,NCP,196,1536,131,2,\"[1536, 1537]\"\r\nCP-11.zip,1,CP,1437,3942,57,2,\"[3942, 3943]\"\r\nNCP-5.zip,2,NCP,182,1508,130,2,\"[1508, 1509]\"\r\nCP-9.zip,1,CP,1363,3774,64,2,\"[3774, 3775]\"\r\nCP-10.zip,1,CP,1401,3863,51,3,\"[3862, 3863, 3864]\"\r\nNCP-10.zip,2,NCP,275,1696,153,2,\"[1696, 1697]\"\r\nCP-10.zip,1,CP,1387,3829,51,2,\"[3829, 3830]\"\r\nCP-30.zip,1,CP,3931,5633,68,4,\"[5630, 5631, 5632, 5633]\"\r\nNCP-7.zip,2,NCP,234,1614,58,2,\"[1613, 1614]\"\r\nNCP-3.zip,2,NCP,1296,2737,66,1,[2737]\r\nNCP-11.zip,2,NCP,283,1712,62,1,[1712]\r\nCP-9.zip,1,CP,1363,3775,64,2,\"[3774, 3775]\"\r\nNCP-8.zip,2,NCP,255,1657,58,2,\"[1656, 1657]\"\r\nNCP-17.zip,2,NCP,464,2082,144,2,\"[2082, 2083]\"\r\nCP-12.zip,1,CP,1473,4027,51,3,\"[4026, 4027, 4028]\"\r\nCP-28.zip,1,CP,3781,5725,20,1,[5725]\r\nNCP-14.zip,2,NCP,391,1933,55,2,\"[1932, 1933]\"\r\nNormal-13.zip,0,Normal,2032,487,85,1,[487]\r\nNCP-28.zip,2,NCP,872,2403,183,2,\"[2403, 2404]\"\r\nNCP-17.zip,2,NCP,479,2113,139,2,\"[2113, 2114]\"\r\nNCP-11.zip,2,NCP,305,1758,65,2,\"[1756, 1758]\"\r\nNCP-1.zip,2,NCP,1021,2587,201,4,\"[2587, 2588, 2589, 2590]\"\r\nNCP-30.zip,2,NCP,957,2500,50,1,[2500]\r\nNormal-17.zip,0,Normal,2172,627,91,1,[627]\r\nCP-7.zip,1,CP,1316,3667,147,3,\"[3667, 3668, 3669]\"\r\nNCP-24.zip,2,NCP,971,2514,74,1,[2514]\r\nNormal-2.zip,0,Normal,1740,1051,59,8,\"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]\"\r\nNCP-18.zip,2,NCP,494,2145,65,2,\"[2144, 2145]\"\r\nNCP-30.zip,2,NCP,987,2537,368,2,\"[2536, 2537]\"\r\nNormal-13.zip,0,Normal,2048,503,94,1,[503]\r\nCP-8.zip,1,CP,1347,3736,265,3,\"[3736, 3737, 3738]\"\r\nNormal-2.zip,0,Normal,1751,1081,62,6,\"[1079, 1080, 1081, 1082, 1083, 1084]\"\r\nNCP-15.zip,2,NCP,41,1215,63,2,\"[1214, 
1215]\"\r\nCP-12.zip,1,CP,1456,3989,52,3,\"[3988, 3989, 3990]\"\r\nNCP-21.zip,2,NCP,80,1294,54,2,\"[1293, 1294]\"\r\nCP-29.zip,1,CP,3808,5752,23,1,[5752]\r\nCP-26.zip,1,CP,3732,5671,53,2,\"[5671, 5672]\"\r\nNCP-8.zip,2,NCP,251,1648,131,2,\"[1648, 1649]\"\r\nNormal-2.zip,0,Normal,1755,1099,71,4,\"[1097, 1098, 1099, 1100]\"\r\nNormal-2.zip,0,Normal,1759,1120,66,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nNCP-28.zip,2,NCP,874,2407,341,1,[2407]\r\nNCP-17.zip,2,NCP,469,2093,159,2,\"[2093, 2094]\"\r\nNormal-1.zip,0,Normal,1730,1023,59,5,\"[1019, 1020, 1021, 1022, 1023]\"\r\nNormal-8.zip,0,Normal,1891,346,96,1,[346]\r\nCP-7.zip,1,CP,1303,3621,230,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-7.zip,1,CP,1303,3607,247,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-4.zip,1,CP,1170,3388,180,1,[3388]\r\nCP-10.zip,1,CP,1395,3849,63,2,\"[3849, 3850]\"\r\nNCP-2.zip,2,NCP,123,1387,148,2,\"[1387, 1388]\"\r\nNCP-16.zip,2,NCP,446,2047,61,2,\"[2046, 2047]\"\r\nNCP-2.zip,2,NCP,111,1364,56,2,\"[1363, 1364]\"\r\nCP-15.zip,1,CP,1568,4256,22,1,[4256]\r\nNCP-21.zip,2,NCP,79,1291,131,2,\"[1291, 1292]\"\r\nCP-10.zip,1,CP,1395,3850,63,2,\"[3849, 3850]\"\r\nCP-14.zip,1,CP,1542,4199,54,3,\"[4197, 4198, 4199]\"\r\nCP-15.zip,1,CP,1555,4229,62,2,\"[4228, 4229]\"\r\nCP-21.zip,1,CP,606,2968,255,1,[2968]\r\nCP-12.zip,1,CP,1480,4042,54,2,\"[4042, 4043]\"\r\nNCP-27.zip,2,NCP,1063,2640,82,1,[2640]\r\nNormal-7.zip,0,Normal,1831,286,99,1,[286]\r\nCP-14.zip,1,CP,1552,4221,62,2,\"[4221, 4222]\"\r\nNCP-19.zip,2,NCP,541,2240,51,2,\"[2239, 2240]\"\r\nNCP-23.zip,2,NCP,91,1318,100,2,\"[1318, 1319]\"\r\nNormal-2.zip,0,Normal,1739,1043,56,3,\"[1042, 1043, 1044]\"\r\nCP-19.zip,1,CP,2437,2907,183,3,\"[2905, 2906, 
2907]\"\r\nNormal-27.zip,0,Normal,3911,5452,65,6,\"[5447, 5448, 5449, 5450, 5451, 5452]\"\r\nCP-10.zip,1,CP,1396,3851,139,3,\"[3851, 3852, 3853]\"\r\nCP-13.zip,1,CP,1490,4072,69,3,\"[4071, 4072, 4073]\"\r\nCP-6.zip,1,CP,1242,3460,229,1,[3460]\r\nNCP-17.zip,2,NCP,471,2098,59,2,\"[2097, 2098]\"\r\nNCP-16.zip,2,NCP,434,2021,119,2,\"[2021, 2022]\"\r\nNCP-16.zip,2,NCP,446,2046,146,2,\"[2046, 2047]\"\r\nNCP-17.zip,2,NCP,473,2101,145,2,\"[2101, 2102]\"\r\nNCP-21.zip,2,NCP,69,1270,113,2,\"[1270, 1271]\"\r\nNormal-9.zip,0,Normal,1896,351,98,1,[351]\r\nNCP-9.zip,2,NCP,2709,2702,44,1,[2702]\r\nNCP-29.zip,2,NCP,907,2449,287,1,[2449]\r\nNCP-2.zip,2,NCP,106,1349,150,2,\"[1349, 1350]\"\r\nNCP-17.zip,2,NCP,477,2109,139,2,\"[2109, 2110]\"\r\nCP-27.zip,1,CP,3734,5677,163,3,\"[5676, 5677, 5678]\"\r\nNormal-8.zip,0,Normal,1877,332,88,1,[332]\r\nNormal-7.zip,0,Normal,1853,308,94,1,[308]\r\nNCP-2.zip,2,NCP,1272,2713,62,1,[2713]\r\nCP-13.zip,1,CP,1515,4132,57,3,\"[4131, 4132, 4133]\"\r\nNCP-21.zip,2,NCP,68,1269,49,2,\"[1268, 1269]\"\r\nCP-25.zip,1,CP,719,3081,128,1,[3081]\r\nNCP-10.zip,2,NCP,276,1698,139,2,\"[1698, 1699]\"\r\nNCP-11.zip,2,NCP,294,1734,57,2,\"[1733, 1734]\"\r\nCP-8.zip,1,CP,1342,3724,58,3,\"[3723, 3724, 3725]\"\r\nNormal-4.zip,0,Normal,783,218,118,1,[218]\r\nNormal-11.zip,0,Normal,1977,432,96,1,[432]\r\nCP-12.zip,1,CP,1460,3998,60,2,\"[3998, 3999]\"\r\nNCP-12.zip,2,NCP,32,1197,61,2,\"[1196, 1197]\"\r\nNormal-2.zip,0,Normal,1736,1035,55,5,\"[1032, 1033, 1034, 1035, 1036]\"\r\nCP-7.zip,1,CP,1308,3650,219,2,\"[3649, 3650]\"\r\nNCP-17.zip,2,NCP,485,2125,153,2,\"[2125, 2126]\"\r\nNormal-24.zip,0,Normal,2649,159,26,1,[159]\r\nCP-1.zip,1,CP,1082,3127,74,1,[3127]\r\nCP-28.zip,1,CP,3788,5732,26,1,[5732]\r\nNormal-3.zip,0,Normal,1764,1143,66,4,\"[1143, 1144, 1145, 1146]\"\r\nNCP-2.zip,2,NCP,125,1392,132,5,\"[1391, 1392, 1393, 1394, 1395]\"\r\nNCP-1.zip,2,NCP,1013,2577,524,1,[2577]\r\nCP-22.zip,1,CP,630,2992,118,1,[2992]\r\nNormal-27.zip,0,Normal,3897,5423,70,4,\"[5423, 5424, 5426, 
5427]\"\r\nCP-1.zip,1,CP,1088,3222,50,4,\"[3220, 3221, 3222, 3223]\"\r\nNCP-11.zip,2,NCP,294,1733,136,2,\"[1733, 1734]\"\r\nCP-3.zip,1,CP,1135,3353,202,1,[3353]\r\nCP-10.zip,1,CP,1408,3879,59,3,\"[3878, 3879, 3880]\"\r\nCP-19.zip,1,CP,1791,3213,71,4,\"[3210, 3211, 3212, 3213]\"\r\nNormal-1.zip,0,Normal,1709,974,61,2,\"[973, 974]\"\r\nCP-11.zip,1,CP,1438,3945,46,2,\"[3944, 3945]\"\r\nCP-8.zip,1,CP,1325,3687,64,2,\"[3686, 3687]\"\r\nCP-20.zip,1,CP,2761,3292,38,1,[3292]\r\nNCP-17.zip,2,NCP,470,2096,64,2,\"[2095, 2096]\"\r\nNCP-4.zip,2,NCP,164,1472,150,2,\"[1472, 1473]\"\r\nNCP-14.zip,2,NCP,380,1912,148,2,\"[1912, 1913]\"\r\nCP-7.zip,1,CP,1266,3484,134,1,[3484]\r\nCP-10.zip,1,CP,1400,3860,54,2,\"[3860, 3861]\"\r\nNCP-10.zip,2,NCP,281,1708,121,2,\"[1708, 1709]\"\r\nNCP-14.zip,2,NCP,397,1944,158,2,\"[1944, 1945]\"\r\nCP-27.zip,1,CP,3734,5678,32,3,\"[5676, 5677, 5678]\"\r\nCP-15.zip,1,CP,1559,4238,53,2,\"[4237, 4238]\"\r\nNormal-26.zip,0,Normal,3888,5406,63,1,[5406]\r\nNCP-11.zip,2,NCP,308,1764,49,2,\"[1763, 1764]\"\r\nNCP-16.zip,2,NCP,435,2024,62,2,\"[2023, 2024]\"\r\nNCP-11.zip,2,NCP,285,1715,149,2,\"[1715, 1716]\"\r\nNCP-20.zip,2,NCP,568,2294,144,2,\"[2294, 2295]\"\r\nNCP-20.zip,2,NCP,550,2257,143,2,\"[2257, 2258]\"\r\nNCP-6.zip,2,NCP,218,1581,58,2,\"[1580, 1581]\"\r\nNormal-15.zip,0,Normal,2092,547,87,1,[547]\r\nCP-10.zip,1,CP,1396,3853,58,3,\"[3851, 3852, 3853]\"\r\nNormal-12.zip,0,Normal,2010,465,91,1,[465]\r\nNormal-18.zip,0,Normal,2194,649,89,1,[649]\r\nNCP-10.zip,2,NCP,276,1699,58,2,\"[1698, 1699]\"\r\nCP-27.zip,1,CP,3746,5690,17,1,[5690]\r\nNormal-24.zip,0,Normal,2656,166,34,1,[166]\r\nCP-29.zip,1,CP,3802,5746,26,1,[5746]\r\nCP-17.zip,1,CP,1641,4329,26,1,[4329]\r\nNormal-2.zip,0,Normal,1749,1072,66,4,\"[1069, 1070, 1071, 1072]\"\r\nCP-9.zip,1,CP,1373,3800,55,2,\"[3800, 3801]\"\r\nNormal-22.zip,0,Normal,2596,106,44,1,[106]\r\nNormal-14.zip,0,Normal,2072,527,77,1,[527]\r\nNormal-20.zip,0,Normal,2251,706,89,1,[706]\r\nCP-19.zip,1,CP,2435,2901,104,3,\"[2901, 2902, 
2903]\"\r\nCP-12.zip,1,CP,1482,4049,75,3,\"[4047, 4048, 4049]\"\r\nCP-6.zip,1,CP,1231,3449,375,1,[3449]\r\nCP-28.zip,1,CP,3797,5741,28,1,[5741]\r\nCP-7.zip,1,CP,1307,3648,242,4,\"[3645, 3646, 3647, 3648]\"\r\nNCP-1.zip,2,NCP,1030,2600,279,1,[2600]\r\nCP-11.zip,1,CP,1448,3970,62,2,\"[3969, 3970]\"\r\nNormal-20.zip,0,Normal,2255,710,95,1,[710]\r\nCP-2.zip,1,CP,1124,3342,215,1,[3342]\r\nNCP-28.zip,2,NCP,872,2404,46,2,\"[2403, 2404]\"\r\nNormal-3.zip,0,Normal,1765,1147,60,2,\"[1147, 1148]\"\r\nNCP-11.zip,2,NCP,289,1724,47,2,\"[1723, 1724]\"\r\nCP-11.zip,1,CP,1442,3956,58,3,\"[3954, 3955, 3956]\"\r\nCP-1.zip,1,CP,1081,3126,68,1,[3126]\r\nNormal-20.zip,0,Normal,2263,718,108,1,[718]\r\nNCP-19.zip,2,NCP,524,2204,191,1,[2204]\r\nNormal-4.zip,0,Normal,784,219,105,1,[219]\r\nCP-8.zip,1,CP,1337,3715,60,2,\"[3714, 3715]\"\r\nNCP-28.zip,2,NCP,841,2356,282,1,[2356]\r\nNCP-26.zip,2,NCP,3983,5510,40,1,[5510]\r\nCP-20.zip,1,CP,2767,3298,35,1,[3298]\r\nNormal-19.zip,0,Normal,2229,684,87,1,[684]\r\nNCP-15.zip,2,NCP,429,2012,55,2,\"[2011, 2012]\"\r\nCP-19.zip,1,CP,1788,3197,52,8,\"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]\"\r\nCP-15.zip,1,CP,1558,4235,62,3,\"[4234, 4235, 4236]\"\r\nNCP-3.zip,2,NCP,1280,2721,50,1,[2721]\r\nNCP-4.zip,2,NCP,148,1440,150,2,\"[1440, 1441]\"\r\nNormal-22.zip,0,Normal,2582,92,39,1,[92]\r\nNormal-23.zip,0,Normal,2623,133,35,1,[133]\r\nCP-13.zip,1,CP,1496,4090,55,2,\"[4090, 4091]\"\r\nNormal-2.zip,0,Normal,1740,1049,21,8,\"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]\"\r\nCP-30.zip,1,CP,3835,5779,23,1,[5779]\r\nCP-11.zip,1,CP,1442,3954,139,3,\"[3954, 3955, 3956]\"\r\nNCP-15.zip,2,NCP,429,2011,131,2,\"[2011, 2012]\"\r\nCP-17.zip,1,CP,1621,4309,29,1,[4309]\r\nCP-6.zip,1,CP,1244,3462,87,1,[3462]\r\nNCP-1.zip,2,NCP,1021,2590,181,4,\"[2587, 2588, 2589, 2590]\"\r\nNCP-9.zip,2,NCP,2706,2672,51,1,[2672]\r\nNCP-14.zip,2,NCP,391,1932,131,2,\"[1932, 1933]\"\r\nCP-19.zip,1,CP,1785,3189,67,6,\"[3186, 3187, 3188, 3189, 3190, 
3191]\"\r\nCP-3.zip,1,CP,1134,3352,330,1,[3352]\r\nCP-8.zip,1,CP,1346,3734,53,3,\"[3733, 3734, 3735]\"\r\nNCP-12.zip,2,NCP,320,1789,58,2,\"[1788, 1789]\"\r\nNCP-21.zip,2,NCP,77,1287,126,2,\"[1287, 1288]\"\r\nCP-17.zip,1,CP,1647,4335,23,1,[4335]\r\nCP-11.zip,1,CP,1453,3979,221,3,\"[3979, 3980, 3981]\"\r\nNormal-2.zip,0,Normal,1759,1117,65,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nNCP-17.zip,2,NCP,481,2118,68,2,\"[2117, 2118]\"\r\nNCP-3.zip,2,NCP,1279,2720,66,1,[2720]\r\nCP-8.zip,1,CP,1346,3733,53,3,\"[3733, 3734, 3735]\"\r\nNormal-10.zip,0,Normal,1954,409,88,1,[409]\r\nCP-17.zip,1,CP,1648,4336,29,1,[4336]\r\nCP-14.zip,1,CP,1524,4154,58,3,\"[4152, 4153, 4154]\"\r\nNormal-18.zip,0,Normal,2216,671,97,1,[671]\r\nNCP-27.zip,2,NCP,179,1503,43,2,\"[1503, 1502]\"\r\nCP-19.zip,1,CP,1788,3202,55,8,\"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]\"\r\nCP-12.zip,1,CP,1482,4048,75,3,\"[4047, 4048, 4049]\"\r\nNCP-29.zip,2,NCP,913,2455,268,1,[2455]\r\nCP-19.zip,1,CP,2444,2919,112,2,\"[2918, 2919]\"\r\nCP-22.zip,1,CP,639,3001,136,1,[3001]\r\nNCP-2.zip,2,NCP,121,1383,100,2,\"[1383, 1384]\"\r\nCP-8.zip,1,CP,1324,3685,58,2,\"[3684, 3685]\"\r\nCP-11.zip,1,CP,1430,3929,77,2,\"[3928, 3929]\"\r\nNCP-10.zip,2,NCP,271,1689,61,2,\"[1688, 1689]\"\r\nNCP-15.zip,2,NCP,401,1952,58,2,\"[1951, 1952]\"\r\nNormal-4.zip,0,Normal,788,223,336,1,[223]\r\nNormal-27.zip,0,Normal,3898,5428,74,1,[5428]\r\nNormal-21.zip,0,Normal,2312,767,88,1,[767]\r\nNormal-17.zip,0,Normal,2170,625,62,1,[625]\r\nNCP-3.zip,2,NCP,130,1406,59,2,\"[1405, 1406]\"\r\nCP-3.zip,1,CP,1154,3372,169,1,[3372]\r\nNormal-3.zip,0,Normal,1765,1148,60,2,\"[1147, 1148]\"\r\nNormal-11.zip,0,Normal,1962,417,78,1,[417]\r\nCP-18.zip,1,CP,1667,4355,26,1,[4355]\r\nCP-1.zip,1,CP,1066,3105,59,1,[3105]\r\nNCP-1.zip,2,NCP,1047,2619,473,1,[2619]\r\nNCP-10.zip,2,NCP,2711,2704,44,1,[2704]\r\nNormal-19.zip,0,Normal,2237,692,85,1,[692]\r\nNCP-11.zip,2,NCP,289,1723,110,2,\"[1723, 
1724]\"\r\nNCP-7.zip,2,NCP,240,1626,66,2,\"[1625, 1626]\"\r\nNormal-11.zip,0,Normal,1974,429,96,1,[429]\r\nNormal-26.zip,0,Normal,3887,5401,67,3,\"[5400, 5401, 5404]\"\r\nNormal-26.zip,0,Normal,3891,5411,67,2,\"[5411, 5412]\"\r\nNormal-18.zip,0,Normal,2191,646,106,1,[646]\r\nNCP-28.zip,2,NCP,840,2355,55,1,[2355]\r\nNormal-6.zip,0,Normal,1814,269,88,1,[269]\r\nNCP-12.zip,2,NCP,329,1807,66,2,\"[1806, 1807]\"\r\nCP-24.zip,1,CP,686,3048,133,1,[3048]\r\nCP-19.zip,1,CP,2432,2894,124,1,[2894]\r\nNormal-10.zip,0,Normal,1952,407,107,1,[407]\r\nCP-13.zip,1,CP,1515,4133,57,3,\"[4131, 4132, 4133]\"\r\nCP-8.zip,1,CP,1347,3737,34,3,\"[3736, 3737, 3738]\"\r\nNormal-2.zip,0,Normal,1754,1095,69,4,\"[1093, 1094, 1095, 1096]\"\r\nCP-22.zip,1,CP,622,2984,459,1,[2984]\r\nCP-7.zip,1,CP,1303,3629,244,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-3.zip,1,CP,1141,3359,350,1,[3359]\r\nCP-7.zip,1,CP,1303,3631,242,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNormal-27.zip,0,Normal,3911,5451,65,6,\"[5447, 5448, 5449, 5450, 5451, 5452]\"\r\nCP-14.zip,1,CP,1533,4173,100,3,\"[4173, 4174, 4175]\"\r\nNormal-10.zip,0,Normal,1935,390,91,1,[390]\r\nNCP-22.zip,2,NCP,822,2332,36,2,\"[2332, 2333]\"\r\nCP-7.zip,1,CP,1303,3622,28,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNormal-22.zip,0,Normal,2320,775,91,1,[775]\r\nNormal-25.zip,0,Normal,3859,5371,216,1,[5371]\r\nNormal-12.zip,0,Normal,2018,473,93,1,[473]\r\nCP-9.zip,1,CP,1359,3764,181,3,\"[3764, 3765, 3766]\"\r\nCP-20.zip,1,CP,2452,2931,298,1,[2931]\r\nNCP-23.zip,2,NCP,90,1316,100,2,\"[1316, 1317]\"\r\nNormal-2.zip,0,Normal,1744,1058,71,2,\"[1058, 
1059]\"\r\nNCP-18.zip,2,NCP,492,2141,58,2,\"[2140, 2141]\"\r\nNormal-13.zip,0,Normal,2053,508,81,1,[508]\r\nNormal-17.zip,0,Normal,2156,611,82,1,[611]\r\nCP-19.zip,1,CP,1785,3190,79,6,\"[3186, 3187, 3188, 3189, 3190, 3191]\"\r\nNCP-19.zip,2,NCP,541,2239,121,2,\"[2239, 2240]\"\r\nNCP-19.zip,2,NCP,531,2221,58,2,\"[2220, 2221]\"\r\nCP-19.zip,1,CP,2448,2925,104,2,\"[2925, 2926]\"\r\nCP-31.zip,1,CP,4044,5593,276,1,[5593]\r\nCP-8.zip,1,CP,1345,3732,55,2,\"[3731, 3732]\"\r\nNormal-3.zip,0,Normal,743,178,340,1,[178]\r\nNormal-23.zip,0,Normal,2613,123,40,1,[123]\r\nNormal-1.zip,0,Normal,1714,983,71,3,\"[982, 983, 984]\"\r\nNCP-8.zip,2,NCP,268,1683,53,2,\"[1682, 1683]\"\r\nCP-8.zip,1,CP,1347,3738,34,3,\"[3736, 3737, 3738]\"\r\nCP-25.zip,1,CP,718,3080,466,1,[3080]\r\nNormal-13.zip,0,Normal,2024,479,86,1,[479]\r\nNCP-11.zip,2,NCP,310,1767,169,2,\"[1767, 1768]\"\r\nNormal-1.zip,0,Normal,1668,780,63,4,\"[778, 779, 780, 781]\"\r\nCP-17.zip,1,CP,1636,4324,26,1,[4324]\r\nNCP-20.zip,2,NCP,55,1244,63,2,\"[1243, 1244]\"\r\nCP-32.zip,1,CP,2463,3227,77,1,[3227]\r\nNCP-16.zip,2,NCP,435,2023,153,2,\"[2023, 2024]\"\r\nNCP-2.zip,2,NCP,106,1350,63,2,\"[1349, 1350]\"\r\nCP-27.zip,1,CP,3753,5697,20,1,[5697]\r\nNCP-15.zip,2,NCP,415,1983,63,2,\"[1982, 1983]\"\r\nNCP-5.zip,2,NCP,191,1527,54,2,\"[1526, 1527]\"\r\nCP-3.zip,1,CP,1142,3360,138,1,[3360]\r\nNCP-19.zip,2,NCP,531,2220,139,2,\"[2220, 2221]\"\r\nCP-14.zip,1,CP,1524,4153,58,3,\"[4152, 4153, 4154]\"\r\nCP-19.zip,1,CP,1788,3196,49,8,\"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]\"\r\nCP-14.zip,1,CP,1533,4175,42,3,\"[4173, 4174, 4175]\"\r\nNCP-7.zip,2,NCP,244,1635,69,2,\"[1634, 1635]\"\r\nNormal-7.zip,0,Normal,1848,303,95,1,[303]\r\nCP-24.zip,1,CP,703,3065,120,1,[3065]\r\nNormal-1.zip,0,Normal,1731,1024,36,1,[1024]\r\nCP-11.zip,1,CP,1448,3969,62,2,\"[3969, 3970]\"\r\nNormal-25.zip,0,Normal,3850,5362,180,1,[5362]\r\nCP-7.zip,1,CP,13,3172,255,4,\"[3170, 3171, 3172, 3173]\"\r\nCP-14.zip,1,CP,1533,4174,42,3,\"[4173, 4174, 
4175]\"\r\nNCP-3.zip,2,NCP,130,1405,140,2,\"[1405, 1406]\"\r\nCP-23.zip,1,CP,647,3009,384,1,[3009]\r\nNormal-24.zip,0,Normal,2637,147,36,1,[147]\r\nNCP-28.zip,2,NCP,848,2367,283,2,\"[2366, 2367]\"\r\nNormal-9.zip,0,Normal,1903,358,86,1,[358]\r\nNormal-26.zip,0,Normal,3889,5408,65,2,\"[5407, 5408]\"\r\nNCP-20.zip,2,NCP,567,2293,60,2,\"[2292, 2293]\"\r\nCP-22.zip,1,CP,621,2983,174,1,[2983]\r\nCP-10.zip,1,CP,1389,3835,51,3,\"[3833, 3834, 3835]\"\r\nCP-9.zip,1,CP,1362,3773,61,2,\"[3772, 3773]\"\r\nNormal-27.zip,0,Normal,3897,5426,72,4,\"[5423, 5424, 5426, 5427]\"\r\nNCP-15.zip,2,NCP,428,2010,53,2,\"[2009, 2010]\"\r\nNormal-3.zip,0,Normal,759,194,297,1,[194]\r\nCP-13.zip,1,CP,1497,4092,68,3,\"[4092, 4093, 4094]\"\r\nNormal-19.zip,0,Normal,2246,701,87,1,[701]\r\nCP-3.zip,1,CP,1130,3348,166,1,[3348]\r\nCP-14.zip,1,CP,1552,4222,62,2,\"[4221, 4222]\"\r\nNCP-26.zip,2,NCP,3994,5518,52,1,[5518]\r\nNCP-27.zip,2,NCP,328,1805,43,2,\"[1804, 1805]\"\r\nNCP-13.zip,2,NCP,369,1889,138,2,\"[1889, 1890]\"\r\nCP-20.zip,1,CP,2756,3287,56,1,[3287]\r\nCP-22.zip,1,CP,638,3000,116,1,[3000]\r\nCP-6.zip,1,CP,1250,3468,451,1,[3468]\r\nCP-19.zip,1,CP,2437,2905,316,3,\"[2905, 2906, 2907]\"\r\nNormal-16.zip,0,Normal,2130,585,88,1,[585]\r\nNCP-14.zip,2,NCP,376,1904,142,2,\"[1904, 1905]\"\r\nNormal-10.zip,0,Normal,1932,387,91,1,[387]\r\nNCP-16.zip,2,NCP,453,2060,121,2,\"[2060, 2061]\"\r\nNCP-5.zip,2,NCP,191,1526,128,2,\"[1526, 1527]\"\r\nCP-12.zip,1,CP,1476,4034,53,2,\"[4033, 4034]\"\r\nNCP-5.zip,2,NCP,175,1495,55,2,\"[1494, 1495]\"\r\nNCP-21.zip,2,NCP,71,1275,53,2,\"[1274, 1275]\"\r\nNormal-10.zip,0,Normal,1925,380,90,1,[380]\r\nNCP-30.zip,2,NCP,994,2548,226,2,\"[2547, 2548]\"\r\nCP-4.zip,1,CP,1192,3410,184,1,[3410]\r\nNormal-23.zip,0,Normal,2631,141,38,1,[141]\r\nNormal-2.zip,0,Normal,1751,1080,61,6,\"[1079, 1080, 1081, 1082, 1083, 
1084]\"\r\nNCP-9.zip,2,NCP,2684,2697,50,1,[2697]\r\nCP-27.zip,1,CP,3757,5701,22,1,[5701]\r\nNCP-3.zip,2,NCP,1288,2729,61,1,[2729]\r\nNCP-18.zip,2,NCP,505,2166,157,2,\"[2166, 2167]\"\r\nCP-8.zip,1,CP,1348,3741,59,3,\"[3739, 3740, 3741]\"\r\nNormal-24.zip,0,Normal,2651,161,34,1,[161]\r\nNormal-23.zip,0,Normal,2618,128,35,1,[128]\r\nCP-8.zip,1,CP,1331,3702,62,2,\"[3701, 3702]\"\r\nNCP-14.zip,2,NCP,398,1947,70,2,\"[1946, 1947]\"\r\nNCP-4.zip,2,NCP,158,1460,122,2,\"[1460, 1461]\"\r\nNCP-23.zip,2,NCP,89,1312,157,4,\"[1311, 1312, 1313, 1315]\"\r\nNormal-2.zip,0,Normal,1759,1116,64,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nCP-17.zip,1,CP,1645,4333,26,1,[4333]\r\nCP-10.zip,1,CP,1408,3880,59,3,\"[3878, 3879, 3880]\"\r\nCP-30.zip,1,CP,3917,5541,62,1,[5541]\r\nNCP-30.zip,2,NCP,933,2475,23,1,[2475]\r\nCP-8.zip,1,CP,1344,3728,142,3,\"[3728, 3729, 3730]\"\r\nNCP-17.zip,2,NCP,459,2072,133,2,\"[2072, 2073]\"\r\nNCP-4.zip,2,NCP,150,1445,75,2,\"[1444, 1445]\"\r\nCP-12.zip,1,CP,1455,3986,58,3,\"[3985, 3986, 3987]\"\r\nNormal-27.zip,0,Normal,3897,5427,72,4,\"[5423, 5424, 5426, 5427]\"\r\nCP-18.zip,1,CP,1772,3177,81,6,\"[3176, 3177, 3178, 3179, 3180, 3181]\"\r\nCP-27.zip,1,CP,3745,5689,23,1,[5689]\r\nNCP-29.zip,2,NCP,920,2462,183,1,[2462]\r\nNCP-9.zip,2,NCP,2688,2655,56,1,[2655]\r\nNormal-8.zip,0,Normal,1887,342,94,1,[342]\r\nCP-1.zip,1,CP,1076,3120,70,1,[3120]\r\nNCP-21.zip,2,NCP,575,2308,144,2,\"[2308, 2309]\"\r\nNormal-15.zip,0,Normal,2100,555,94,1,[555]\r\nNCP-11.zip,2,NCP,285,1716,62,2,\"[1715, 1716]\"\r\nCP-8.zip,1,CP,1344,3729,59,3,\"[3728, 3729, 3730]\"\r\nNormal-12.zip,0,Normal,2021,476,85,1,[476]\r\nNormal-15.zip,0,Normal,2105,560,87,1,[560]\r\nCP-9.zip,1,CP,1366,3784,57,3,\"[3782, 3783, 3784]\"\r\nCP-18.zip,1,CP,1772,3181,75,6,\"[3176, 3177, 3178, 3179, 3180, 3181]\"\r\nNCP-15.zip,2,NCP,426,2006,58,2,\"[2005, 2006]\"\r\nNCP-1.zip,2,NCP,1020,2586,45,1,[2586]\r\nNCP-13.zip,2,NCP,356,1863,124,2,\"[1863, 
1864]\"\r\nNormal-8.zip,0,Normal,1865,320,99,1,[320]\r\nNCP-30.zip,2,NCP,994,2547,226,2,\"[2547, 2548]\"\r\nNormal-12.zip,0,Normal,2011,466,93,1,[466]\r\nCP-18.zip,1,CP,1773,3185,67,4,\"[3182, 3183, 3184, 3185]\"\r\nNCP-18.zip,2,NCP,505,2167,66,2,\"[2166, 2167]\"\r\nCP-8.zip,1,CP,1328,3694,69,2,\"[3693, 3694]\"\r\nNCP-2.zip,2,NCP,1278,2719,61,1,[2719]\r\nCP-25.zip,1,CP,736,3098,494,1,[3098]\r\nCP-19.zip,1,CP,1785,3186,67,6,\"[3186, 3187, 3188, 3189, 3190, 3191]\"\r\nCP-24.zip,1,CP,7,3512,299,2,\"[3511, 3512]\"\r\nNormal-27.zip,0,Normal,3913,5455,71,2,\"[5454, 5455]\"\r\nNCP-6.zip,2,NCP,218,1580,139,2,\"[1580, 1581]\"\r\nNormal-4.zip,0,Normal,795,230,120,1,[230]\r\nNCP-6.zip,2,NCP,207,1559,46,2,\"[1558, 1559]\"\r\nNCP-5.zip,2,NCP,189,1523,58,2,\"[1522, 1523]\"\r\nNormal-22.zip,0,Normal,2314,769,84,1,[769]\r\nCP-14.zip,1,CP,1541,4195,58,3,\"[4194, 4195, 4196]\"\r\nNormal-26.zip,0,Normal,3866,5378,27,1,[5378]\r\nNCP-30.zip,2,NCP,938,2481,78,2,\"[2480, 2481]\"\r\nNCP-1.zip,2,NCP,1041,2612,126,1,[2612]\r\nNormal-24.zip,0,Normal,2664,174,28,1,[174]\r\nNCP-21.zip,2,NCP,75,1283,128,2,\"[1283, 1284]\"\r\nCP-14.zip,1,CP,1542,4198,54,3,\"[4197, 4198, 4199]\"\r\nNCP-11.zip,2,NCP,286,1718,51,2,\"[1717, 1718]\"\r\nCP-8.zip,1,CP,1332,3704,41,2,\"[3703, 3704]\"\r\nCP-14.zip,1,CP,1527,4160,142,3,\"[4160, 4161, 4162]\"\r\nNormal-2.zip,0,Normal,1749,1071,66,4,\"[1069, 1070, 1071, 1072]\"\r\nCP-7.zip,1,CP,13,3170,271,4,\"[3170, 3171, 3172, 3173]\"\r\nCP-20.zip,1,CP,2769,3300,36,1,[3300]\r\nNormal-11.zip,0,Normal,1973,428,90,1,[428]\r\nCP-28.zip,1,CP,3783,5727,26,1,[5727]\r\nNCP-12.zip,2,NCP,320,1788,139,2,\"[1788, 1789]\"\r\nNormal-10.zip,0,Normal,1929,384,91,1,[384]\r\nNormal-7.zip,0,Normal,1841,296,79,1,[296]\r\nNormal-8.zip,0,Normal,1881,336,91,1,[336]\r\nNCP-25.zip,2,NCP,3964,5475,41,1,[5475]\r\nCP-12.zip,1,CP,1480,4043,54,2,\"[4042, 4043]\"\r\nNCP-23.zip,2,NCP,91,1319,43,2,\"[1318, 
1319]\"\r\nNCP-11.zip,2,NCP,30,1193,56,1,[1193]\r\nNCP-29.zip,2,NCP,924,2466,18,1,[2466]\r\nCP-16.zip,1,CP,1614,4302,23,1,[4302]\r\nNormal-14.zip,0,Normal,2061,516,88,1,[516]\r\nNCP-27.zip,2,NCP,826,2339,54,1,[2339]\r\nNormal-13.zip,0,Normal,2038,493,80,1,[493]\r\nNormal-1.zip,0,Normal,1715,985,71,2,\"[985, 986]\"\r\nCP-28.zip,1,CP,3782,5726,25,1,[5726]\r\nCP-21.zip,1,CP,2777,3308,22,1,[3308]\r\nCP-8.zip,1,CP,1328,3693,69,2,\"[3693, 3694]\"\r\nCP-7.zip,1,CP,1303,3628,50,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNCP-17.zip,2,NCP,468,2091,154,2,\"[2091, 2092]\"\r\nNCP-13.zip,2,NCP,36,1205,59,2,\"[1204, 1205]\"\r\nNormal-12.zip,0,Normal,2000,455,93,1,[455]\r\nCP-19.zip,1,CP,2448,2926,102,2,\"[2925, 2926]\"\r\nNCP-10.zip,2,NCP,2728,2711,54,1,[2711]\r\nNCP-8.zip,2,NCP,263,1672,177,2,\"[1672, 1673]\"\r\nCP-30.zip,1,CP,3831,5775,25,1,[5775]\r\nNormal-1.zip,0,Normal,1709,973,61,2,\"[973, 974]\"\r\nNormal-2.zip,0,Normal,1740,1046,300,8,\"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]\"\r\nCP-27.zip,1,CP,3751,5695,22,1,[5695]\r\nCP-11.zip,1,CP,1453,3981,56,3,\"[3979, 3980, 3981]\"\r\nCP-16.zip,1,CP,1617,4305,23,1,[4305]\r\nCP-19.zip,1,CP,1788,3198,53,8,\"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]\"\r\nCP-19.zip,1,CP,2447,2924,86,2,\"[2923, 2924]\"\r\nNCP-20.zip,2,NCP,55,1243,150,2,\"[1243, 1244]\"\r\nCP-1.zip,1,CP,1069,3108,77,4,\"[3108, 3109, 3110, 3111]\"\r\nCP-29.zip,1,CP,3827,5771,26,1,[5771]\r\nCP-16.zip,1,CP,1599,4287,17,1,[4287]\r\nNCP-12.zip,2,NCP,34,1201,64,2,\"[1200, 1201]\"\r\nNCP-19.zip,2,NCP,523,2202,148,2,\"[2202, 2203]\"\r\nCP-19.zip,1,CP,2429,2890,100,1,[2890]\r\nNCP-9.zip,2,NCP,2695,2661,45,1,[2661]\r\nNormal-1.zip,0,Normal,1730,1022,59,5,\"[1019, 1020, 1021, 1022, 1023]\"\r\nCP-24.zip,1,CP,7,3511,298,2,\"[3511, 
3512]\"\r\nNCP-27.zip,2,NCP,1045,2617,30,1,[2617]\r\nNormal-15.zip,0,Normal,2088,543,75,1,[543]\r\nNormal-25.zip,0,Normal,3853,5365,205,1,[5365]\r\nNormal-14.zip,0,Normal,2076,531,77,1,[531]\r\nNCP-22.zip,2,NCP,84,1301,127,2,\"[1301, 1302]\"\r\nCP-18.zip,1,CP,1660,4348,23,1,[4348]\r\nNCP-26.zip,2,NCP,3980,5487,38,1,[5487]\r\nCP-7.zip,1,CP,1303,3620,45,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-20.zip,1,CP,2758,3289,35,1,[3289]\r\nNormal-6.zip,0,Normal,1808,263,95,1,[263]\r\nNormal-2.zip,0,Normal,1739,1044,56,3,\"[1042, 1043, 1044]\"\r\nCP-1.zip,1,CP,1068,3107,62,1,[3107]\r\nNormal-14.zip,0,Normal,2083,538,87,1,[538]\r\nCP-12.zip,1,CP,1484,4054,46,3,\"[4053, 4054, 4055]\"\r\nCP-29.zip,1,CP,3811,5755,23,1,[5755]\r\nCP-14.zip,1,CP,1548,4213,51,2,\"[4213, 4214]\"\r\nNCP-20.zip,2,NCP,561,2281,58,2,\"[2280, 2281]\"\r\nCP-14.zip,1,CP,1544,4204,51,3,\"[4203, 4204, 4205]\"\r\nNCP-27.zip,2,NCP,1062,2639,176,1,[2639]\r\nCP-25.zip,1,CP,735,3097,110,1,[3097]\r\nCP-7.zip,1,CP,1303,3614,27,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-2.zip,1,CP,1115,3333,180,1,[3333]\r\nCP-27.zip,1,CP,3756,5700,20,1,[5700]\r\nNormal-5.zip,0,Normal,813,248,136,1,[248]\r\nNormal-19.zip,0,Normal,2221,676,103,1,[676]\r\nNormal-27.zip,0,Normal,3902,5434,73,1,[5434]\r\nCP-11.zip,1,CP,1437,3943,57,2,\"[3942, 3943]\"\r\nNCP-2.zip,2,NCP,126,1398,64,2,\"[1396, 1398]\"\r\nNormal-20.zip,0,Normal,2265,720,87,1,[720]\r\nCP-16.zip,1,CP,1589,4277,23,1,[4277]\r\nNormal-16.zip,0,Normal,2149,604,85,1,[604]\r\nNCP-19.zip,2,NCP,523,2203,62,2,\"[2202, 2203]\"\r\nCP-12.zip,1,CP,1455,3985,138,3,\"[3985, 3986, 3987]\"\r\nNormal-27.zip,0,Normal,3911,5450,68,6,\"[5447, 5448, 5449, 5450, 5451, 
5452]\"\r\nCP-30.zip,1,CP,4040,5589,38,1,[5589]\r\nNCP-1.zip,2,NCP,1049,2622,205,1,[2622]\r\nNormal-1.zip,0,Normal,1674,811,74,2,\"[810, 811]\"\r\nNCP-19.zip,2,NCP,539,2236,55,2,\"[2235, 2236]\"\r\nNormal-1.zip,0,Normal,1668,779,60,4,\"[778, 779, 780, 781]\"\r\nNCP-19.zip,2,NCP,542,2241,130,2,\"[2241, 2242]\"\r\nCP-25.zip,1,CP,739,3101,112,1,[3101]\r\nCP-9.zip,1,CP,1367,3785,140,3,\"[3785, 3786, 3787]\"\r\nCP-14.zip,1,CP,1549,4215,61,2,\"[4215, 4216]\"\r\nNCP-19.zip,2,NCP,53,1239,144,2,\"[1239, 1240]\"\r\nNormal-1.zip,0,Normal,1730,1021,294,5,\"[1019, 1020, 1021, 1022, 1023]\"\r\nNCP-29.zip,2,NCP,918,2460,213,1,[2460]\r\nNCP-23.zip,2,NCP,89,1311,138,4,\"[1311, 1312, 1313, 1315]\"\r\nNormal-2.zip,0,Normal,1759,1119,66,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nCP-9.zip,1,CP,1359,3765,46,3,\"[3764, 3765, 3766]\"\r\nNCP-25.zip,2,NCP,3706,5533,54,1,[5533]\r\nCP-8.zip,1,CP,1325,3686,65,2,\"[3686, 3687]\"\r\nNormal-1.zip,0,Normal,1683,864,72,6,\"[861, 862, 864, 865, 868, 869]\"\r\nNCP-19.zip,2,NCP,545,2248,57,2,\"[2247, 2248]\"\r\nNCP-15.zip,2,NCP,418,1989,143,2,\"[1989, 1990]\"\r\nCP-7.zip,1,CP,1261,3479,198,1,[3479]\r\nNCP-29.zip,2,NCP,895,2435,143,2,\"[2435, 2436]\"\r\nCP-12.zip,1,CP,1483,4051,62,3,\"[4050, 4051, 4052]\"\r\nCP-12.zip,1,CP,1460,3999,60,2,\"[3998, 3999]\"\r\nCP-12.zip,1,CP,1456,3988,122,3,\"[3988, 3989, 3990]\"\r\nNormal-12.zip,0,Normal,2014,469,98,1,[469]\r\nCP-14.zip,1,CP,1542,4197,180,3,\"[4197, 4198, 4199]\"\r\nNormal-2.zip,0,Normal,1755,1098,73,4,\"[1097, 1098, 1099, 1100]\"\r\nNCP-14.zip,2,NCP,382,1917,58,2,\"[1916, 1917]\"\r\nNCP-4.zip,2,NCP,153,1451,58,2,\"[1450, 1451]\"\r\nNormal-27.zip,0,Normal,3913,5454,68,2,\"[5454, 5455]\"\r\nNormal-1.zip,0,Normal,1674,810,74,2,\"[810, 811]\"\r\nNormal-2.zip,0,Normal,1736,1036,55,5,\"[1032, 1033, 1034, 1035, 1036]\"\r\nNormal-1.zip,0,Normal,1682,852,81,6,\"[847, 848, 852, 853, 857, 858]\"\r\nNormal-4.zip,0,Normal,796,231,287,1,[231]\r\nNCP-11.zip,2,NCP,292,1729,138,2,\"[1729, 
1730]\"\r\nNCP-12.zip,2,NCP,327,1803,55,2,\"[1802, 1803]\"\r\nNormal-25.zip,0,Normal,3712,5342,28,1,[5342]\r\nCP-4.zip,1,CP,1182,3400,130,1,[3400]\r\nCP-2.zip,1,CP,1113,3331,197,1,[3331]\r\nNCP-22.zip,2,NCP,888,2426,55,1,[2426]\r\nNormal-25.zip,0,Normal,3846,5358,209,1,[5358]\r\nCP-9.zip,1,CP,1362,3772,61,2,\"[3772, 3773]\"\r\nNCP-20.zip,2,NCP,572,2302,138,2,\"[2302, 2303]\"\r\nCP-4.zip,1,CP,1193,3411,190,1,[3411]\r\nNormal-5.zip,0,Normal,802,237,298,1,[237]\r\nCP-23.zip,1,CP,655,3017,511,1,[3017]\r\nNormal-2.zip,0,Normal,1751,1084,67,6,\"[1079, 1080, 1081, 1082, 1083, 1084]\"\r\nNCP-13.zip,2,NCP,360,1871,121,2,\"[1871, 1872]\"\r\nNCP-30.zip,2,NCP,977,2521,257,1,[2521]\r\nNCP-26.zip,2,NCP,3990,5514,51,1,[5514]\r\nNormal-3.zip,0,Normal,768,203,130,1,[203]\r\nNormal-1.zip,0,Normal,1713,980,71,2,\"[980, 981]\"\r\nCP-26.zip,1,CP,3732,5672,53,2,\"[5671, 5672]\"\r\nCP-7.zip,1,CP,1303,3625,32,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-20.zip,1,CP,2762,3293,33,1,[3293]\r\nNormal-2.zip,0,Normal,1740,1052,59,8,\"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]\"\r\nNormal-20.zip,0,Normal,2267,722,100,1,[722]\r\nNCP-5.zip,2,NCP,189,1522,139,2,\"[1522, 1523]\"\r\nNCP-28.zip,2,NCP,848,2366,57,2,\"[2366, 2367]\"\r\nNCP-6.zip,2,NCP,215,1575,65,2,\"[1574, 1575]\"\r\nNormal-27.zip,0,Normal,3905,5438,58,2,\"[5437, 5438]\"\r\nCP-4.zip,1,CP,1163,3381,239,1,[3381]\r\nCP-18.zip,1,CP,1665,4353,25,1,[4353]\r\nNormal-25.zip,0,Normal,3842,5354,189,1,[5354]\r\nNormal-22.zip,0,Normal,2583,93,46,1,[93]\r\nNCP-11.zip,2,NCP,308,1763,116,2,\"[1763, 1764]\"\r\nCP-4.zip,1,CP,1180,3398,150,1,[3398]\r\nCP-7.zip,1,CP,1316,3668,63,3,\"[3667, 3668, 3669]\"\r\nCP-5.zip,1,CP,1213,3431,159,1,[3431]\r\nNormal-10.zip,0,Normal,1947,402,89,1,[402]\r\nCP-24.zip,1,CP,698,3060,124,1,[3060]\r\nCP-15.zip,1,CP,1562,4243,55,2,\"[4243, 4244]\"\r\nNCP-10.zip,2,NCP,280,1706,121,2,\"[1706, 
1707]\"\r\nNCP-18.zip,2,NCP,498,2152,139,2,\"[2152, 2153]\"\r\nNCP-25.zip,2,NCP,3962,5473,58,1,[5473]\r\nCP-18.zip,1,CP,1772,3180,75,6,\"[3176, 3177, 3178, 3179, 3180, 3181]\"\r\nNormal-13.zip,0,Normal,2029,484,94,1,[484]\r\nNCP-16.zip,2,NCP,443,2041,50,2,\"[2040, 2041]\"\r\nNCP-24.zip,2,NCP,984,2529,259,2,\"[2529, 2530]\"\r\nCP-18.zip,1,CP,1773,3183,61,4,\"[3182, 3183, 3184, 3185]\"\r\nCP-5.zip,1,CP,1194,3412,158,1,[3412]\r\nNCP-14.zip,2,NCP,39,1211,58,2,\"[1210, 1211]\"\r\nCP-13.zip,1,CP,15,3174,98,1,[3174]\r\nCP-28.zip,1,CP,3775,5719,29,1,[5719]\r\nNCP-17.zip,2,NCP,477,2110,58,2,\"[2109, 2110]\"\r\nNormal-16.zip,0,Normal,2133,588,73,1,[588]\r\nNCP-4.zip,2,NCP,150,1444,181,2,\"[1444, 1445]\"\r\nCP-4.zip,1,CP,1188,3406,308,1,[3406]\r\nNCP-8.zip,2,NCP,251,1649,55,2,\"[1648, 1649]\"\r\nCP-1.zip,1,CP,1094,3312,329,1,[3312]\r\nNCP-12.zip,2,NCP,327,1802,130,2,\"[1802, 1803]\"\r\nNormal-7.zip,0,Normal,1830,285,84,1,[285]\r\nCP-12.zip,1,CP,1481,4045,58,3,\"[4044, 4045, 4046]\"\r\nNCP-19.zip,2,NCP,52,1238,57,2,\"[1237, 1238]\"\r\nNCP-20.zip,2,NCP,557,2271,132,2,\"[2271, 2272]\"\r\nNCP-14.zip,2,NCP,398,1946,167,2,\"[1946, 1947]\"\r\nNCP-8.zip,2,NCP,260,1667,68,2,\"[1666, 1667]\"\r\nNormal-2.zip,0,Normal,1754,1094,73,4,\"[1093, 1094, 1095, 1096]\"\r\nNormal-2.zip,0,Normal,1736,1032,124,5,\"[1032, 1033, 1034, 1035, 1036]\"\r\nNCP-11.zip,2,NCP,292,1730,58,2,\"[1729, 1730]\"\r\nCP-25.zip,1,CP,725,3087,80,1,[3087]\r\nCP-15.zip,1,CP,1558,4234,62,3,\"[4234, 4235, 4236]\"\r\nNormal-17.zip,0,Normal,2161,616,99,1,[616]\r\nNCP-23.zip,2,NCP,970,2513,62,1,[2513]\r\nNCP-10.zip,2,NCP,277,1700,152,2,\"[1700, 1701]\"\r\nNCP-14.zip,2,NCP,395,1941,71,2,\"[1940, 1941]\"\r\nNormal-2.zip,0,Normal,1755,1100,71,4,\"[1097, 1098, 1099, 1100]\"\r\nCP-26.zip,1,CP,3718,5648,254,2,\"[5647, 5648]\"\r\nNormal-25.zip,0,Normal,3841,5353,188,1,[5353]\r\nNormal-23.zip,0,Normal,2621,131,41,1,[131]\r\nCP-7.zip,1,CP,1303,3623,45,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 
3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNCP-20.zip,2,NCP,555,2267,133,2,\"[2267, 2268]\"\r\nNCP-7.zip,2,NCP,244,1634,165,2,\"[1634, 1635]\"\r\nNormal-6.zip,0,Normal,1821,276,102,1,[276]\r\nNCP-17.zip,2,NCP,459,2073,56,2,\"[2072, 2073]\"\r\nNCP-2.zip,2,NCP,124,1390,58,2,\"[1389, 1390]\"\r\nNormal-18.zip,0,Normal,2185,640,100,1,[640]\r\nNCP-5.zip,2,NCP,193,1530,124,2,\"[1530, 1531]\"\r\nNCP-8.zip,2,NCP,253,1652,139,2,\"[1652, 1653]\"\r\nNCP-23.zip,2,NCP,89,1313,58,4,\"[1311, 1312, 1313, 1315]\"\r\nCP-5.zip,1,CP,1216,3434,307,1,[3434]\r\nNCP-30.zip,2,NCP,979,2523,345,1,[2523]\r\nNCP-23.zip,2,NCP,97,1331,41,2,\"[1330, 1331]\"\r\nNCP-20.zip,2,NCP,555,2268,56,2,\"[2267, 2268]\"\r\nNormal-16.zip,0,Normal,2126,581,84,1,[581]\r\nNCP-18.zip,2,NCP,488,2133,58,2,\"[2131, 2133]\"\r\nNCP-10.zip,2,NCP,28,1189,61,2,\"[1188, 1189]\"\r\nNCP-15.zip,2,NCP,41,1214,151,2,\"[1214, 1215]\"\r\nNCP-12.zip,2,NCP,32,1196,145,2,\"[1196, 1197]\"\r\nCP-26.zip,1,CP,3722,5656,50,2,\"[5656, 5657]\"\r\nCP-15.zip,1,CP,1573,4261,22,1,[4261]\r\nNCP-27.zip,2,NCP,1028,2598,147,1,[2598]\r\nNormal-18.zip,0,Normal,2197,652,105,1,[652]\r\nNormal-16.zip,0,Normal,2152,607,66,1,[607]\r\nNCP-14.zip,2,NCP,380,1913,62,2,\"[1912, 1913]\"\r\nNormal-15.zip,0,Normal,2093,548,72,1,[548]\r\nNCP-3.zip,2,NCP,1299,2740,63,1,[2740]\r\nCP-8.zip,1,CP,1348,3740,59,3,\"[3739, 3740, 3741]\"\r\nNormal-6.zip,0,Normal,1822,277,101,1,[277]\r\nNormal-4.zip,0,Normal,800,235,116,1,[235]\r\nCP-10.zip,1,CP,1386,3827,66,2,\"[3827, 3828]\"\r\nNormal-12.zip,0,Normal,2004,459,106,1,[459]\r\nCP-7.zip,1,CP,1303,3615,44,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNCP-25.zip,2,NCP,3957,5470,47,1,[5470]\r\nCP-5.zip,1,CP,1204,3422,294,1,[3422]\r\nCP-11.zip,1,CP,1420,3905,59,2,\"[3905, 
3906]\"\r\nCP-17.zip,1,CP,1649,4337,23,1,[4337]\r\nCP-28.zip,1,CP,3769,5713,18,1,[5713]\r\nNormal-8.zip,0,Normal,1868,323,91,1,[323]\r\nCP-1.zip,1,CP,1087,3219,400,1,[3219]\r\nCP-26.zip,1,CP,3640,5599,295,1,[5599]\r\nNCP-7.zip,2,NCP,248,1642,139,2,\"[1642, 1643]\"\r\nNCP-2.zip,2,NCP,125,1395,55,5,\"[1391, 1392, 1393, 1394, 1395]\"\r\nNCP-25.zip,2,NCP,3941,5538,38,1,[5538]\r\nCP-15.zip,1,CP,1558,4236,62,3,\"[4234, 4235, 4236]\"\r\nNCP-16.zip,2,NCP,443,2040,117,2,\"[2040, 2041]\"\r\nNormal-15.zip,0,Normal,2102,557,100,1,[557]\r\nNormal-2.zip,0,Normal,1755,1097,73,4,\"[1097, 1098, 1099, 1100]\"\r\nNormal-9.zip,0,Normal,1924,379,98,1,[379]\r\nCP-7.zip,1,CP,1303,3616,209,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-13.zip,1,CP,1517,4136,64,2,\"[4136, 4137]\"\r\nCP-1.zip,1,CP,1,3146,70,5,\"[3143, 3144, 3145, 3146, 3147]\"\r\nNormal-19.zip,0,Normal,2226,681,99,1,[681]\r\nCP-13.zip,1,CP,1517,4137,64,2,\"[4136, 4137]\"\r\nNCP-23.zip,2,NCP,95,1326,165,2,\"[1326, 1327]\"\r\nNCP-19.zip,2,NCP,538,2234,60,2,\"[2233, 2234]\"\r\nCP-6.zip,1,CP,1253,3471,130,1,[3471]\r\nNCP-7.zip,2,NCP,242,1629,133,2,\"[1629, 1630]\"\r\nCP-8.zip,1,CP,1337,3714,60,2,\"[3714, 3715]\"\r\nNCP-23.zip,2,NCP,912,2454,373,1,[2454]\r\nNormal-23.zip,0,Normal,2622,132,38,1,[132]\r\nNormal-8.zip,0,Normal,1871,326,73,1,[326]\r\nNCP-5.zip,2,NCP,193,1531,52,2,\"[1530, 1531]\"\r\nNormal-24.zip,0,Normal,2646,156,41,1,[156]\r\nCP-14.zip,1,CP,1538,4185,159,3,\"[4185, 4186, 4187]\"\r\nCP-23.zip,1,CP,667,3029,226,1,[3029]\r\nCP-1.zip,1,CP,1,3147,70,5,\"[3143, 3144, 3145, 3146, 3147]\"\r\nNCP-27.zip,2,NCP,1006,2566,42,2,\"[2566, 2567]\"\r\nNormal-1.zip,0,Normal,1711,977,63,2,\"[977, 978]\"\r\nNCP-14.zip,2,NCP,374,1899,139,2,\"[1899, 1900]\"\r\nNCP-16.zip,2,NCP,457,2069,57,2,\"[2068, 
2069]\"\r\nCP-22.zip,1,CP,634,2996,680,1,[2996]\r\nNCP-23.zip,2,NCP,905,2447,26,1,[2447]\r\nNormal-2.zip,0,Normal,1759,1118,65,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nNCP-11.zip,2,NCP,290,1725,103,2,\"[1725, 1726]\"\r\nNCP-21.zip,2,NCP,77,1288,53,2,\"[1287, 1288]\"\r\nCP-30.zip,1,CP,4018,5567,33,1,[5567]\r\nCP-12.zip,1,CP,1483,4052,62,3,\"[4050, 4051, 4052]\"\r\nCP-24.zip,1,CP,692,3054,74,1,[3054]\r\nNCP-6.zip,2,NCP,204,1552,139,2,\"[1552, 1553]\"\r\nNCP-7.zip,2,NCP,24,1179,146,2,\"[1179, 1180]\"\r\nCP-6.zip,1,CP,1251,3469,133,1,[3469]\r\nNormal-1.zip,0,Normal,1682,857,70,6,\"[847, 848, 852, 853, 857, 858]\"\r\nNCP-10.zip,2,NCP,2712,2705,42,1,[2705]\r\nCP-2.zip,1,CP,1100,3318,201,1,[3318]\r\nNormal-1.zip,0,Normal,1671,795,67,3,\"[793, 794, 795]\"\r\nNCP-17.zip,2,NCP,461,2077,67,2,\"[2076, 2077]\"\r\nCP-15.zip,1,CP,1564,4249,51,2,\"[4248, 4249]\"\r\nNCP-4.zip,2,NCP,153,1450,137,2,\"[1450, 1451]\"\r\nCP-4.zip,1,CP,1166,3384,202,1,[3384]\r\nNCP-28.zip,2,NCP,851,2370,145,1,[2370]\r\nNCP-23.zip,2,NCP,95,1327,69,2,\"[1326, 1327]\"\r\nNormal-18.zip,0,Normal,2196,651,95,1,[651]\r\nCP-27.zip,1,CP,3749,5693,20,1,[5693]\r\nNormal-6.zip,0,Normal,1797,252,85,1,[252]\r\nCP-14.zip,1,CP,1544,4203,122,3,\"[4203, 4204, 4205]\"\r\nCP-8.zip,1,CP,1345,3731,55,2,\"[3731, 3732]\"\r\nNCP-8.zip,2,NCP,2678,2649,55,1,[2649]\r\nNCP-23.zip,2,NCP,89,1315,66,4,\"[1311, 1312, 1313, 1315]\"\r\nNormal-17.zip,0,Normal,2167,622,76,1,[622]\r\nCP-22.zip,1,CP,631,2993,130,1,[2993]\r\nCP-16.zip,1,CP,1618,4306,26,1,[4306]\r\nNCP-17.zip,2,NCP,471,2097,139,2,\"[2097, 2098]\"\r\nNCP-15.zip,2,NCP,416,1986,58,2,\"[1984, 1986]\"\r\nCP-10.zip,1,CP,1389,3833,121,3,\"[3833, 3834, 3835]\"\r\nCP-24.zip,1,CP,696,3058,74,1,[3058]\r\nNCP-26.zip,2,NCP,3996,5494,37,1,[5494]\r\nCP-15.zip,1,CP,1565,4251,66,2,\"[4250, 4251]\"\r\nNCP-7.zip,2,NCP,248,1643,58,2,\"[1642, 1643]\"\r\nNCP-30.zip,2,NCP,932,2474,20,1,[2474]\r\nCP-8.zip,1,CP,1332,3703,41,2,\"[3703, 
3704]\"\r\nNormal-2.zip,0,Normal,1754,1093,73,4,\"[1093, 1094, 1095, 1096]\"\r\nNCP-3.zip,2,NCP,131,1408,50,2,\"[1407, 1408]\"\r\nNCP-13.zip,2,NCP,37,1206,147,2,\"[1206, 1207]\"\r\nNCP-7.zip,2,NCP,242,1630,56,2,\"[1629, 1630]\"\r\nCP-26.zip,1,CP,3643,5603,257,2,\"[5602, 5603]\"\r\nNCP-6.zip,2,NCP,211,1567,58,2,\"[1566, 1567]\"\r\nNormal-24.zip,0,Normal,2639,149,28,1,[149]\r\nNormal-13.zip,0,Normal,2037,492,82,1,[492]\r\nCP-16.zip,1,CP,1610,4298,22,1,[4298]\r\nNCP-15.zip,2,NCP,415,1982,149,2,\"[1982, 1983]\"\r\nNCP-2.zip,2,NCP,125,1394,55,5,\"[1391, 1392, 1393, 1394, 1395]\"\r\nNormal-23.zip,0,Normal,2616,126,39,1,[126]\r\nCP-26.zip,1,CP,3635,5594,291,1,[5594]\r\nNormal-18.zip,0,Normal,2211,666,85,1,[666]\r\nNCP-17.zip,2,NCP,481,2117,163,2,\"[2117, 2118]\"\r\nNCP-13.zip,2,NCP,37,1207,62,2,\"[1206, 1207]\"\r\nNormal-2.zip,0,Normal,1749,1070,61,4,\"[1069, 1070, 1071, 1072]\"\r\nNCP-29.zip,2,NCP,927,2469,20,1,[2469]\r\nCP-6.zip,1,CP,1226,3444,190,1,[3444]\r\nNCP-14.zip,2,NCP,394,1938,147,2,\"[1938, 1939]\"\r\nCP-19.zip,1,CP,1791,3212,71,4,\"[3210, 3211, 3212, 3213]\"\r\nCP-8.zip,1,CP,1334,3708,56,2,\"[3707, 3708]\"\r\nNCP-12.zip,2,NCP,324,1796,120,2,\"[1796, 1797]\"\r\nCP-30.zip,1,CP,3929,5626,71,2,\"[5626, 5627]\"\r\nNormal-7.zip,0,Normal,1832,287,91,1,[287]\r\nNormal-1.zip,0,Normal,1713,981,71,2,\"[980, 981]\"\r\nNCP-2.zip,2,NCP,111,1363,133,2,\"[1363, 1364]\"\r\nNormal-3.zip,0,Normal,1764,1144,66,4,\"[1143, 1144, 1145, 1146]\"\r\nCP-15.zip,1,CP,1560,4239,63,2,\"[4239, 4240]\"\r\nNCP-22.zip,2,NCP,84,1302,54,2,\"[1301, 1302]\"\r\nNormal-2.zip,0,Normal,1744,1059,71,2,\"[1058, 1059]\"\r\nCP-21.zip,1,CP,590,2952,86,1,[2952]\r\nNormal-9.zip,0,Normal,1901,356,83,1,[356]\r\nNCP-17.zip,2,NCP,461,2076,160,2,\"[2076, 2077]\"\r\nCP-24.zip,1,CP,683,3045,138,1,[3045]\r\nNormal-11.zip,0,Normal,1983,438,105,1,[438]\r\nNCP-14.zip,2,NCP,39,1210,139,2,\"[1210, 1211]\"\r\nNCP-18.zip,2,NCP,494,2144,156,2,\"[2144, 2145]\"\r\nNCP-14.zip,2,NCP,388,1927,68,2,\"[1926, 
1927]\"\r\nNCP-28.zip,2,NCP,853,2373,664,1,[2373]\r\nNormal-22.zip,0,Normal,2588,98,33,1,[98]\r\nNCP-17.zip,2,NCP,46,1225,124,2,\"[1225, 1226]\"\r\nNCP-2.zip,2,NCP,126,1396,152,2,\"[1396, 1398]\"\r\nNCP-15.zip,2,NCP,418,1990,58,2,\"[1989, 1990]\"\r\nNormal-3.zip,0,Normal,765,200,136,1,[200]\r\nCP-9.zip,1,CP,1370,3792,62,2,\"[3792, 3793]\"\r\nCP-13.zip,1,CP,1490,4071,166,3,\"[4071, 4072, 4073]\"\r\nCP-5.zip,1,CP,1212,3430,187,1,[3430]\r\nNCP-29.zip,2,NCP,894,2434,16,1,[2434]\r\nNormal-2.zip,0,Normal,1751,1083,67,6,\"[1079, 1080, 1081, 1082, 1083, 1084]\"\r\nCP-19.zip,1,CP,1788,3199,58,8,\"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]\"\r\nCP-12.zip,1,CP,1466,4011,52,2,\"[4011, 4012]\"\r\nCP-1.zip,1,CP,1088,3223,50,4,\"[3220, 3221, 3222, 3223]\"\r\nNCP-25.zip,2,NCP,3947,5503,41,1,[5503]\r\nCP-30.zip,1,CP,3931,5632,143,4,\"[5630, 5631, 5632, 5633]\"\r\nNCP-2.zip,2,NCP,124,1389,139,2,\"[1389, 1390]\"\r\nCP-7.zip,1,CP,1307,3645,53,4,\"[3645, 3646, 3647, 3648]\"\r\nNCP-27.zip,2,NCP,823,2334,183,1,[2334]\r\nNormal-1.zip,0,Normal,1728,1016,72,4,\"[1013, 1014, 1015, 1016]\"\r\nNCP-22.zip,2,NCP,860,2383,183,2,\"[2382, 2383]\"\r\nNCP-20.zip,2,NCP,559,2276,54,2,\"[2275, 2276]\"\r\nNormal-2.zip,0,Normal,1754,1096,69,4,\"[1093, 1094, 1095, 1096]\"\r\nCP-12.zip,1,CP,1473,4026,51,3,\"[4026, 4027, 4028]\"\r\nNormal-3.zip,0,Normal,1764,1146,62,4,\"[1143, 1144, 1145, 1146]\"\r\nCP-2.zip,1,CP,1103,3321,180,1,[3321]\r\nCP-4.zip,1,CP,1181,3399,238,1,[3399]\r\nCP-19.zip,1,CP,2436,2904,138,1,[2904]\r\nCP-28.zip,1,CP,3795,5739,23,1,[5739]\r\nCP-29.zip,1,CP,3805,5749,20,1,[5749]\r\nNCP-3.zip,2,NCP,1300,2741,60,1,[2741]\r\nNCP-23.zip,2,NCP,898,2439,48,1,[2439]\r\nNormal-23.zip,0,Normal,2612,122,31,1,[122]\r\nNCP-7.zip,2,NCP,24,1180,61,2,\"[1179, 1180]\"\r\nNormal-6.zip,0,Normal,1807,262,95,1,[262]\r\nNCP-30.zip,2,NCP,996,2551,189,2,\"[2551, 2552]\"\r\nNormal-9.zip,0,Normal,1893,348,82,1,[348]\r\nNCP-11.zip,2,NCP,290,1726,44,2,\"[1725, 1726]\"\r\nNCP-21.zip,2,NCP,80,1293,129,2,\"[1293, 
1294]\"\r\nNormal-24.zip,0,Normal,2655,165,37,1,[165]\r\nNCP-30.zip,2,NCP,996,2552,218,2,\"[2551, 2552]\"\r\nCP-18.zip,1,CP,1653,4341,29,1,[4341]\r\nNCP-5.zip,2,NCP,187,1518,136,2,\"[1518, 1519]\"\r\nNCP-26.zip,2,NCP,3993,5517,39,1,[5517]\r\nNCP-10.zip,2,NCP,273,1692,128,2,\"[1692, 1693]\"\r\nNormal-2.zip,0,Normal,1751,1082,62,6,\"[1079, 1080, 1081, 1082, 1083, 1084]\"\r\nNCP-5.zip,2,NCP,179,1502,122,2,\"[1503, 1502]\"\r\nNormal-26.zip,0,Normal,3887,5400,67,3,\"[5400, 5401, 5404]\"\r\nNCP-7.zip,2,NCP,234,1613,139,2,\"[1613, 1614]\"\r\nNormal-1.zip,0,Normal,1725,1006,60,1,[1006]\r\nNCP-15.zip,2,NCP,419,1992,55,2,\"[1991, 1992]\"\r\nCP-14.zip,1,CP,1523,4151,65,2,\"[4150, 4151]\"\r\nNCP-23.zip,2,NCP,938,2480,195,2,\"[2480, 2481]\"\r\nNCP-13.zip,2,NCP,342,1835,149,2,\"[1835, 1836]\"\r\nCP-24.zip,1,CP,680,3042,86,1,[3042]\r\nNCP-14.zip,2,NCP,394,1939,62,2,\"[1938, 1939]\"\r\nNCP-11.zip,2,NCP,288,1722,49,2,\"[1721, 1722]\"\r\nCP-14.zip,1,CP,1527,4162,58,3,\"[4160, 4161, 4162]\"\r\nCP-6.zip,1,CP,1241,3459,132,1,[3459]\r\nCP-10.zip,1,CP,1408,3878,198,3,\"[3878, 3879, 3880]\"\r\nNCP-14.zip,2,NCP,397,1945,66,2,\"[1944, 1945]\"\r\nCP-1.zip,1,CP,1,3145,248,5,\"[3143, 3144, 3145, 3146, 3147]\"\r\nNormal-15.zip,0,Normal,2111,566,95,1,[566]\r\nNormal-12.zip,0,Normal,2007,462,85,1,[462]\r\nNCP-6.zip,2,NCP,222,1589,52,2,\"[1588, 1589]\"\r\nNormal-25.zip,0,Normal,3856,5368,220,1,[5368]\r\nCP-6.zip,1,CP,1245,3463,306,1,[3463]\r\nCP-9.zip,1,CP,1380,3814,56,1,[3814]\r\nCP-11.zip,1,CP,1442,3955,58,3,\"[3954, 3955, 3956]\"\r\nNormal-26.zip,0,Normal,3889,5407,68,2,\"[5407, 5408]\"\r\nNormal-4.zip,0,Normal,773,208,321,1,[208]\r\nCP-23.zip,1,CP,671,3033,448,1,[3033]\r\nCP-23.zip,1,CP,674,3036,126,1,[3036]\r\nCP-19.zip,1,CP,1788,3200,54,8,\"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]\"\r\nNCP-12.zip,2,NCP,328,1804,112,2,\"[1804, 1805]\"\r\nNormal-22.zip,0,Normal,2581,91,44,1,[91]\r\nCP-7.zip,1,CP,1316,3669,62,3,\"[3667, 3668, 
3669]\"\r\nNCP-1.zip,2,NCP,1046,2618,70,1,[2618]\r\nNCP-16.zip,2,NCP,456,2067,57,2,\"[2066, 2067]\"\r\nCP-13.zip,1,CP,1508,4115,57,3,\"[4115, 4116, 4117]\"\r\nNormal-1.zip,0,Normal,1730,1020,63,5,\"[1019, 1020, 1021, 1022, 1023]\"\r\nNCP-17.zip,2,NCP,468,2092,65,2,\"[2091, 2092]\"\r\nNCP-7.zip,2,NCP,2488,2688,40,1,[2688]\r\nCP-10.zip,1,CP,1396,3852,58,3,\"[3851, 3852, 3853]\"\r\nNCP-16.zip,2,NCP,447,2049,58,2,\"[2048, 2049]\"\r\nNormal-8.zip,0,Normal,1864,319,88,1,[319]\r\nCP-15.zip,1,CP,1560,4240,63,2,\"[4239, 4240]\"\r\nCP-12.zip,1,CP,1484,4055,46,3,\"[4053, 4054, 4055]\"\r\nNormal-1.zip,0,Normal,1682,853,81,6,\"[847, 848, 852, 853, 857, 858]\"\r\nNormal-22.zip,0,Normal,2580,90,37,1,[90]\r\nCP-2.zip,1,CP,1128,3346,196,1,[3346]\r\nNCP-7.zip,2,NCP,240,1625,158,2,\"[1625, 1626]\"\r\nNormal-15.zip,0,Normal,2086,541,91,1,[541]\r\nNormal-7.zip,0,Normal,1837,292,94,1,[292]\r\nCP-1.zip,1,CP,1069,3111,77,4,\"[3108, 3109, 3110, 3111]\"\r\nCP-14.zip,1,CP,1549,4216,61,2,\"[4215, 4216]\"\r\nNormal-11.zip,0,Normal,1970,425,88,1,[425]\r\nNCP-13.zip,2,NCP,342,1836,61,2,\"[1835, 1836]\"\r\nCP-25.zip,1,CP,728,3090,86,1,[3090]\r\nNCP-21.zip,2,NCP,68,1268,115,2,\"[1268, 1269]\"\r\nCP-8.zip,1,CP,1342,3725,58,3,\"[3723, 3724, 3725]\"\r\nCP-12.zip,1,CP,1481,4046,58,3,\"[4044, 4045, 4046]\"\r\nCP-5.zip,1,CP,1210,3428,156,1,[3428]\r\nNCP-3.zip,2,NCP,136,1417,53,2,\"[1416, 1417]\"\r\nNCP-2.zip,2,NCP,125,1393,54,5,\"[1391, 1392, 1393, 1394, 1395]\"\r\nNCP-23.zip,2,NCP,97,1330,97,2,\"[1330, 1331]\"\r\nNCP-1.zip,2,NCP,1021,2588,209,4,\"[2587, 2588, 2589, 2590]\"\r\nNCP-12.zip,2,NCP,317,1782,50,2,\"[1781, 1782]\"\r\nNCP-14.zip,2,NCP,388,1926,162,2,\"[1926, 1927]\"\r\nCP-26.zip,1,CP,3641,5600,300,1,[5600]\r\nNormal-3.zip,0,Normal,760,195,117,1,[195]\r\nNCP-12.zip,2,NCP,325,1798,117,2,\"[1798, 1799]\"\r\nNormal-1.zip,0,Normal,1671,793,72,3,\"[793, 794, 795]\"\r\nNormal-1.zip,0,Normal,1683,869,64,6,\"[861, 862, 864, 865, 868, 
869]\"\r\nNormal-5.zip,0,Normal,807,242,132,1,[242]\r\nCP-19.zip,1,CP,1791,3211,55,4,\"[3210, 3211, 3212, 3213]\"\r\nNormal-4.zip,0,Normal,792,227,108,1,[227]\r\nCP-15.zip,1,CP,1564,4248,51,2,\"[4248, 4249]\"\r\nNCP-12.zip,2,NCP,324,1797,51,2,\"[1796, 1797]\"\r\nCP-13.zip,1,CP,1514,4130,61,2,\"[4129, 4130]\"\r\nCP-30.zip,1,CP,4013,5562,29,1,[5562]\r\nCP-7.zip,1,CP,13,3173,255,4,\"[3170, 3171, 3172, 3173]\"\r\nCP-5.zip,1,CP,1214,3432,282,1,[3432]\r\nNormal-8.zip,0,Normal,1878,333,88,1,[333]\r\nNCP-7.zip,2,NCP,246,1638,139,2,\"[1638, 1639]\"\r\nNormal-21.zip,0,Normal,2297,752,83,1,[752]"
  },
  {
    "path": "Finetune/CC-CCII/csv/CC_CCII_fold1_train.csv",
    "content": "zip_file,target,label,patient_id,scan_id,n_slice,scan_count,all_scan_ids\r\nCP-6.zip,1,CP,1229,3447,144,1,[3447]\r\nCP-26.zip,1,CP,3718,5647,51,2,\"[5647, 5648]\"\r\nCP-3.zip,1,CP,1148,3366,158,1,[3366]\r\nCP-5.zip,1,CP,1200,3418,309,1,[3418]\r\nCP-1.zip,1,CP,1088,3221,54,4,\"[3220, 3221, 3222, 3223]\"\r\nCP-21.zip,1,CP,585,2947,94,1,[2947]\r\nCP-18.zip,1,CP,1772,3178,72,6,\"[3176, 3177, 3178, 3179, 3180, 3181]\"\r\nCP-1.zip,1,CP,1078,3123,68,1,[3123]\r\nCP-12.zip,1,CP,1473,4028,51,3,\"[4026, 4027, 4028]\"\r\nCP-15.zip,1,CP,1559,4237,53,2,\"[4237, 4238]\"\r\nCP-7.zip,1,CP,1259,3477,162,1,[3477]\r\nCP-14.zip,1,CP,1541,4194,142,3,\"[4194, 4195, 4196]\"\r\nCP-18.zip,1,CP,1658,4346,29,1,[4346]\r\nCP-9.zip,1,CP,1373,3801,55,2,\"[3800, 3801]\"\r\nCP-12.zip,1,CP,1456,3990,52,3,\"[3988, 3989, 3990]\"\r\nCP-9.zip,1,CP,1367,3787,58,3,\"[3785, 3786, 3787]\"\r\nCP-1.zip,1,CP,1097,3315,119,1,[3315]\r\nCP-11.zip,1,CP,1438,3944,46,2,\"[3944, 3945]\"\r\nNormal-2.zip,0,Normal,1759,1115,64,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nCP-14.zip,1,CP,1523,4150,65,2,\"[4150, 4151]\"\r\nCP-19.zip,1,CP,2447,2923,83,2,\"[2923, 2924]\"\r\nCP-19.zip,1,CP,1788,3203,57,8,\"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]\"\r\nCP-5.zip,1,CP,1220,3438,200,1,[3438]\r\nCP-16.zip,1,CP,1593,4281,22,1,[4281]\r\nNormal-18.zip,0,Normal,2200,655,94,1,[655]\r\nCP-28.zip,1,CP,3784,5728,29,1,[5728]\r\nCP-2.zip,1,CP,1109,3327,210,1,[3327]\r\nCP-19.zip,1,CP,2444,2918,124,2,\"[2918, 2919]\"\r\nNormal-2.zip,0,Normal,1760,1122,137,4,\"[1121, 1122, 1123, 1124]\"\r\nCP-12.zip,1,CP,1476,4033,106,2,\"[4033, 4034]\"\r\nCP-14.zip,1,CP,1538,4186,66,3,\"[4185, 4186, 4187]\"\r\nNormal-13.zip,0,Normal,2046,501,79,1,[501]\r\nCP-15.zip,1,CP,1565,4250,66,2,\"[4250, 4251]\"\r\nCP-10.zip,1,CP,1407,3876,58,2,\"[3876, 3877]\"\r\nNormal-27.zip,0,Normal,3905,5437,288,2,\"[5437, 5438]\"\r\nNCP-13.zip,2,NCP,36,1204,141,2,\"[1204, 
1205]\"\r\nNCP-30.zip,2,NCP,941,2484,169,1,[2484]\r\nNormal-2.zip,0,Normal,1758,1109,291,2,\"[1109, 1110]\"\r\nCP-8.zip,1,CP,1342,3723,139,3,\"[3723, 3724, 3725]\"\r\nCP-3.zip,1,CP,1132,3350,180,1,[3350]\r\nCP-18.zip,1,CP,1773,3184,67,4,\"[3182, 3183, 3184, 3185]\"\r\nNCP-17.zip,2,NCP,464,2083,60,2,\"[2082, 2083]\"\r\nNCP-16.zip,2,NCP,447,2048,139,2,\"[2048, 2049]\"\r\nNCP-3.zip,2,NCP,136,1416,126,2,\"[1416, 1417]\"\r\nNCP-18.zip,2,NCP,501,2158,146,2,\"[2158, 2159]\"\r\nCP-19.zip,1,CP,2439,2909,409,1,[2909]\r\nNCP-19.zip,2,NCP,538,2233,142,2,\"[2233, 2234]\"\r\nNormal-27.zip,0,Normal,3907,5440,63,2,\"[5440, 5441]\"\r\nCP-18.zip,1,CP,1773,3182,61,4,\"[3182, 3183, 3184, 3185]\"\r\nCP-8.zip,1,CP,1320,3677,62,2,\"[3676, 3677]\"\r\nCP-9.zip,1,CP,1366,3782,138,3,\"[3782, 3783, 3784]\"\r\nCP-7.zip,1,CP,1309,3651,49,2,\"[3651, 3652]\"\r\nNCP-18.zip,2,NCP,492,2140,139,2,\"[2140, 2141]\"\r\nNCP-21.zip,2,NCP,69,1271,48,2,\"[1270, 1271]\"\r\nCP-13.zip,1,CP,1515,4131,137,3,\"[4131, 4132, 4133]\"\r\nNormal-11.zip,0,Normal,1980,435,83,1,[435]\r\nNormal-14.zip,0,Normal,2073,528,87,1,[528]\r\nCP-3.zip,1,CP,1149,3367,157,1,[3367]\r\nNCP-14.zip,2,NCP,376,1905,60,2,\"[1904, 1905]\"\r\nNCP-8.zip,2,NCP,253,1653,58,2,\"[1652, 1653]\"\r\nNCP-27.zip,2,NCP,1061,2638,75,1,[2638]\r\nNormal-9.zip,0,Normal,1921,376,80,1,[376]\r\nNCP-16.zip,2,NCP,453,2061,51,2,\"[2060, 2061]\"\r\nNCP-10.zip,2,NCP,275,1697,64,2,\"[1696, 1697]\"\r\nCP-24.zip,1,CP,708,3070,80,1,[3070]\r\nNCP-20.zip,2,NCP,560,2277,124,2,\"[2277, 2279]\"\r\nNCP-6.zip,2,NCP,207,1558,109,2,\"[1558, 1559]\"\r\nNCP-2.zip,2,NCP,114,1370,53,2,\"[1369, 1370]\"\r\nCP-10.zip,1,CP,1407,3877,58,2,\"[3876, 3877]\"\r\nNormal-1.zip,0,Normal,1682,858,70,6,\"[847, 848, 852, 853, 857, 858]\"\r\nCP-14.zip,1,CP,1548,4214,51,2,\"[4213, 4214]\"\r\nNormal-2.zip,0,Normal,1760,1124,74,4,\"[1121, 1122, 1123, 1124]\"\r\nNCP-14.zip,2,NCP,374,1900,58,2,\"[1899, 1900]\"\r\nNCP-7.zip,2,NCP,2486,2645,50,1,[2645]\r\nNCP-19.zip,2,NCP,542,2242,55,2,\"[2241, 
2242]\"\r\nNormal-25.zip,0,Normal,3836,5348,202,1,[5348]\r\nNormal-11.zip,0,Normal,1961,416,91,1,[416]\r\nNCP-27.zip,2,NCP,819,2329,33,1,[2329]\r\nNCP-5.zip,2,NCP,184,1512,112,2,\"[1512, 1513]\"\r\nNCP-15.zip,2,NCP,416,1984,139,2,\"[1984, 1986]\"\r\nCP-14.zip,1,CP,1538,4187,65,3,\"[4185, 4186, 4187]\"\r\nCP-8.zip,1,CP,1351,3746,56,1,[3746]\r\nNCP-10.zip,2,NCP,281,1709,51,2,\"[1708, 1709]\"\r\nCP-10.zip,1,CP,1415,3895,65,3,\"[3894, 3895, 3896]\"\r\nNormal-1.zip,0,Normal,1682,848,67,6,\"[847, 848, 852, 853, 857, 858]\"\r\nNCP-17.zip,2,NCP,485,2126,64,2,\"[2125, 2126]\"\r\nNCP-18.zip,2,NCP,501,2159,61,2,\"[2158, 2159]\"\r\nNormal-8.zip,0,Normal,1863,318,82,1,[318]\r\nCP-18.zip,1,CP,1772,3176,81,6,\"[3176, 3177, 3178, 3179, 3180, 3181]\"\r\nCP-26.zip,1,CP,3652,5551,53,2,\"[5551, 5552]\"\r\nNormal-5.zip,0,Normal,808,243,134,1,[243]\r\nCP-28.zip,1,CP,3771,5715,23,1,[5715]\r\nCP-26.zip,1,CP,3637,5596,35,1,[5596]\r\nCP-12.zip,1,CP,1455,3987,58,3,\"[3985, 3986, 3987]\"\r\nCP-8.zip,1,CP,1336,3712,60,2,\"[3712, 3713]\"\r\nCP-30.zip,1,CP,4015,5564,226,1,[5564]\r\nNormal-8.zip,0,Normal,1883,338,91,1,[338]\r\nNormal-3.zip,0,Normal,1764,1145,62,4,\"[1143, 1144, 1145, 1146]\"\r\nNCP-15.zip,2,NCP,42,1218,61,2,\"[1216, 1218]\"\r\nNCP-7.zip,2,NCP,245,1636,149,2,\"[1636, 1637]\"\r\nNormal-14.zip,0,Normal,2066,521,74,1,[521]\r\nNormal-20.zip,0,Normal,2275,730,85,1,[730]\r\nNCP-8.zip,2,NCP,268,1682,126,2,\"[1682, 1683]\"\r\nCP-7.zip,1,CP,1307,3647,49,4,\"[3645, 3646, 3647, 3648]\"\r\nNormal-15.zip,0,Normal,2106,561,93,1,[561]\r\nCP-20.zip,1,CP,2772,3303,261,1,[3303]\r\nNCP-25.zip,2,NCP,3970,5479,48,1,[5479]\r\nCP-28.zip,1,CP,3772,5716,23,1,[5716]\r\nNCP-5.zip,2,NCP,175,1494,131,2,\"[1494, 1495]\"\r\nNCP-18.zip,2,NCP,507,2171,58,2,\"[2170, 2171]\"\r\nNCP-19.zip,2,NCP,537,2231,143,2,\"[2231, 2232]\"\r\nNormal-1.zip,0,Normal,1728,1014,66,4,\"[1013, 1014, 1015, 1016]\"\r\nNormal-23.zip,0,Normal,2608,118,25,1,[118]\r\nNCP-23.zip,2,NCP,90,1317,43,2,\"[1316, 
1317]\"\r\nNCP-2.zip,2,NCP,123,1388,62,2,\"[1387, 1388]\"\r\nNCP-18.zip,2,NCP,507,2170,138,2,\"[2170, 2171]\"\r\nNCP-14.zip,2,NCP,395,1940,171,2,\"[1940, 1941]\"\r\nNCP-23.zip,2,NCP,946,2489,26,1,[2489]\r\nCP-7.zip,1,CP,1308,3649,43,2,\"[3649, 3650]\"\r\nNCP-17.zip,2,NCP,462,2078,161,2,\"[2078, 2079]\"\r\nNormal-16.zip,0,Normal,2145,600,86,1,[600]\r\nNCP-20.zip,2,NCP,560,2279,51,2,\"[2277, 2279]\"\r\nCP-30.zip,1,CP,3931,5630,82,4,\"[5630, 5631, 5632, 5633]\"\r\nCP-13.zip,1,CP,1501,4101,55,2,\"[4100, 4101]\"\r\nCP-1.zip,1,CP,1,3144,248,5,\"[3143, 3144, 3145, 3146, 3147]\"\r\nCP-25.zip,1,CP,713,3075,120,1,[3075]\r\nCP-15.zip,1,CP,1562,4244,55,2,\"[4243, 4244]\"\r\nCP-26.zip,1,CP,3643,5602,298,2,\"[5602, 5603]\"\r\nCP-27.zip,1,CP,3748,5692,17,1,[5692]\r\nCP-14.zip,1,CP,1524,4152,229,3,\"[4152, 4153, 4154]\"\r\nNormal-6.zip,0,Normal,1800,255,92,1,[255]\r\nNormal-1.zip,0,Normal,1711,978,63,2,\"[977, 978]\"\r\nNormal-17.zip,0,Normal,2157,612,78,1,[612]\r\nCP-8.zip,1,CP,1334,3707,133,2,\"[3707, 3708]\"\r\nNCP-19.zip,2,NCP,545,2247,135,2,\"[2247, 2248]\"\r\nCP-28.zip,1,CP,3790,5734,23,1,[5734]\r\nNCP-30.zip,2,NCP,993,2546,203,1,[2546]\r\nNCP-9.zip,2,NCP,2689,2656,47,1,[2656]\r\nNormal-27.zip,0,Normal,3907,5441,66,2,\"[5440, 5441]\"\r\nCP-26.zip,1,CP,3652,5552,52,2,\"[5551, 5552]\"\r\nNCP-11.zip,2,NCP,287,1719,142,2,\"[1719, 1720]\"\r\nNCP-2.zip,2,NCP,114,1369,125,2,\"[1369, 1370]\"\r\nNCP-21.zip,2,NCP,581,2320,58,2,\"[2319, 2320]\"\r\nNormal-26.zip,0,Normal,3887,5404,78,3,\"[5400, 5401, 5404]\"\r\nNCP-12.zip,2,NCP,325,1799,50,2,\"[1798, 1799]\"\r\nNCP-27.zip,2,NCP,1060,2637,81,1,[2637]\r\nCP-13.zip,1,CP,1516,4135,62,2,\"[4134, 4135]\"\r\nCP-15.zip,1,CP,1580,4268,21,1,[4268]\r\nNCP-15.zip,2,NCP,428,2009,125,2,\"[2009, 2010]\"\r\nNCP-19.zip,2,NCP,52,1237,135,2,\"[1237, 1238]\"\r\nNCP-9.zip,2,NCP,2691,2658,44,1,[2658]\r\nNCP-12.zip,2,NCP,34,1200,156,2,\"[1200, 1201]\"\r\nNCP-19.zip,2,NCP,539,2235,131,2,\"[2235, 2236]\"\r\nNormal-1.zip,0,Normal,1728,1015,72,4,\"[1013, 1014, 
1015, 1016]\"\r\nNCP-6.zip,2,NCP,222,1588,122,2,\"[1588, 1589]\"\r\nNCP-10.zip,2,NCP,273,1693,54,2,\"[1692, 1693]\"\r\nCP-29.zip,1,CP,3822,5766,20,1,[5766]\r\nCP-10.zip,1,CP,1401,3864,51,3,\"[3862, 3863, 3864]\"\r\nNormal-13.zip,0,Normal,2030,485,66,1,[485]\r\nNCP-4.zip,2,NCP,164,1473,63,2,\"[1472, 1473]\"\r\nCP-21.zip,1,CP,3,3504,35,1,[3504]\r\nCP-9.zip,1,CP,1368,3788,69,2,\"[3788, 3789]\"\r\nNormal-1.zip,0,Normal,1704,963,69,4,\"[961, 962, 963, 964]\"\r\nCP-12.zip,1,CP,1466,4012,52,2,\"[4011, 4012]\"\r\nNormal-11.zip,0,Normal,1971,426,100,1,[426]\r\nNCP-16.zip,2,NCP,450,2055,34,2,\"[2054, 2055]\"\r\nNCP-30.zip,2,NCP,962,2505,38,1,[2505]\r\nNCP-8.zip,2,NCP,2675,2648,44,1,[2648]\r\nNCP-25.zip,2,NCP,3955,5468,46,1,[5468]\r\nNCP-18.zip,2,NCP,488,2131,139,2,\"[2131, 2133]\"\r\nCP-12.zip,1,CP,1484,4053,181,3,\"[4053, 4054, 4055]\"\r\nCP-9.zip,1,CP,1368,3789,69,2,\"[3788, 3789]\"\r\nNCP-10.zip,2,NCP,28,1188,145,2,\"[1188, 1189]\"\r\nCP-30.zip,1,CP,3931,5631,82,4,\"[5630, 5631, 5632, 5633]\"\r\nNCP-10.zip,2,NCP,277,1701,64,2,\"[1700, 1701]\"\r\nNCP-4.zip,2,NCP,148,1441,63,2,\"[1440, 1441]\"\r\nCP-12.zip,1,CP,1481,4044,139,3,\"[4044, 4045, 4046]\"\r\nNormal-21.zip,0,Normal,2288,743,96,1,[743]\r\nCP-30.zip,1,CP,4017,5566,41,1,[5566]\r\nCP-13.zip,1,CP,1499,4098,53,2,\"[4097, 4098]\"\r\nCP-13.zip,1,CP,1516,4134,62,2,\"[4134, 4135]\"\r\nNormal-13.zip,0,Normal,2049,504,88,1,[504]\r\nCP-18.zip,1,CP,1772,3179,72,6,\"[3176, 3177, 3178, 3179, 3180, 3181]\"\r\nNCP-20.zip,2,NCP,57,1248,56,2,\"[1247, 1248]\"\r\nNormal-1.zip,0,Normal,1704,961,71,4,\"[961, 962, 963, 964]\"\r\nCP-9.zip,1,CP,1366,3783,57,3,\"[3782, 3783, 3784]\"\r\nCP-32.zip,1,CP,2464,3228,66,1,[3228]\r\nCP-15.zip,1,CP,1555,4228,62,2,\"[4228, 4229]\"\r\nNormal-3.zip,0,Normal,758,193,122,1,[193]\r\nNCP-12.zip,2,NCP,329,1806,157,2,\"[1806, 1807]\"\r\nCP-7.zip,1,CP,1307,3646,259,4,\"[3645, 3646, 3647, 3648]\"\r\nCP-26.zip,1,CP,3722,5657,205,2,\"[5656, 5657]\"\r\nNCP-14.zip,2,NCP,382,1916,139,2,\"[1916, 
1917]\"\r\nCP-27.zip,1,CP,3752,5696,20,1,[5696]\r\nNormal-16.zip,0,Normal,2129,584,75,1,[584]\r\nNCP-13.zip,2,NCP,367,1885,158,2,\"[1885, 1886]\"\r\nNCP-6.zip,2,NCP,204,1553,58,2,\"[1552, 1553]\"\r\nCP-30.zip,1,CP,3918,5542,71,1,[5542]\r\nNormal-11.zip,0,Normal,1979,434,87,1,[434]\r\nNormal-2.zip,0,Normal,1741,1053,61,2,\"[1053, 1054]\"\r\nNormal-10.zip,0,Normal,1945,400,87,1,[400]\r\nNormal-26.zip,0,Normal,3882,5394,27,1,[5394]\r\nCP-20.zip,1,CP,2456,2940,126,1,[2940]\r\nNCP-5.zip,2,NCP,184,1513,48,2,\"[1512, 1513]\"\r\nNCP-9.zip,2,NCP,2693,2659,49,1,[2659]\r\nCP-8.zip,1,CP,1348,3739,197,3,\"[3739, 3740, 3741]\"\r\nNormal-18.zip,0,Normal,2214,669,102,1,[669]\r\nCP-10.zip,1,CP,1415,3896,65,3,\"[3894, 3895, 3896]\"\r\nNCP-3.zip,2,NCP,1290,2731,66,1,[2731]\r\nNormal-2.zip,0,Normal,1759,1111,62,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nNCP-15.zip,2,NCP,401,1951,139,2,\"[1951, 1952]\"\r\nCP-7.zip,1,CP,1309,3652,49,2,\"[3651, 3652]\"\r\nNormal-4.zip,0,Normal,787,222,320,1,[222]\r\nNCP-20.zip,2,NCP,550,2258,60,2,\"[2257, 2258]\"\r\nNCP-5.zip,2,NCP,195,1534,143,2,\"[1534, 1535]\"\r\nNCP-13.zip,2,NCP,367,1886,66,2,\"[1885, 1886]\"\r\nNCP-19.zip,2,NCP,530,2218,132,1,[2218]\r\nNormal-6.zip,0,Normal,1811,266,95,1,[266]\r\nNCP-30.zip,2,NCP,963,2506,21,1,[2506]\r\nNormal-2.zip,0,Normal,1759,1112,62,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nNCP-13.zip,2,NCP,369,1890,58,2,\"[1889, 1890]\"\r\nNCP-16.zip,2,NCP,457,2068,134,2,\"[2068, 2069]\"\r\nNCP-26.zip,2,NCP,3981,5488,45,1,[5488]\r\nNCP-22.zip,2,NCP,816,2325,50,1,[2325]\r\nNormal-1.zip,0,Normal,1730,1019,63,5,\"[1019, 1020, 1021, 1022, 1023]\"\r\nNCP-15.zip,2,NCP,419,1991,130,2,\"[1991, 1992]\"\r\nCP-30.zip,1,CP,4016,5565,37,1,[5565]\r\nCP-24.zip,1,CP,694,3056,135,1,[3056]\r\nNCP-17.zip,2,NCP,470,2095,154,2,\"[2095, 2096]\"\r\nNormal-4.zip,0,Normal,781,216,118,1,[216]\r\nCP-13.zip,1,CP,1497,4093,68,3,\"[4092, 4093, 
4094]\"\r\nNCP-26.zip,2,NCP,3991,5515,43,1,[5515]\r\nCP-8.zip,1,CP,1331,3701,62,2,\"[3701, 3702]\"\r\nNormal-9.zip,0,Normal,1910,365,91,1,[365]\r\nNCP-27.zip,2,NCP,820,2330,34,1,[2330]\r\nCP-7.zip,1,CP,13,3171,65,4,\"[3170, 3171, 3172, 3173]\"\r\nCP-20.zip,1,CP,2764,3295,39,1,[3295]\r\nNormal-1.zip,0,Normal,1714,984,71,3,\"[982, 983, 984]\"\r\nCP-13.zip,1,CP,1501,4100,55,2,\"[4100, 4101]\"\r\nNormal-15.zip,0,Normal,2117,572,87,1,[572]\r\nCP-30.zip,1,CP,3929,5627,70,2,\"[5626, 5627]\"\r\nNCP-4.zip,2,NCP,158,1461,52,2,\"[1460, 1461]\"\r\nCP-10.zip,1,CP,1389,3834,52,3,\"[3833, 3834, 3835]\"\r\nCP-13.zip,1,CP,1497,4094,68,3,\"[4092, 4093, 4094]\"\r\nCP-10.zip,1,CP,1415,3894,155,3,\"[3894, 3895, 3896]\"\r\nCP-30.zip,1,CP,4014,5563,35,1,[5563]\r\nNCP-17.zip,2,NCP,462,2079,67,2,\"[2078, 2079]\"\r\nCP-29.zip,1,CP,3803,5747,23,1,[5747]\r\nCP-1.zip,1,CP,1,3143,300,5,\"[3143, 3144, 3145, 3146, 3147]\"\r\nNCP-11.zip,2,NCP,305,1756,157,2,\"[1756, 1758]\"\r\nNormal-1.zip,0,Normal,1668,781,63,4,\"[778, 779, 780, 781]\"\r\nCP-4.zip,1,CP,1174,3392,175,1,[3392]\r\nNormal-14.zip,0,Normal,2060,515,77,1,[515]\r\nNormal-22.zip,0,Normal,2602,112,32,1,[112]\r\nCP-14.zip,1,CP,1541,4196,58,3,\"[4194, 4195, 4196]\"\r\nNormal-12.zip,0,Normal,2019,474,87,1,[474]\r\nCP-25.zip,1,CP,733,3095,84,1,[3095]\r\nCP-13.zip,1,CP,1499,4097,53,2,\"[4097, 4098]\"\r\nCP-19.zip,1,CP,1788,3201,55,8,\"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]\"\r\nNormal-1.zip,0,Normal,1704,964,69,4,\"[961, 962, 963, 964]\"\r\nNCP-3.zip,2,NCP,1289,2730,62,1,[2730]\r\nNCP-20.zip,2,NCP,567,2292,148,2,\"[2292, 2293]\"\r\nNormal-13.zip,0,Normal,2027,482,89,1,[482]\r\nNormal-2.zip,0,Normal,1759,1114,59,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nNormal-27.zip,0,Normal,3897,5424,75,4,\"[5423, 5424, 5426, 5427]\"\r\nNormal-22.zip,0,Normal,2317,772,77,1,[772]\r\nNormal-2.zip,0,Normal,1758,1110,59,2,\"[1109, 1110]\"\r\nNCP-2.zip,2,NCP,121,1384,43,2,\"[1383, 
1384]\"\r\nNCP-13.zip,2,NCP,356,1864,53,2,\"[1863, 1864]\"\r\nNormal-2.zip,0,Normal,1760,1121,85,4,\"[1121, 1122, 1123, 1124]\"\r\nCP-29.zip,1,CP,3825,5769,25,1,[5769]\r\nNCP-17.zip,2,NCP,46,1226,52,2,\"[1225, 1226]\"\r\nNCP-19.zip,2,NCP,53,1240,60,2,\"[1239, 1240]\"\r\nNCP-12.zip,2,NCP,314,1776,58,2,\"[1775, 1776]\"\r\nNormal-21.zip,0,Normal,2290,745,88,1,[745]\r\nNormal-2.zip,0,Normal,1760,1123,74,4,\"[1121, 1122, 1123, 1124]\"\r\nNormal-24.zip,0,Normal,2666,176,35,1,[176]\r\nCP-8.zip,1,CP,1346,3735,53,3,\"[3733, 3734, 3735]\"\r\nNormal-17.zip,0,Normal,2164,619,84,1,[619]\r\nNCP-8.zip,2,NCP,2672,2647,47,1,[2647]\r\nNCP-2.zip,2,NCP,1277,2718,57,1,[2718]\r\nCP-9.zip,1,CP,1370,3793,62,2,\"[3792, 3793]\"\r\nNormal-10.zip,0,Normal,1928,383,87,1,[383]\r\nCP-21.zip,1,CP,598,2960,646,1,[2960]\r\nCP-20.zip,1,CP,2755,3286,34,1,[3286]\r\nNormal-16.zip,0,Normal,2141,596,100,1,[596]\r\nCP-14.zip,1,CP,1544,4205,50,3,\"[4203, 4204, 4205]\"\r\nNCP-9.zip,2,NCP,270,1687,62,2,\"[1686, 1687]\"\r\nCP-5.zip,1,CP,1222,3440,157,1,[3440]\r\nCP-19.zip,1,CP,1791,3210,100,4,\"[3210, 3211, 3212, 3213]\"\r\nNCP-16.zip,2,NCP,450,2054,78,2,\"[2054, 2055]\"\r\nNCP-20.zip,2,NCP,557,2272,56,2,\"[2271, 2272]\"\r\nNCP-3.zip,2,NCP,1284,2725,50,1,[2725]\r\nCP-5.zip,1,CP,1205,3423,146,1,[3423]\r\nNCP-1.zip,2,NCP,1017,2583,452,1,[2583]\r\nNormal-2.zip,0,Normal,1736,1033,25,5,\"[1032, 1033, 1034, 1035, 1036]\"\r\nNCP-1.zip,2,NCP,1021,2589,183,4,\"[2587, 2588, 2589, 2590]\"\r\nNCP-7.zip,2,NCP,232,1608,146,2,\"[1608, 1609]\"\r\nNCP-23.zip,2,NCP,950,2493,34,1,[2493]\r\nCP-6.zip,1,CP,1246,3464,175,1,[3464]\r\nNormal-1.zip,0,Normal,1671,794,67,3,\"[793, 794, 795]\"\r\nCP-1.zip,1,CP,1095,3313,161,1,[3313]\r\nCP-25.zip,1,CP,714,3076,98,1,[3076]\r\nNCP-17.zip,2,NCP,479,2114,58,2,\"[2113, 2114]\"\r\nNormal-24.zip,0,Normal,2652,162,35,1,[162]\r\nNormal-2.zip,0,Normal,1752,1085,66,1,[1085]\r\nNormal-24.zip,0,Normal,2638,148,38,1,[148]\r\nNCP-8.zip,2,NCP,260,1666,163,2,\"[1666, 
1667]\"\r\nNCP-18.zip,2,NCP,509,2175,58,2,\"[2174, 2175]\"\r\nNormal-14.zip,0,Normal,2079,534,92,1,[534]\r\nNormal-3.zip,0,Normal,751,186,119,1,[186]\r\nNCP-8.zip,2,NCP,263,1673,74,2,\"[1672, 1673]\"\r\nCP-22.zip,1,CP,626,2988,174,1,[2988]\r\nNormal-23.zip,0,Normal,2619,129,43,1,[129]\r\nCP-1.zip,1,CP,1069,3109,77,4,\"[3108, 3109, 3110, 3111]\"\r\nNCP-13.zip,2,NCP,360,1872,51,2,\"[1871, 1872]\"\r\nNCP-23.zip,2,NCP,915,2457,31,1,[2457]\r\nNCP-3.zip,2,NCP,131,1407,117,2,\"[1407, 1408]\"\r\nNCP-21.zip,2,NCP,79,1292,55,2,\"[1291, 1292]\"\r\nNormal-4.zip,0,Normal,779,214,290,1,[214]\r\nCP-27.zip,1,CP,3734,5676,32,3,\"[5676, 5677, 5678]\"\r\nNormal-15.zip,0,Normal,2104,559,101,1,[559]\r\nCP-5.zip,1,CP,1218,3436,213,1,[3436]\r\nNCP-3.zip,2,NCP,1291,2732,55,1,[2732]\r\nNCP-19.zip,2,NCP,537,2232,60,2,\"[2231, 2232]\"\r\nNCP-21.zip,2,NCP,71,1274,126,2,\"[1274, 1275]\"\r\nNCP-5.zip,2,NCP,195,1535,60,2,\"[1534, 1535]\"\r\nCP-9.zip,1,CP,1359,3766,46,3,\"[3764, 3765, 3766]\"\r\nNCP-2.zip,2,NCP,119,1380,62,2,\"[1379, 1380]\"\r\nNormal-19.zip,0,Normal,2241,696,86,1,[696]\r\nNormal-15.zip,0,Normal,2112,567,84,1,[567]\r\nNCP-20.zip,2,NCP,569,2296,142,2,\"[2296, 2297]\"\r\nNCP-9.zip,2,NCP,2700,2666,43,1,[2666]\r\nNCP-9.zip,2,NCP,2697,2663,46,1,[2663]\r\nCP-29.zip,1,CP,3809,5753,19,1,[5753]\r\nNCP-10.zip,2,NCP,2718,2674,42,1,[2674]\r\nNormal-1.zip,0,Normal,1668,778,60,4,\"[778, 779, 780, 781]\"\r\nNCP-18.zip,2,NCP,509,2174,138,2,\"[2174, 2175]\"\r\nNCP-16.zip,2,NCP,456,2066,135,2,\"[2066, 2067]\"\r\nNCP-5.zip,2,NCP,187,1519,57,2,\"[1518, 1519]\"\r\nNCP-20.zip,2,NCP,57,1247,132,2,\"[1247, 1248]\"\r\nNormal-1.zip,0,Normal,1715,986,71,2,\"[985, 986]\"\r\nNormal-2.zip,0,Normal,1749,1069,61,4,\"[1069, 1070, 1071, 1072]\"\r\nNCP-24.zip,2,NCP,984,2530,241,2,\"[2529, 2530]\"\r\nNormal-1.zip,0,Normal,1682,847,67,6,\"[847, 848, 852, 853, 857, 858]\"\r\nCP-1.zip,1,CP,1069,3110,77,4,\"[3108, 3109, 3110, 
3111]\"\r\nNormal-12.zip,0,Normal,2017,472,99,1,[472]\r\nCP-10.zip,1,CP,1400,3861,54,2,\"[3860, 3861]\"\r\nNCP-22.zip,2,NCP,881,2416,225,1,[2416]\r\nCP-11.zip,1,CP,1420,3906,59,2,\"[3905, 3906]\"\r\nNCP-6.zip,2,NCP,20,1172,127,2,\"[1172, 1173]\"\r\nNCP-28.zip,2,NCP,846,2364,269,1,[2364]\r\nNormal-14.zip,0,Normal,2075,530,93,1,[530]\r\nCP-6.zip,1,CP,1238,3456,191,1,[3456]\r\nCP-7.zip,1,CP,1263,3481,120,1,[3481]\r\nCP-1.zip,1,CP,1088,3220,54,4,\"[3220, 3221, 3222, 3223]\"\r\nCP-8.zip,1,CP,1320,3676,62,2,\"[3676, 3677]\"\r\nNCP-15.zip,2,NCP,426,2005,139,2,\"[2005, 2006]\"\r\nNCP-28.zip,2,NCP,869,2397,58,1,[2397]\r\nNCP-11.zip,2,NCP,288,1721,114,2,\"[1721, 1722]\"\r\nNCP-21.zip,2,NCP,581,2319,139,2,\"[2319, 2320]\"\r\nNormal-26.zip,0,Normal,3878,5390,24,1,[5390]\r\nNormal-13.zip,0,Normal,2041,496,95,1,[496]\r\nNormal-25.zip,0,Normal,3845,5357,182,1,[5357]\r\nNormal-22.zip,0,Normal,2599,109,39,1,[109]\r\nNormal-4.zip,0,Normal,789,224,120,1,[224]\r\nNormal-1.zip,0,Normal,1714,982,40,3,\"[982, 983, 984]\"\r\nNCP-16.zip,2,NCP,434,2022,51,2,\"[2021, 2022]\"\r\nNCP-28.zip,2,NCP,830,2343,120,1,[2343]\r\nNormal-1.zip,0,Normal,1704,962,71,4,\"[961, 962, 963, 964]\"\r\nNCP-5.zip,2,NCP,196,1537,55,2,\"[1536, 1537]\"\r\nCP-8.zip,1,CP,1336,3713,60,2,\"[3712, 3713]\"\r\nNCP-29.zip,2,NCP,895,2436,140,2,\"[2435, 2436]\"\r\nNCP-29.zip,2,NCP,930,2472,23,1,[2472]\r\nCP-12.zip,1,CP,1482,4047,181,3,\"[4047, 4048, 4049]\"\r\nCP-10.zip,1,CP,1401,3862,201,3,\"[3862, 3863, 3864]\"\r\nNCP-5.zip,2,NCP,182,1509,55,2,\"[1508, 1509]\"\r\nCP-12.zip,1,CP,1483,4050,148,3,\"[4050, 4051, 4052]\"\r\nNormal-2.zip,0,Normal,1741,1054,61,2,\"[1053, 1054]\"\r\nCP-8.zip,1,CP,1324,3684,58,2,\"[3684, 3685]\"\r\nNCP-9.zip,2,NCP,2681,2696,58,1,[2696]\r\nCP-9.zip,1,CP,1367,3786,58,3,\"[3785, 3786, 3787]\"\r\nCP-19.zip,1,CP,1790,3209,69,2,\"[3208, 3209]\"\r\nCP-11.zip,1,CP,1430,3928,77,2,\"[3928, 
3929]\"\r\nNormal-18.zip,0,Normal,2207,662,99,1,[662]\r\nNormal-11.zip,0,Normal,1972,427,97,1,[427]\r\nCP-5.zip,1,CP,1221,3439,295,1,[3439]\r\nNCP-15.zip,2,NCP,42,1216,146,2,\"[1216, 1218]\"\r\nCP-22.zip,1,CP,640,3002,136,1,[3002]\r\nNCP-7.zip,2,NCP,245,1637,62,2,\"[1636, 1637]\"\r\nNCP-6.zip,2,NCP,215,1574,155,2,\"[1574, 1575]\"\r\nNCP-29.zip,2,NCP,903,2445,87,1,[2445]\r\nNCP-7.zip,2,NCP,232,1609,61,2,\"[1608, 1609]\"\r\nNCP-2.zip,2,NCP,119,1379,147,2,\"[1379, 1380]\"\r\nNormal-2.zip,0,Normal,1739,1042,278,3,\"[1042, 1043, 1044]\"\r\nCP-28.zip,1,CP,3791,5735,26,1,[5735]\r\nNCP-27.zip,2,NCP,828,2341,45,1,[2341]\r\nNCP-12.zip,2,NCP,314,1775,139,2,\"[1775, 1776]\"\r\nNCP-6.zip,2,NCP,20,1173,54,2,\"[1172, 1173]\"\r\nCP-13.zip,1,CP,1490,4073,69,3,\"[4071, 4072, 4073]\"\r\nNCP-20.zip,2,NCP,569,2297,60,2,\"[2296, 2297]\"\r\nNormal-2.zip,0,Normal,1759,1113,59,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nNCP-30.zip,2,NCP,987,2536,71,2,\"[2536, 2537]\"\r\nCP-15.zip,1,CP,1579,4267,20,1,[4267]\r\nCP-19.zip,1,CP,1790,3208,69,2,\"[3208, 3209]\"\r\nNCP-20.zip,2,NCP,568,2295,61,2,\"[2294, 2295]\"\r\nNormal-13.zip,0,Normal,2036,491,102,1,[491]\r\nNCP-26.zip,2,NCP,3973,5482,48,1,[5482]\r\nCP-27.zip,1,CP,3743,5687,22,1,[5687]\r\nNormal-11.zip,0,Normal,1981,436,91,1,[436]\r\nNCP-2.zip,2,NCP,125,1391,127,5,\"[1391, 1392, 1393, 1394, 1395]\"\r\nNCP-9.zip,2,NCP,270,1686,147,2,\"[1686, 1687]\"\r\nNormal-23.zip,0,Normal,2636,146,42,1,[146]\r\nNCP-3.zip,2,NCP,1286,2727,64,1,[2727]\r\nCP-10.zip,1,CP,1386,3828,66,2,\"[3827, 3828]\"\r\nNormal-19.zip,0,Normal,2230,685,91,1,[685]\r\nNormal-5.zip,0,Normal,805,240,327,1,[240]\r\nNormal-26.zip,0,Normal,3891,5412,62,2,\"[5411, 5412]\"\r\nNCP-27.zip,2,NCP,2671,2691,51,1,[2691]\r\nNCP-27.zip,2,NCP,1059,2636,52,1,[2636]\r\nCP-8.zip,1,CP,1344,3730,58,3,\"[3728, 3729, 
3730]\"\r\nNormal-24.zip,0,Normal,2662,172,41,1,[172]\r\nNormal-3.zip,0,Normal,744,179,278,1,[179]\r\nCP-5.zip,1,CP,1202,3420,207,1,[3420]\r\nNCP-27.zip,2,NCP,1006,2567,19,2,\"[2566, 2567]\"\r\nNormal-19.zip,0,Normal,2248,703,87,1,[703]\r\nNormal-2.zip,0,Normal,1736,1034,25,5,\"[1032, 1033, 1034, 1035, 1036]\"\r\nNormal-8.zip,0,Normal,1867,322,87,1,[322]\r\nNormal-23.zip,0,Normal,2609,119,40,1,[119]\r\nCP-11.zip,1,CP,1453,3980,56,3,\"[3979, 3980, 3981]\"\r\nNormal-26.zip,0,Normal,3875,5387,24,1,[5387]\r\nCP-26.zip,1,CP,3647,5607,32,1,[5607]\r\nNormal-12.zip,0,Normal,2006,461,77,1,[461]\r\nNormal-6.zip,0,Normal,1827,282,99,1,[282]\r\nNCP-19.zip,2,NCP,533,2224,156,1,[2224]\r\nNCP-11.zip,2,NCP,287,1720,60,2,\"[1719, 1720]\"\r\nNCP-7.zip,2,NCP,2487,2687,38,1,[2687]\r\nCP-3.zip,1,CP,1160,3378,318,1,[3378]\r\nNormal-7.zip,0,Normal,1858,313,95,1,[313]\r\nCP-13.zip,1,CP,1514,4129,61,2,\"[4129, 4130]\"\r\nNCP-20.zip,2,NCP,561,2280,139,2,\"[2280, 2281]\"\r\nCP-14.zip,1,CP,1527,4161,58,3,\"[4160, 4161, 4162]\"\r\nCP-25.zip,1,CP,721,3083,86,1,[3083]\r\nCP-13.zip,1,CP,1496,4091,55,2,\"[4090, 4091]\"\r\nNormal-1.zip,0,Normal,1728,1013,66,4,\"[1013, 1014, 1015, 1016]\"\r\nNCP-12.zip,2,NCP,317,1781,117,2,\"[1781, 1782]\"\r\nCP-19.zip,1,CP,2437,2906,132,3,\"[2905, 2906, 2907]\"\r\nNCP-5.zip,2,NCP,196,1536,131,2,\"[1536, 1537]\"\r\nCP-11.zip,1,CP,1437,3942,57,2,\"[3942, 3943]\"\r\nNCP-5.zip,2,NCP,182,1508,130,2,\"[1508, 1509]\"\r\nCP-9.zip,1,CP,1363,3774,64,2,\"[3774, 3775]\"\r\nCP-10.zip,1,CP,1401,3863,51,3,\"[3862, 3863, 3864]\"\r\nNCP-10.zip,2,NCP,275,1696,153,2,\"[1696, 1697]\"\r\nCP-30.zip,1,CP,3931,5633,68,4,\"[5630, 5631, 5632, 5633]\"\r\nNCP-7.zip,2,NCP,234,1614,58,2,\"[1613, 1614]\"\r\nNCP-3.zip,2,NCP,1296,2737,66,1,[2737]\r\nNCP-11.zip,2,NCP,283,1712,62,1,[1712]\r\nCP-9.zip,1,CP,1363,3775,64,2,\"[3774, 3775]\"\r\nNCP-17.zip,2,NCP,464,2082,144,2,\"[2082, 2083]\"\r\nCP-12.zip,1,CP,1473,4027,51,3,\"[4026, 4027, 
4028]\"\r\nCP-28.zip,1,CP,3781,5725,20,1,[5725]\r\nNCP-14.zip,2,NCP,391,1933,55,2,\"[1932, 1933]\"\r\nNormal-13.zip,0,Normal,2032,487,85,1,[487]\r\nNCP-28.zip,2,NCP,872,2403,183,2,\"[2403, 2404]\"\r\nNCP-17.zip,2,NCP,479,2113,139,2,\"[2113, 2114]\"\r\nNCP-11.zip,2,NCP,305,1758,65,2,\"[1756, 1758]\"\r\nNCP-1.zip,2,NCP,1021,2587,201,4,\"[2587, 2588, 2589, 2590]\"\r\nNCP-30.zip,2,NCP,957,2500,50,1,[2500]\r\nNormal-17.zip,0,Normal,2172,627,91,1,[627]\r\nCP-7.zip,1,CP,1316,3667,147,3,\"[3667, 3668, 3669]\"\r\nNCP-24.zip,2,NCP,971,2514,74,1,[2514]\r\nNCP-18.zip,2,NCP,494,2145,65,2,\"[2144, 2145]\"\r\nNCP-30.zip,2,NCP,987,2537,368,2,\"[2536, 2537]\"\r\nNormal-13.zip,0,Normal,2048,503,94,1,[503]\r\nCP-8.zip,1,CP,1347,3736,265,3,\"[3736, 3737, 3738]\"\r\nNCP-15.zip,2,NCP,41,1215,63,2,\"[1214, 1215]\"\r\nCP-12.zip,1,CP,1456,3989,52,3,\"[3988, 3989, 3990]\"\r\nNCP-21.zip,2,NCP,80,1294,54,2,\"[1293, 1294]\"\r\nCP-29.zip,1,CP,3808,5752,23,1,[5752]\r\nCP-26.zip,1,CP,3732,5671,53,2,\"[5671, 5672]\"\r\nNCP-8.zip,2,NCP,251,1648,131,2,\"[1648, 1649]\"\r\nNormal-2.zip,0,Normal,1755,1099,71,4,\"[1097, 1098, 1099, 1100]\"\r\nNormal-2.zip,0,Normal,1759,1120,66,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nNCP-28.zip,2,NCP,874,2407,341,1,[2407]\r\nNormal-1.zip,0,Normal,1730,1023,59,5,\"[1019, 1020, 1021, 1022, 1023]\"\r\nNormal-8.zip,0,Normal,1891,346,96,1,[346]\r\nCP-4.zip,1,CP,1170,3388,180,1,[3388]\r\nCP-10.zip,1,CP,1395,3849,63,2,\"[3849, 3850]\"\r\nNCP-2.zip,2,NCP,123,1387,148,2,\"[1387, 1388]\"\r\nNCP-16.zip,2,NCP,446,2047,61,2,\"[2046, 2047]\"\r\nNCP-2.zip,2,NCP,111,1364,56,2,\"[1363, 1364]\"\r\nCP-15.zip,1,CP,1568,4256,22,1,[4256]\r\nNCP-21.zip,2,NCP,79,1291,131,2,\"[1291, 1292]\"\r\nCP-10.zip,1,CP,1395,3850,63,2,\"[3849, 3850]\"\r\nCP-14.zip,1,CP,1542,4199,54,3,\"[4197, 4198, 4199]\"\r\nCP-15.zip,1,CP,1555,4229,62,2,\"[4228, 4229]\"\r\nCP-21.zip,1,CP,606,2968,255,1,[2968]\r\nCP-12.zip,1,CP,1480,4042,54,2,\"[4042, 
4043]\"\r\nNCP-27.zip,2,NCP,1063,2640,82,1,[2640]\r\nNormal-7.zip,0,Normal,1831,286,99,1,[286]\r\nCP-14.zip,1,CP,1552,4221,62,2,\"[4221, 4222]\"\r\nNCP-19.zip,2,NCP,541,2240,51,2,\"[2239, 2240]\"\r\nNCP-23.zip,2,NCP,91,1318,100,2,\"[1318, 1319]\"\r\nNormal-2.zip,0,Normal,1739,1043,56,3,\"[1042, 1043, 1044]\"\r\nCP-19.zip,1,CP,2437,2907,183,3,\"[2905, 2906, 2907]\"\r\nCP-10.zip,1,CP,1396,3851,139,3,\"[3851, 3852, 3853]\"\r\nCP-13.zip,1,CP,1490,4072,69,3,\"[4071, 4072, 4073]\"\r\nCP-6.zip,1,CP,1242,3460,229,1,[3460]\r\nNCP-17.zip,2,NCP,471,2098,59,2,\"[2097, 2098]\"\r\nNCP-16.zip,2,NCP,434,2021,119,2,\"[2021, 2022]\"\r\nNCP-16.zip,2,NCP,446,2046,146,2,\"[2046, 2047]\"\r\nNCP-21.zip,2,NCP,69,1270,113,2,\"[1270, 1271]\"\r\nNormal-9.zip,0,Normal,1896,351,98,1,[351]\r\nNCP-9.zip,2,NCP,2709,2702,44,1,[2702]\r\nNCP-29.zip,2,NCP,907,2449,287,1,[2449]\r\nNCP-2.zip,2,NCP,106,1349,150,2,\"[1349, 1350]\"\r\nNCP-17.zip,2,NCP,477,2109,139,2,\"[2109, 2110]\"\r\nCP-27.zip,1,CP,3734,5677,163,3,\"[5676, 5677, 5678]\"\r\nNormal-8.zip,0,Normal,1877,332,88,1,[332]\r\nNormal-7.zip,0,Normal,1853,308,94,1,[308]\r\nNCP-2.zip,2,NCP,1272,2713,62,1,[2713]\r\nCP-13.zip,1,CP,1515,4132,57,3,\"[4131, 4132, 4133]\"\r\nNCP-21.zip,2,NCP,68,1269,49,2,\"[1268, 1269]\"\r\nCP-25.zip,1,CP,719,3081,128,1,[3081]\r\nNCP-10.zip,2,NCP,276,1698,139,2,\"[1698, 1699]\"\r\nNCP-11.zip,2,NCP,294,1734,57,2,\"[1733, 1734]\"\r\nCP-8.zip,1,CP,1342,3724,58,3,\"[3723, 3724, 3725]\"\r\nNormal-4.zip,0,Normal,783,218,118,1,[218]\r\nNormal-11.zip,0,Normal,1977,432,96,1,[432]\r\nCP-12.zip,1,CP,1460,3998,60,2,\"[3998, 3999]\"\r\nNCP-12.zip,2,NCP,32,1197,61,2,\"[1196, 1197]\"\r\nNormal-2.zip,0,Normal,1736,1035,55,5,\"[1032, 1033, 1034, 1035, 1036]\"\r\nCP-7.zip,1,CP,1308,3650,219,2,\"[3649, 3650]\"\r\nNCP-17.zip,2,NCP,485,2125,153,2,\"[2125, 2126]\"\r\nNormal-24.zip,0,Normal,2649,159,26,1,[159]\r\nCP-1.zip,1,CP,1082,3127,74,1,[3127]\r\nCP-28.zip,1,CP,3788,5732,26,1,[5732]\r\nNormal-3.zip,0,Normal,1764,1143,66,4,\"[1143, 1144, 
1145, 1146]\"\r\nNCP-2.zip,2,NCP,125,1392,132,5,\"[1391, 1392, 1393, 1394, 1395]\"\r\nNCP-1.zip,2,NCP,1013,2577,524,1,[2577]\r\nCP-22.zip,1,CP,630,2992,118,1,[2992]\r\nNormal-27.zip,0,Normal,3897,5423,70,4,\"[5423, 5424, 5426, 5427]\"\r\nCP-1.zip,1,CP,1088,3222,50,4,\"[3220, 3221, 3222, 3223]\"\r\nNCP-11.zip,2,NCP,294,1733,136,2,\"[1733, 1734]\"\r\nCP-3.zip,1,CP,1135,3353,202,1,[3353]\r\nCP-10.zip,1,CP,1408,3879,59,3,\"[3878, 3879, 3880]\"\r\nCP-19.zip,1,CP,1791,3213,71,4,\"[3210, 3211, 3212, 3213]\"\r\nNormal-1.zip,0,Normal,1709,974,61,2,\"[973, 974]\"\r\nCP-11.zip,1,CP,1438,3945,46,2,\"[3944, 3945]\"\r\nCP-8.zip,1,CP,1325,3687,64,2,\"[3686, 3687]\"\r\nCP-20.zip,1,CP,2761,3292,38,1,[3292]\r\nNCP-17.zip,2,NCP,470,2096,64,2,\"[2095, 2096]\"\r\nNCP-4.zip,2,NCP,164,1472,150,2,\"[1472, 1473]\"\r\nNCP-14.zip,2,NCP,380,1912,148,2,\"[1912, 1913]\"\r\nCP-7.zip,1,CP,1266,3484,134,1,[3484]\r\nCP-10.zip,1,CP,1400,3860,54,2,\"[3860, 3861]\"\r\nNCP-10.zip,2,NCP,281,1708,121,2,\"[1708, 1709]\"\r\nNCP-14.zip,2,NCP,397,1944,158,2,\"[1944, 1945]\"\r\nCP-27.zip,1,CP,3734,5678,32,3,\"[5676, 5677, 5678]\"\r\nCP-15.zip,1,CP,1559,4238,53,2,\"[4237, 4238]\"\r\nNormal-26.zip,0,Normal,3888,5406,63,1,[5406]\r\nNCP-11.zip,2,NCP,308,1764,49,2,\"[1763, 1764]\"\r\nNCP-16.zip,2,NCP,435,2024,62,2,\"[2023, 2024]\"\r\nNCP-11.zip,2,NCP,285,1715,149,2,\"[1715, 1716]\"\r\nNCP-20.zip,2,NCP,568,2294,144,2,\"[2294, 2295]\"\r\nNCP-20.zip,2,NCP,550,2257,143,2,\"[2257, 2258]\"\r\nNCP-6.zip,2,NCP,218,1581,58,2,\"[1580, 1581]\"\r\nNormal-15.zip,0,Normal,2092,547,87,1,[547]\r\nCP-10.zip,1,CP,1396,3853,58,3,\"[3851, 3852, 3853]\"\r\nNormal-12.zip,0,Normal,2010,465,91,1,[465]\r\nNormal-18.zip,0,Normal,2194,649,89,1,[649]\r\nNCP-10.zip,2,NCP,276,1699,58,2,\"[1698, 1699]\"\r\nCP-27.zip,1,CP,3746,5690,17,1,[5690]\r\nNormal-24.zip,0,Normal,2656,166,34,1,[166]\r\nCP-29.zip,1,CP,3802,5746,26,1,[5746]\r\nCP-17.zip,1,CP,1641,4329,26,1,[4329]\r\nNormal-2.zip,0,Normal,1749,1072,66,4,\"[1069, 1070, 1071, 
1072]\"\r\nCP-9.zip,1,CP,1373,3800,55,2,\"[3800, 3801]\"\r\nNormal-22.zip,0,Normal,2596,106,44,1,[106]\r\nNormal-14.zip,0,Normal,2072,527,77,1,[527]\r\nNormal-20.zip,0,Normal,2251,706,89,1,[706]\r\nCP-12.zip,1,CP,1482,4049,75,3,\"[4047, 4048, 4049]\"\r\nCP-6.zip,1,CP,1231,3449,375,1,[3449]\r\nCP-28.zip,1,CP,3797,5741,28,1,[5741]\r\nCP-7.zip,1,CP,1307,3648,242,4,\"[3645, 3646, 3647, 3648]\"\r\nNCP-1.zip,2,NCP,1030,2600,279,1,[2600]\r\nCP-11.zip,1,CP,1448,3970,62,2,\"[3969, 3970]\"\r\nNormal-20.zip,0,Normal,2255,710,95,1,[710]\r\nCP-2.zip,1,CP,1124,3342,215,1,[3342]\r\nNCP-28.zip,2,NCP,872,2404,46,2,\"[2403, 2404]\"\r\nNormal-3.zip,0,Normal,1765,1147,60,2,\"[1147, 1148]\"\r\nNCP-11.zip,2,NCP,289,1724,47,2,\"[1723, 1724]\"\r\nCP-11.zip,1,CP,1442,3956,58,3,\"[3954, 3955, 3956]\"\r\nCP-1.zip,1,CP,1081,3126,68,1,[3126]\r\nNormal-20.zip,0,Normal,2263,718,108,1,[718]\r\nNCP-19.zip,2,NCP,524,2204,191,1,[2204]\r\nNormal-4.zip,0,Normal,784,219,105,1,[219]\r\nCP-8.zip,1,CP,1337,3715,60,2,\"[3714, 3715]\"\r\nNCP-28.zip,2,NCP,841,2356,282,1,[2356]\r\nNCP-26.zip,2,NCP,3983,5510,40,1,[5510]\r\nCP-20.zip,1,CP,2767,3298,35,1,[3298]\r\nNormal-19.zip,0,Normal,2229,684,87,1,[684]\r\nNCP-15.zip,2,NCP,429,2012,55,2,\"[2011, 2012]\"\r\nCP-19.zip,1,CP,1788,3197,52,8,\"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]\"\r\nCP-15.zip,1,CP,1558,4235,62,3,\"[4234, 4235, 4236]\"\r\nNCP-3.zip,2,NCP,1280,2721,50,1,[2721]\r\nNCP-4.zip,2,NCP,148,1440,150,2,\"[1440, 1441]\"\r\nNormal-22.zip,0,Normal,2582,92,39,1,[92]\r\nNormal-23.zip,0,Normal,2623,133,35,1,[133]\r\nCP-13.zip,1,CP,1496,4090,55,2,\"[4090, 4091]\"\r\nCP-30.zip,1,CP,3835,5779,23,1,[5779]\r\nCP-11.zip,1,CP,1442,3954,139,3,\"[3954, 3955, 3956]\"\r\nNCP-15.zip,2,NCP,429,2011,131,2,\"[2011, 2012]\"\r\nCP-17.zip,1,CP,1621,4309,29,1,[4309]\r\nCP-6.zip,1,CP,1244,3462,87,1,[3462]\r\nNCP-1.zip,2,NCP,1021,2590,181,4,\"[2587, 2588, 2589, 2590]\"\r\nNCP-9.zip,2,NCP,2706,2672,51,1,[2672]\r\nNCP-14.zip,2,NCP,391,1932,131,2,\"[1932, 
1933]\"\r\nCP-3.zip,1,CP,1134,3352,330,1,[3352]\r\nCP-8.zip,1,CP,1346,3734,53,3,\"[3733, 3734, 3735]\"\r\nNCP-12.zip,2,NCP,320,1789,58,2,\"[1788, 1789]\"\r\nNCP-21.zip,2,NCP,77,1287,126,2,\"[1287, 1288]\"\r\nCP-17.zip,1,CP,1647,4335,23,1,[4335]\r\nCP-11.zip,1,CP,1453,3979,221,3,\"[3979, 3980, 3981]\"\r\nNormal-2.zip,0,Normal,1759,1117,65,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nNCP-17.zip,2,NCP,481,2118,68,2,\"[2117, 2118]\"\r\nNCP-3.zip,2,NCP,1279,2720,66,1,[2720]\r\nCP-8.zip,1,CP,1346,3733,53,3,\"[3733, 3734, 3735]\"\r\nNormal-10.zip,0,Normal,1954,409,88,1,[409]\r\nCP-17.zip,1,CP,1648,4336,29,1,[4336]\r\nCP-14.zip,1,CP,1524,4154,58,3,\"[4152, 4153, 4154]\"\r\nNormal-18.zip,0,Normal,2216,671,97,1,[671]\r\nNCP-27.zip,2,NCP,179,1503,43,2,\"[1503, 1502]\"\r\nCP-19.zip,1,CP,1788,3202,55,8,\"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]\"\r\nCP-12.zip,1,CP,1482,4048,75,3,\"[4047, 4048, 4049]\"\r\nNCP-29.zip,2,NCP,913,2455,268,1,[2455]\r\nCP-19.zip,1,CP,2444,2919,112,2,\"[2918, 2919]\"\r\nCP-22.zip,1,CP,639,3001,136,1,[3001]\r\nNCP-2.zip,2,NCP,121,1383,100,2,\"[1383, 1384]\"\r\nCP-8.zip,1,CP,1324,3685,58,2,\"[3684, 3685]\"\r\nCP-11.zip,1,CP,1430,3929,77,2,\"[3928, 3929]\"\r\nNCP-15.zip,2,NCP,401,1952,58,2,\"[1951, 1952]\"\r\nNormal-4.zip,0,Normal,788,223,336,1,[223]\r\nNormal-27.zip,0,Normal,3898,5428,74,1,[5428]\r\nNormal-21.zip,0,Normal,2312,767,88,1,[767]\r\nNormal-17.zip,0,Normal,2170,625,62,1,[625]\r\nNCP-3.zip,2,NCP,130,1406,59,2,\"[1405, 1406]\"\r\nCP-3.zip,1,CP,1154,3372,169,1,[3372]\r\nNormal-3.zip,0,Normal,1765,1148,60,2,\"[1147, 1148]\"\r\nNormal-11.zip,0,Normal,1962,417,78,1,[417]\r\nCP-18.zip,1,CP,1667,4355,26,1,[4355]\r\nCP-1.zip,1,CP,1066,3105,59,1,[3105]\r\nNCP-1.zip,2,NCP,1047,2619,473,1,[2619]\r\nNCP-10.zip,2,NCP,2711,2704,44,1,[2704]\r\nNormal-19.zip,0,Normal,2237,692,85,1,[692]\r\nNCP-11.zip,2,NCP,289,1723,110,2,\"[1723, 1724]\"\r\nNCP-7.zip,2,NCP,240,1626,66,2,\"[1625, 
1626]\"\r\nNormal-11.zip,0,Normal,1974,429,96,1,[429]\r\nNormal-26.zip,0,Normal,3887,5401,67,3,\"[5400, 5401, 5404]\"\r\nNormal-26.zip,0,Normal,3891,5411,67,2,\"[5411, 5412]\"\r\nNormal-18.zip,0,Normal,2191,646,106,1,[646]\r\nNCP-28.zip,2,NCP,840,2355,55,1,[2355]\r\nNormal-6.zip,0,Normal,1814,269,88,1,[269]\r\nNCP-12.zip,2,NCP,329,1807,66,2,\"[1806, 1807]\"\r\nCP-24.zip,1,CP,686,3048,133,1,[3048]\r\nCP-19.zip,1,CP,2432,2894,124,1,[2894]\r\nNormal-10.zip,0,Normal,1952,407,107,1,[407]\r\nCP-13.zip,1,CP,1515,4133,57,3,\"[4131, 4132, 4133]\"\r\nCP-8.zip,1,CP,1347,3737,34,3,\"[3736, 3737, 3738]\"\r\nNormal-2.zip,0,Normal,1754,1095,69,4,\"[1093, 1094, 1095, 1096]\"\r\nCP-22.zip,1,CP,622,2984,459,1,[2984]\r\nCP-3.zip,1,CP,1141,3359,350,1,[3359]\r\nCP-14.zip,1,CP,1533,4173,100,3,\"[4173, 4174, 4175]\"\r\nNormal-10.zip,0,Normal,1935,390,91,1,[390]\r\nNormal-22.zip,0,Normal,2320,775,91,1,[775]\r\nNormal-25.zip,0,Normal,3859,5371,216,1,[5371]\r\nNormal-12.zip,0,Normal,2018,473,93,1,[473]\r\nCP-9.zip,1,CP,1359,3764,181,3,\"[3764, 3765, 3766]\"\r\nCP-20.zip,1,CP,2452,2931,298,1,[2931]\r\nNCP-23.zip,2,NCP,90,1316,100,2,\"[1316, 1317]\"\r\nNormal-2.zip,0,Normal,1744,1058,71,2,\"[1058, 1059]\"\r\nNCP-18.zip,2,NCP,492,2141,58,2,\"[2140, 2141]\"\r\nNormal-13.zip,0,Normal,2053,508,81,1,[508]\r\nNormal-17.zip,0,Normal,2156,611,82,1,[611]\r\nNCP-19.zip,2,NCP,541,2239,121,2,\"[2239, 2240]\"\r\nNCP-19.zip,2,NCP,531,2221,58,2,\"[2220, 2221]\"\r\nCP-19.zip,1,CP,2448,2925,104,2,\"[2925, 2926]\"\r\nCP-31.zip,1,CP,4044,5593,276,1,[5593]\r\nCP-8.zip,1,CP,1345,3732,55,2,\"[3731, 3732]\"\r\nNormal-3.zip,0,Normal,743,178,340,1,[178]\r\nNormal-23.zip,0,Normal,2613,123,40,1,[123]\r\nNormal-1.zip,0,Normal,1714,983,71,3,\"[982, 983, 984]\"\r\nNCP-8.zip,2,NCP,268,1683,53,2,\"[1682, 1683]\"\r\nCP-8.zip,1,CP,1347,3738,34,3,\"[3736, 3737, 3738]\"\r\nCP-25.zip,1,CP,718,3080,466,1,[3080]\r\nNormal-13.zip,0,Normal,2024,479,86,1,[479]\r\nNormal-1.zip,0,Normal,1668,780,63,4,\"[778, 779, 780, 
781]\"\r\nCP-17.zip,1,CP,1636,4324,26,1,[4324]\r\nNCP-20.zip,2,NCP,55,1244,63,2,\"[1243, 1244]\"\r\nCP-32.zip,1,CP,2463,3227,77,1,[3227]\r\nNCP-16.zip,2,NCP,435,2023,153,2,\"[2023, 2024]\"\r\nNCP-2.zip,2,NCP,106,1350,63,2,\"[1349, 1350]\"\r\nCP-27.zip,1,CP,3753,5697,20,1,[5697]\r\nNCP-15.zip,2,NCP,415,1983,63,2,\"[1982, 1983]\"\r\nNCP-5.zip,2,NCP,191,1527,54,2,\"[1526, 1527]\"\r\nCP-3.zip,1,CP,1142,3360,138,1,[3360]\r\nNCP-19.zip,2,NCP,531,2220,139,2,\"[2220, 2221]\"\r\nCP-14.zip,1,CP,1524,4153,58,3,\"[4152, 4153, 4154]\"\r\nCP-19.zip,1,CP,1788,3196,49,8,\"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]\"\r\nCP-14.zip,1,CP,1533,4175,42,3,\"[4173, 4174, 4175]\"\r\nNCP-7.zip,2,NCP,244,1635,69,2,\"[1634, 1635]\"\r\nNormal-7.zip,0,Normal,1848,303,95,1,[303]\r\nCP-24.zip,1,CP,703,3065,120,1,[3065]\r\nNormal-1.zip,0,Normal,1731,1024,36,1,[1024]\r\nCP-11.zip,1,CP,1448,3969,62,2,\"[3969, 3970]\"\r\nNormal-25.zip,0,Normal,3850,5362,180,1,[5362]\r\nCP-7.zip,1,CP,13,3172,255,4,\"[3170, 3171, 3172, 3173]\"\r\nCP-14.zip,1,CP,1533,4174,42,3,\"[4173, 4174, 4175]\"\r\nNCP-3.zip,2,NCP,130,1405,140,2,\"[1405, 1406]\"\r\nCP-23.zip,1,CP,647,3009,384,1,[3009]\r\nNormal-24.zip,0,Normal,2637,147,36,1,[147]\r\nNCP-28.zip,2,NCP,848,2367,283,2,\"[2366, 2367]\"\r\nNormal-9.zip,0,Normal,1903,358,86,1,[358]\r\nNormal-26.zip,0,Normal,3889,5408,65,2,\"[5407, 5408]\"\r\nNCP-20.zip,2,NCP,567,2293,60,2,\"[2292, 2293]\"\r\nCP-22.zip,1,CP,621,2983,174,1,[2983]\r\nCP-10.zip,1,CP,1389,3835,51,3,\"[3833, 3834, 3835]\"\r\nCP-9.zip,1,CP,1362,3773,61,2,\"[3772, 3773]\"\r\nNormal-27.zip,0,Normal,3897,5426,72,4,\"[5423, 5424, 5426, 5427]\"\r\nNCP-15.zip,2,NCP,428,2010,53,2,\"[2009, 2010]\"\r\nNormal-3.zip,0,Normal,759,194,297,1,[194]\r\nCP-13.zip,1,CP,1497,4092,68,3,\"[4092, 4093, 4094]\"\r\nNormal-19.zip,0,Normal,2246,701,87,1,[701]\r\nCP-3.zip,1,CP,1130,3348,166,1,[3348]\r\nCP-14.zip,1,CP,1552,4222,62,2,\"[4221, 4222]\"\r\nNCP-26.zip,2,NCP,3994,5518,52,1,[5518]\r\nNCP-27.zip,2,NCP,328,1805,43,2,\"[1804, 
1805]\"\r\nNCP-13.zip,2,NCP,369,1889,138,2,\"[1889, 1890]\"\r\nCP-20.zip,1,CP,2756,3287,56,1,[3287]\r\nCP-22.zip,1,CP,638,3000,116,1,[3000]\r\nCP-6.zip,1,CP,1250,3468,451,1,[3468]\r\nCP-19.zip,1,CP,2437,2905,316,3,\"[2905, 2906, 2907]\"\r\nNormal-16.zip,0,Normal,2130,585,88,1,[585]\r\nNCP-14.zip,2,NCP,376,1904,142,2,\"[1904, 1905]\"\r\nNormal-10.zip,0,Normal,1932,387,91,1,[387]\r\nNCP-16.zip,2,NCP,453,2060,121,2,\"[2060, 2061]\"\r\nNCP-5.zip,2,NCP,191,1526,128,2,\"[1526, 1527]\"\r\nCP-12.zip,1,CP,1476,4034,53,2,\"[4033, 4034]\"\r\nNCP-5.zip,2,NCP,175,1495,55,2,\"[1494, 1495]\"\r\nNCP-21.zip,2,NCP,71,1275,53,2,\"[1274, 1275]\"\r\nNormal-10.zip,0,Normal,1925,380,90,1,[380]\r\nNCP-30.zip,2,NCP,994,2548,226,2,\"[2547, 2548]\"\r\nCP-4.zip,1,CP,1192,3410,184,1,[3410]\r\nNormal-23.zip,0,Normal,2631,141,38,1,[141]\r\nNCP-9.zip,2,NCP,2684,2697,50,1,[2697]\r\nCP-27.zip,1,CP,3757,5701,22,1,[5701]\r\nNCP-3.zip,2,NCP,1288,2729,61,1,[2729]\r\nNCP-18.zip,2,NCP,505,2166,157,2,\"[2166, 2167]\"\r\nCP-8.zip,1,CP,1348,3741,59,3,\"[3739, 3740, 3741]\"\r\nNormal-24.zip,0,Normal,2651,161,34,1,[161]\r\nNormal-23.zip,0,Normal,2618,128,35,1,[128]\r\nCP-8.zip,1,CP,1331,3702,62,2,\"[3701, 3702]\"\r\nNCP-14.zip,2,NCP,398,1947,70,2,\"[1946, 1947]\"\r\nNCP-4.zip,2,NCP,158,1460,122,2,\"[1460, 1461]\"\r\nNCP-23.zip,2,NCP,89,1312,157,4,\"[1311, 1312, 1313, 1315]\"\r\nNormal-2.zip,0,Normal,1759,1116,64,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nCP-17.zip,1,CP,1645,4333,26,1,[4333]\r\nCP-10.zip,1,CP,1408,3880,59,3,\"[3878, 3879, 3880]\"\r\nCP-30.zip,1,CP,3917,5541,62,1,[5541]\r\nNCP-30.zip,2,NCP,933,2475,23,1,[2475]\r\nCP-8.zip,1,CP,1344,3728,142,3,\"[3728, 3729, 3730]\"\r\nNCP-17.zip,2,NCP,459,2072,133,2,\"[2072, 2073]\"\r\nNCP-4.zip,2,NCP,150,1445,75,2,\"[1444, 1445]\"\r\nCP-12.zip,1,CP,1455,3986,58,3,\"[3985, 3986, 3987]\"\r\nNormal-27.zip,0,Normal,3897,5427,72,4,\"[5423, 5424, 5426, 5427]\"\r\nCP-18.zip,1,CP,1772,3177,81,6,\"[3176, 3177, 3178, 3179, 3180, 
3181]\"\r\nCP-27.zip,1,CP,3745,5689,23,1,[5689]\r\nNCP-29.zip,2,NCP,920,2462,183,1,[2462]\r\nNCP-9.zip,2,NCP,2688,2655,56,1,[2655]\r\nNormal-8.zip,0,Normal,1887,342,94,1,[342]\r\nCP-1.zip,1,CP,1076,3120,70,1,[3120]\r\nNormal-15.zip,0,Normal,2100,555,94,1,[555]\r\nNCP-11.zip,2,NCP,285,1716,62,2,\"[1715, 1716]\"\r\nCP-8.zip,1,CP,1344,3729,59,3,\"[3728, 3729, 3730]\"\r\nNormal-12.zip,0,Normal,2021,476,85,1,[476]\r\nNormal-15.zip,0,Normal,2105,560,87,1,[560]\r\nCP-9.zip,1,CP,1366,3784,57,3,\"[3782, 3783, 3784]\"\r\nCP-18.zip,1,CP,1772,3181,75,6,\"[3176, 3177, 3178, 3179, 3180, 3181]\"\r\nNCP-15.zip,2,NCP,426,2006,58,2,\"[2005, 2006]\"\r\nNCP-1.zip,2,NCP,1020,2586,45,1,[2586]\r\nNCP-13.zip,2,NCP,356,1863,124,2,\"[1863, 1864]\"\r\nNormal-8.zip,0,Normal,1865,320,99,1,[320]\r\nNCP-30.zip,2,NCP,994,2547,226,2,\"[2547, 2548]\"\r\nNormal-12.zip,0,Normal,2011,466,93,1,[466]\r\nCP-18.zip,1,CP,1773,3185,67,4,\"[3182, 3183, 3184, 3185]\"\r\nNCP-18.zip,2,NCP,505,2167,66,2,\"[2166, 2167]\"\r\nCP-8.zip,1,CP,1328,3694,69,2,\"[3693, 3694]\"\r\nNCP-2.zip,2,NCP,1278,2719,61,1,[2719]\r\nCP-25.zip,1,CP,736,3098,494,1,[3098]\r\nCP-24.zip,1,CP,7,3512,299,2,\"[3511, 3512]\"\r\nNormal-27.zip,0,Normal,3913,5455,71,2,\"[5454, 5455]\"\r\nNCP-6.zip,2,NCP,218,1580,139,2,\"[1580, 1581]\"\r\nNormal-4.zip,0,Normal,795,230,120,1,[230]\r\nNCP-6.zip,2,NCP,207,1559,46,2,\"[1558, 1559]\"\r\nNCP-5.zip,2,NCP,189,1523,58,2,\"[1522, 1523]\"\r\nNormal-22.zip,0,Normal,2314,769,84,1,[769]\r\nCP-14.zip,1,CP,1541,4195,58,3,\"[4194, 4195, 4196]\"\r\nNormal-26.zip,0,Normal,3866,5378,27,1,[5378]\r\nNCP-30.zip,2,NCP,938,2481,78,2,\"[2480, 2481]\"\r\nNCP-1.zip,2,NCP,1041,2612,126,1,[2612]\r\nNormal-24.zip,0,Normal,2664,174,28,1,[174]\r\nCP-14.zip,1,CP,1542,4198,54,3,\"[4197, 4198, 4199]\"\r\nCP-8.zip,1,CP,1332,3704,41,2,\"[3703, 3704]\"\r\nCP-14.zip,1,CP,1527,4160,142,3,\"[4160, 4161, 4162]\"\r\nNormal-2.zip,0,Normal,1749,1071,66,4,\"[1069, 1070, 1071, 1072]\"\r\nCP-7.zip,1,CP,13,3170,271,4,\"[3170, 3171, 3172, 
3173]\"\r\nCP-20.zip,1,CP,2769,3300,36,1,[3300]\r\nNormal-11.zip,0,Normal,1973,428,90,1,[428]\r\nCP-28.zip,1,CP,3783,5727,26,1,[5727]\r\nNCP-12.zip,2,NCP,320,1788,139,2,\"[1788, 1789]\"\r\nNormal-10.zip,0,Normal,1929,384,91,1,[384]\r\nNormal-7.zip,0,Normal,1841,296,79,1,[296]\r\nNormal-8.zip,0,Normal,1881,336,91,1,[336]\r\nNCP-25.zip,2,NCP,3964,5475,41,1,[5475]\r\nCP-12.zip,1,CP,1480,4043,54,2,\"[4042, 4043]\"\r\nNCP-23.zip,2,NCP,91,1319,43,2,\"[1318, 1319]\"\r\nNCP-11.zip,2,NCP,30,1193,56,1,[1193]\r\nNCP-29.zip,2,NCP,924,2466,18,1,[2466]\r\nCP-16.zip,1,CP,1614,4302,23,1,[4302]\r\nNormal-14.zip,0,Normal,2061,516,88,1,[516]\r\nNCP-27.zip,2,NCP,826,2339,54,1,[2339]\r\nNormal-13.zip,0,Normal,2038,493,80,1,[493]\r\nNormal-1.zip,0,Normal,1715,985,71,2,\"[985, 986]\"\r\nCP-28.zip,1,CP,3782,5726,25,1,[5726]\r\nCP-21.zip,1,CP,2777,3308,22,1,[3308]\r\nCP-8.zip,1,CP,1328,3693,69,2,\"[3693, 3694]\"\r\nNCP-17.zip,2,NCP,468,2091,154,2,\"[2091, 2092]\"\r\nNCP-13.zip,2,NCP,36,1205,59,2,\"[1204, 1205]\"\r\nNormal-12.zip,0,Normal,2000,455,93,1,[455]\r\nCP-19.zip,1,CP,2448,2926,102,2,\"[2925, 2926]\"\r\nNCP-10.zip,2,NCP,2728,2711,54,1,[2711]\r\nNCP-8.zip,2,NCP,263,1672,177,2,\"[1672, 1673]\"\r\nCP-30.zip,1,CP,3831,5775,25,1,[5775]\r\nNormal-1.zip,0,Normal,1709,973,61,2,\"[973, 974]\"\r\nCP-27.zip,1,CP,3751,5695,22,1,[5695]\r\nCP-11.zip,1,CP,1453,3981,56,3,\"[3979, 3980, 3981]\"\r\nCP-16.zip,1,CP,1617,4305,23,1,[4305]\r\nCP-19.zip,1,CP,1788,3198,53,8,\"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]\"\r\nCP-19.zip,1,CP,2447,2924,86,2,\"[2923, 2924]\"\r\nNCP-20.zip,2,NCP,55,1243,150,2,\"[1243, 1244]\"\r\nCP-1.zip,1,CP,1069,3108,77,4,\"[3108, 3109, 3110, 3111]\"\r\nCP-29.zip,1,CP,3827,5771,26,1,[5771]\r\nCP-16.zip,1,CP,1599,4287,17,1,[4287]\r\nNCP-12.zip,2,NCP,34,1201,64,2,\"[1200, 1201]\"\r\nNCP-19.zip,2,NCP,523,2202,148,2,\"[2202, 2203]\"\r\nCP-19.zip,1,CP,2429,2890,100,1,[2890]\r\nNCP-9.zip,2,NCP,2695,2661,45,1,[2661]\r\nNormal-1.zip,0,Normal,1730,1022,59,5,\"[1019, 1020, 1021, 
1022, 1023]\"\r\nCP-24.zip,1,CP,7,3511,298,2,\"[3511, 3512]\"\r\nNCP-27.zip,2,NCP,1045,2617,30,1,[2617]\r\nNormal-15.zip,0,Normal,2088,543,75,1,[543]\r\nNormal-25.zip,0,Normal,3853,5365,205,1,[5365]\r\nNormal-14.zip,0,Normal,2076,531,77,1,[531]\r\nNCP-22.zip,2,NCP,84,1301,127,2,\"[1301, 1302]\"\r\nCP-18.zip,1,CP,1660,4348,23,1,[4348]\r\nNCP-26.zip,2,NCP,3980,5487,38,1,[5487]\r\nCP-20.zip,1,CP,2758,3289,35,1,[3289]\r\nNormal-6.zip,0,Normal,1808,263,95,1,[263]\r\nNormal-2.zip,0,Normal,1739,1044,56,3,\"[1042, 1043, 1044]\"\r\nCP-1.zip,1,CP,1068,3107,62,1,[3107]\r\nNormal-14.zip,0,Normal,2083,538,87,1,[538]\r\nCP-12.zip,1,CP,1484,4054,46,3,\"[4053, 4054, 4055]\"\r\nCP-29.zip,1,CP,3811,5755,23,1,[5755]\r\nCP-14.zip,1,CP,1548,4213,51,2,\"[4213, 4214]\"\r\nNCP-20.zip,2,NCP,561,2281,58,2,\"[2280, 2281]\"\r\nCP-14.zip,1,CP,1544,4204,51,3,\"[4203, 4204, 4205]\"\r\nNCP-27.zip,2,NCP,1062,2639,176,1,[2639]\r\nCP-25.zip,1,CP,735,3097,110,1,[3097]\r\nCP-2.zip,1,CP,1115,3333,180,1,[3333]\r\nCP-27.zip,1,CP,3756,5700,20,1,[5700]\r\nNormal-5.zip,0,Normal,813,248,136,1,[248]\r\nNormal-19.zip,0,Normal,2221,676,103,1,[676]\r\nNormal-27.zip,0,Normal,3902,5434,73,1,[5434]\r\nCP-11.zip,1,CP,1437,3943,57,2,\"[3942, 3943]\"\r\nNCP-2.zip,2,NCP,126,1398,64,2,\"[1396, 1398]\"\r\nNormal-20.zip,0,Normal,2265,720,87,1,[720]\r\nCP-16.zip,1,CP,1589,4277,23,1,[4277]\r\nNormal-16.zip,0,Normal,2149,604,85,1,[604]\r\nNCP-19.zip,2,NCP,523,2203,62,2,\"[2202, 2203]\"\r\nCP-12.zip,1,CP,1455,3985,138,3,\"[3985, 3986, 3987]\"\r\nCP-30.zip,1,CP,4040,5589,38,1,[5589]\r\nNCP-1.zip,2,NCP,1049,2622,205,1,[2622]\r\nNormal-1.zip,0,Normal,1674,811,74,2,\"[810, 811]\"\r\nNCP-19.zip,2,NCP,539,2236,55,2,\"[2235, 2236]\"\r\nNormal-1.zip,0,Normal,1668,779,60,4,\"[778, 779, 780, 781]\"\r\nNCP-19.zip,2,NCP,542,2241,130,2,\"[2241, 2242]\"\r\nCP-25.zip,1,CP,739,3101,112,1,[3101]\r\nCP-9.zip,1,CP,1367,3785,140,3,\"[3785, 3786, 3787]\"\r\nCP-14.zip,1,CP,1549,4215,61,2,\"[4215, 4216]\"\r\nNCP-19.zip,2,NCP,53,1239,144,2,\"[1239, 
1240]\"\r\nNormal-1.zip,0,Normal,1730,1021,294,5,\"[1019, 1020, 1021, 1022, 1023]\"\r\nNCP-29.zip,2,NCP,918,2460,213,1,[2460]\r\nNCP-23.zip,2,NCP,89,1311,138,4,\"[1311, 1312, 1313, 1315]\"\r\nNormal-2.zip,0,Normal,1759,1119,66,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nCP-9.zip,1,CP,1359,3765,46,3,\"[3764, 3765, 3766]\"\r\nNCP-25.zip,2,NCP,3706,5533,54,1,[5533]\r\nCP-8.zip,1,CP,1325,3686,65,2,\"[3686, 3687]\"\r\nNCP-19.zip,2,NCP,545,2248,57,2,\"[2247, 2248]\"\r\nNCP-15.zip,2,NCP,418,1989,143,2,\"[1989, 1990]\"\r\nCP-7.zip,1,CP,1261,3479,198,1,[3479]\r\nNCP-29.zip,2,NCP,895,2435,143,2,\"[2435, 2436]\"\r\nCP-12.zip,1,CP,1483,4051,62,3,\"[4050, 4051, 4052]\"\r\nCP-12.zip,1,CP,1460,3999,60,2,\"[3998, 3999]\"\r\nCP-12.zip,1,CP,1456,3988,122,3,\"[3988, 3989, 3990]\"\r\nNormal-12.zip,0,Normal,2014,469,98,1,[469]\r\nCP-14.zip,1,CP,1542,4197,180,3,\"[4197, 4198, 4199]\"\r\nNormal-2.zip,0,Normal,1755,1098,73,4,\"[1097, 1098, 1099, 1100]\"\r\nNCP-14.zip,2,NCP,382,1917,58,2,\"[1916, 1917]\"\r\nNCP-4.zip,2,NCP,153,1451,58,2,\"[1450, 1451]\"\r\nNormal-27.zip,0,Normal,3913,5454,68,2,\"[5454, 5455]\"\r\nNormal-1.zip,0,Normal,1674,810,74,2,\"[810, 811]\"\r\nNormal-2.zip,0,Normal,1736,1036,55,5,\"[1032, 1033, 1034, 1035, 1036]\"\r\nNormal-1.zip,0,Normal,1682,852,81,6,\"[847, 848, 852, 853, 857, 858]\"\r\nNormal-4.zip,0,Normal,796,231,287,1,[231]\r\nNCP-11.zip,2,NCP,292,1729,138,2,\"[1729, 1730]\"\r\nNCP-12.zip,2,NCP,327,1803,55,2,\"[1802, 1803]\"\r\nNormal-25.zip,0,Normal,3712,5342,28,1,[5342]\r\nCP-4.zip,1,CP,1182,3400,130,1,[3400]\r\nCP-2.zip,1,CP,1113,3331,197,1,[3331]\r\nNCP-22.zip,2,NCP,888,2426,55,1,[2426]\r\nNormal-25.zip,0,Normal,3846,5358,209,1,[5358]\r\nCP-9.zip,1,CP,1362,3772,61,2,\"[3772, 3773]\"\r\nCP-4.zip,1,CP,1193,3411,190,1,[3411]\r\nNormal-5.zip,0,Normal,802,237,298,1,[237]\r\nCP-23.zip,1,CP,655,3017,511,1,[3017]\r\nNCP-13.zip,2,NCP,360,1871,121,2,\"[1871, 
1872]\"\r\nNCP-30.zip,2,NCP,977,2521,257,1,[2521]\r\nNCP-26.zip,2,NCP,3990,5514,51,1,[5514]\r\nNormal-3.zip,0,Normal,768,203,130,1,[203]\r\nNormal-1.zip,0,Normal,1713,980,71,2,\"[980, 981]\"\r\nCP-26.zip,1,CP,3732,5672,53,2,\"[5671, 5672]\"\r\nCP-20.zip,1,CP,2762,3293,33,1,[3293]\r\nNormal-20.zip,0,Normal,2267,722,100,1,[722]\r\nNCP-5.zip,2,NCP,189,1522,139,2,\"[1522, 1523]\"\r\nNCP-28.zip,2,NCP,848,2366,57,2,\"[2366, 2367]\"\r\nNCP-6.zip,2,NCP,215,1575,65,2,\"[1574, 1575]\"\r\nNormal-27.zip,0,Normal,3905,5438,58,2,\"[5437, 5438]\"\r\nCP-4.zip,1,CP,1163,3381,239,1,[3381]\r\nCP-18.zip,1,CP,1665,4353,25,1,[4353]\r\nNormal-25.zip,0,Normal,3842,5354,189,1,[5354]\r\nNormal-22.zip,0,Normal,2583,93,46,1,[93]\r\nNCP-11.zip,2,NCP,308,1763,116,2,\"[1763, 1764]\"\r\nCP-4.zip,1,CP,1180,3398,150,1,[3398]\r\nCP-7.zip,1,CP,1316,3668,63,3,\"[3667, 3668, 3669]\"\r\nCP-5.zip,1,CP,1213,3431,159,1,[3431]\r\nNormal-10.zip,0,Normal,1947,402,89,1,[402]\r\nCP-24.zip,1,CP,698,3060,124,1,[3060]\r\nCP-15.zip,1,CP,1562,4243,55,2,\"[4243, 4244]\"\r\nNCP-25.zip,2,NCP,3962,5473,58,1,[5473]\r\nCP-18.zip,1,CP,1772,3180,75,6,\"[3176, 3177, 3178, 3179, 3180, 3181]\"\r\nNormal-13.zip,0,Normal,2029,484,94,1,[484]\r\nNCP-16.zip,2,NCP,443,2041,50,2,\"[2040, 2041]\"\r\nNCP-24.zip,2,NCP,984,2529,259,2,\"[2529, 2530]\"\r\nCP-18.zip,1,CP,1773,3183,61,4,\"[3182, 3183, 3184, 3185]\"\r\nCP-5.zip,1,CP,1194,3412,158,1,[3412]\r\nNCP-14.zip,2,NCP,39,1211,58,2,\"[1210, 1211]\"\r\nCP-13.zip,1,CP,15,3174,98,1,[3174]\r\nCP-28.zip,1,CP,3775,5719,29,1,[5719]\r\nNCP-17.zip,2,NCP,477,2110,58,2,\"[2109, 2110]\"\r\nNormal-16.zip,0,Normal,2133,588,73,1,[588]\r\nNCP-4.zip,2,NCP,150,1444,181,2,\"[1444, 1445]\"\r\nCP-4.zip,1,CP,1188,3406,308,1,[3406]\r\nNCP-8.zip,2,NCP,251,1649,55,2,\"[1648, 1649]\"\r\nCP-1.zip,1,CP,1094,3312,329,1,[3312]\r\nNCP-12.zip,2,NCP,327,1802,130,2,\"[1802, 1803]\"\r\nNormal-7.zip,0,Normal,1830,285,84,1,[285]\r\nCP-12.zip,1,CP,1481,4045,58,3,\"[4044, 4045, 
4046]\"\r\nNCP-19.zip,2,NCP,52,1238,57,2,\"[1237, 1238]\"\r\nNCP-20.zip,2,NCP,557,2271,132,2,\"[2271, 2272]\"\r\nNCP-14.zip,2,NCP,398,1946,167,2,\"[1946, 1947]\"\r\nNCP-8.zip,2,NCP,260,1667,68,2,\"[1666, 1667]\"\r\nNormal-2.zip,0,Normal,1754,1094,73,4,\"[1093, 1094, 1095, 1096]\"\r\nNormal-2.zip,0,Normal,1736,1032,124,5,\"[1032, 1033, 1034, 1035, 1036]\"\r\nNCP-11.zip,2,NCP,292,1730,58,2,\"[1729, 1730]\"\r\nCP-25.zip,1,CP,725,3087,80,1,[3087]\r\nCP-15.zip,1,CP,1558,4234,62,3,\"[4234, 4235, 4236]\"\r\nNormal-17.zip,0,Normal,2161,616,99,1,[616]\r\nNCP-23.zip,2,NCP,970,2513,62,1,[2513]\r\nNCP-10.zip,2,NCP,277,1700,152,2,\"[1700, 1701]\"\r\nNCP-14.zip,2,NCP,395,1941,71,2,\"[1940, 1941]\"\r\nNormal-2.zip,0,Normal,1755,1100,71,4,\"[1097, 1098, 1099, 1100]\"\r\nCP-26.zip,1,CP,3718,5648,254,2,\"[5647, 5648]\"\r\nNormal-25.zip,0,Normal,3841,5353,188,1,[5353]\r\nNormal-23.zip,0,Normal,2621,131,41,1,[131]\r\nNCP-20.zip,2,NCP,555,2267,133,2,\"[2267, 2268]\"\r\nNCP-7.zip,2,NCP,244,1634,165,2,\"[1634, 1635]\"\r\nNormal-6.zip,0,Normal,1821,276,102,1,[276]\r\nNCP-17.zip,2,NCP,459,2073,56,2,\"[2072, 2073]\"\r\nNCP-2.zip,2,NCP,124,1390,58,2,\"[1389, 1390]\"\r\nNormal-18.zip,0,Normal,2185,640,100,1,[640]\r\nNCP-5.zip,2,NCP,193,1530,124,2,\"[1530, 1531]\"\r\nNCP-8.zip,2,NCP,253,1652,139,2,\"[1652, 1653]\"\r\nNCP-23.zip,2,NCP,89,1313,58,4,\"[1311, 1312, 1313, 1315]\"\r\nCP-5.zip,1,CP,1216,3434,307,1,[3434]\r\nNCP-30.zip,2,NCP,979,2523,345,1,[2523]\r\nNCP-23.zip,2,NCP,97,1331,41,2,\"[1330, 1331]\"\r\nNCP-20.zip,2,NCP,555,2268,56,2,\"[2267, 2268]\"\r\nNormal-16.zip,0,Normal,2126,581,84,1,[581]\r\nNCP-18.zip,2,NCP,488,2133,58,2,\"[2131, 2133]\"\r\nNCP-10.zip,2,NCP,28,1189,61,2,\"[1188, 1189]\"\r\nNCP-15.zip,2,NCP,41,1214,151,2,\"[1214, 1215]\"\r\nNCP-12.zip,2,NCP,32,1196,145,2,\"[1196, 1197]\"\r\nCP-26.zip,1,CP,3722,5656,50,2,\"[5656, 
5657]\"\r\nCP-15.zip,1,CP,1573,4261,22,1,[4261]\r\nNCP-27.zip,2,NCP,1028,2598,147,1,[2598]\r\nNormal-18.zip,0,Normal,2197,652,105,1,[652]\r\nNormal-16.zip,0,Normal,2152,607,66,1,[607]\r\nNCP-14.zip,2,NCP,380,1913,62,2,\"[1912, 1913]\"\r\nNormal-15.zip,0,Normal,2093,548,72,1,[548]\r\nNCP-3.zip,2,NCP,1299,2740,63,1,[2740]\r\nCP-8.zip,1,CP,1348,3740,59,3,\"[3739, 3740, 3741]\"\r\nNormal-6.zip,0,Normal,1822,277,101,1,[277]\r\nNormal-4.zip,0,Normal,800,235,116,1,[235]\r\nCP-10.zip,1,CP,1386,3827,66,2,\"[3827, 3828]\"\r\nNormal-12.zip,0,Normal,2004,459,106,1,[459]\r\nNCP-25.zip,2,NCP,3957,5470,47,1,[5470]\r\nCP-5.zip,1,CP,1204,3422,294,1,[3422]\r\nCP-11.zip,1,CP,1420,3905,59,2,\"[3905, 3906]\"\r\nCP-17.zip,1,CP,1649,4337,23,1,[4337]\r\nCP-28.zip,1,CP,3769,5713,18,1,[5713]\r\nNormal-8.zip,0,Normal,1868,323,91,1,[323]\r\nCP-1.zip,1,CP,1087,3219,400,1,[3219]\r\nCP-26.zip,1,CP,3640,5599,295,1,[5599]\r\nNCP-7.zip,2,NCP,248,1642,139,2,\"[1642, 1643]\"\r\nNCP-2.zip,2,NCP,125,1395,55,5,\"[1391, 1392, 1393, 1394, 1395]\"\r\nNCP-25.zip,2,NCP,3941,5538,38,1,[5538]\r\nCP-15.zip,1,CP,1558,4236,62,3,\"[4234, 4235, 4236]\"\r\nNCP-16.zip,2,NCP,443,2040,117,2,\"[2040, 2041]\"\r\nNormal-15.zip,0,Normal,2102,557,100,1,[557]\r\nNormal-2.zip,0,Normal,1755,1097,73,4,\"[1097, 1098, 1099, 1100]\"\r\nNormal-9.zip,0,Normal,1924,379,98,1,[379]\r\nCP-13.zip,1,CP,1517,4136,64,2,\"[4136, 4137]\"\r\nCP-1.zip,1,CP,1,3146,70,5,\"[3143, 3144, 3145, 3146, 3147]\"\r\nNormal-19.zip,0,Normal,2226,681,99,1,[681]\r\nCP-13.zip,1,CP,1517,4137,64,2,\"[4136, 4137]\"\r\nNCP-23.zip,2,NCP,95,1326,165,2,\"[1326, 1327]\"\r\nNCP-19.zip,2,NCP,538,2234,60,2,\"[2233, 2234]\"\r\nCP-6.zip,1,CP,1253,3471,130,1,[3471]\r\nNCP-7.zip,2,NCP,242,1629,133,2,\"[1629, 1630]\"\r\nCP-8.zip,1,CP,1337,3714,60,2,\"[3714, 3715]\"\r\nNCP-23.zip,2,NCP,912,2454,373,1,[2454]\r\nNormal-23.zip,0,Normal,2622,132,38,1,[132]\r\nNormal-8.zip,0,Normal,1871,326,73,1,[326]\r\nNCP-5.zip,2,NCP,193,1531,52,2,\"[1530, 
1531]\"\r\nNormal-24.zip,0,Normal,2646,156,41,1,[156]\r\nCP-14.zip,1,CP,1538,4185,159,3,\"[4185, 4186, 4187]\"\r\nCP-23.zip,1,CP,667,3029,226,1,[3029]\r\nCP-1.zip,1,CP,1,3147,70,5,\"[3143, 3144, 3145, 3146, 3147]\"\r\nNCP-27.zip,2,NCP,1006,2566,42,2,\"[2566, 2567]\"\r\nNormal-1.zip,0,Normal,1711,977,63,2,\"[977, 978]\"\r\nNCP-14.zip,2,NCP,374,1899,139,2,\"[1899, 1900]\"\r\nNCP-16.zip,2,NCP,457,2069,57,2,\"[2068, 2069]\"\r\nCP-22.zip,1,CP,634,2996,680,1,[2996]\r\nNCP-23.zip,2,NCP,905,2447,26,1,[2447]\r\nNormal-2.zip,0,Normal,1759,1118,65,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nNCP-11.zip,2,NCP,290,1725,103,2,\"[1725, 1726]\"\r\nNCP-21.zip,2,NCP,77,1288,53,2,\"[1287, 1288]\"\r\nCP-30.zip,1,CP,4018,5567,33,1,[5567]\r\nCP-12.zip,1,CP,1483,4052,62,3,\"[4050, 4051, 4052]\"\r\nCP-24.zip,1,CP,692,3054,74,1,[3054]\r\nNCP-6.zip,2,NCP,204,1552,139,2,\"[1552, 1553]\"\r\nNCP-7.zip,2,NCP,24,1179,146,2,\"[1179, 1180]\"\r\nCP-6.zip,1,CP,1251,3469,133,1,[3469]\r\nNormal-1.zip,0,Normal,1682,857,70,6,\"[847, 848, 852, 853, 857, 858]\"\r\nNCP-10.zip,2,NCP,2712,2705,42,1,[2705]\r\nCP-2.zip,1,CP,1100,3318,201,1,[3318]\r\nNormal-1.zip,0,Normal,1671,795,67,3,\"[793, 794, 795]\"\r\nNCP-17.zip,2,NCP,461,2077,67,2,\"[2076, 2077]\"\r\nCP-15.zip,1,CP,1564,4249,51,2,\"[4248, 4249]\"\r\nNCP-4.zip,2,NCP,153,1450,137,2,\"[1450, 1451]\"\r\nCP-4.zip,1,CP,1166,3384,202,1,[3384]\r\nNCP-28.zip,2,NCP,851,2370,145,1,[2370]\r\nNCP-23.zip,2,NCP,95,1327,69,2,\"[1326, 1327]\"\r\nNormal-18.zip,0,Normal,2196,651,95,1,[651]\r\nCP-27.zip,1,CP,3749,5693,20,1,[5693]\r\nNormal-6.zip,0,Normal,1797,252,85,1,[252]\r\nCP-14.zip,1,CP,1544,4203,122,3,\"[4203, 4204, 4205]\"\r\nCP-8.zip,1,CP,1345,3731,55,2,\"[3731, 3732]\"\r\nNCP-8.zip,2,NCP,2678,2649,55,1,[2649]\r\nNCP-23.zip,2,NCP,89,1315,66,4,\"[1311, 1312, 1313, 1315]\"\r\nNormal-17.zip,0,Normal,2167,622,76,1,[622]\r\nCP-22.zip,1,CP,631,2993,130,1,[2993]\r\nCP-16.zip,1,CP,1618,4306,26,1,[4306]\r\nNCP-17.zip,2,NCP,471,2097,139,2,\"[2097, 
2098]\"\r\nNCP-15.zip,2,NCP,416,1986,58,2,\"[1984, 1986]\"\r\nCP-10.zip,1,CP,1389,3833,121,3,\"[3833, 3834, 3835]\"\r\nCP-24.zip,1,CP,696,3058,74,1,[3058]\r\nNCP-26.zip,2,NCP,3996,5494,37,1,[5494]\r\nCP-15.zip,1,CP,1565,4251,66,2,\"[4250, 4251]\"\r\nNCP-7.zip,2,NCP,248,1643,58,2,\"[1642, 1643]\"\r\nNCP-30.zip,2,NCP,932,2474,20,1,[2474]\r\nCP-8.zip,1,CP,1332,3703,41,2,\"[3703, 3704]\"\r\nNormal-2.zip,0,Normal,1754,1093,73,4,\"[1093, 1094, 1095, 1096]\"\r\nNCP-3.zip,2,NCP,131,1408,50,2,\"[1407, 1408]\"\r\nNCP-13.zip,2,NCP,37,1206,147,2,\"[1206, 1207]\"\r\nNCP-7.zip,2,NCP,242,1630,56,2,\"[1629, 1630]\"\r\nCP-26.zip,1,CP,3643,5603,257,2,\"[5602, 5603]\"\r\nNormal-24.zip,0,Normal,2639,149,28,1,[149]\r\nNormal-13.zip,0,Normal,2037,492,82,1,[492]\r\nCP-16.zip,1,CP,1610,4298,22,1,[4298]\r\nNCP-15.zip,2,NCP,415,1982,149,2,\"[1982, 1983]\"\r\nNCP-2.zip,2,NCP,125,1394,55,5,\"[1391, 1392, 1393, 1394, 1395]\"\r\nNormal-23.zip,0,Normal,2616,126,39,1,[126]\r\nCP-26.zip,1,CP,3635,5594,291,1,[5594]\r\nNormal-18.zip,0,Normal,2211,666,85,1,[666]\r\nNCP-17.zip,2,NCP,481,2117,163,2,\"[2117, 2118]\"\r\nNCP-13.zip,2,NCP,37,1207,62,2,\"[1206, 1207]\"\r\nNormal-2.zip,0,Normal,1749,1070,61,4,\"[1069, 1070, 1071, 1072]\"\r\nNCP-29.zip,2,NCP,927,2469,20,1,[2469]\r\nCP-6.zip,1,CP,1226,3444,190,1,[3444]\r\nNCP-14.zip,2,NCP,394,1938,147,2,\"[1938, 1939]\"\r\nCP-19.zip,1,CP,1791,3212,71,4,\"[3210, 3211, 3212, 3213]\"\r\nCP-8.zip,1,CP,1334,3708,56,2,\"[3707, 3708]\"\r\nNCP-12.zip,2,NCP,324,1796,120,2,\"[1796, 1797]\"\r\nCP-30.zip,1,CP,3929,5626,71,2,\"[5626, 5627]\"\r\nNormal-7.zip,0,Normal,1832,287,91,1,[287]\r\nNormal-1.zip,0,Normal,1713,981,71,2,\"[980, 981]\"\r\nNCP-2.zip,2,NCP,111,1363,133,2,\"[1363, 1364]\"\r\nNormal-3.zip,0,Normal,1764,1144,66,4,\"[1143, 1144, 1145, 1146]\"\r\nCP-15.zip,1,CP,1560,4239,63,2,\"[4239, 4240]\"\r\nNCP-22.zip,2,NCP,84,1302,54,2,\"[1301, 1302]\"\r\nNormal-2.zip,0,Normal,1744,1059,71,2,\"[1058, 
1059]\"\r\nCP-21.zip,1,CP,590,2952,86,1,[2952]\r\nNormal-9.zip,0,Normal,1901,356,83,1,[356]\r\nNCP-17.zip,2,NCP,461,2076,160,2,\"[2076, 2077]\"\r\nCP-24.zip,1,CP,683,3045,138,1,[3045]\r\nNormal-11.zip,0,Normal,1983,438,105,1,[438]\r\nNCP-14.zip,2,NCP,39,1210,139,2,\"[1210, 1211]\"\r\nNCP-18.zip,2,NCP,494,2144,156,2,\"[2144, 2145]\"\r\nNCP-14.zip,2,NCP,388,1927,68,2,\"[1926, 1927]\"\r\nNCP-28.zip,2,NCP,853,2373,664,1,[2373]\r\nNormal-22.zip,0,Normal,2588,98,33,1,[98]\r\nNCP-17.zip,2,NCP,46,1225,124,2,\"[1225, 1226]\"\r\nNCP-2.zip,2,NCP,126,1396,152,2,\"[1396, 1398]\"\r\nNCP-15.zip,2,NCP,418,1990,58,2,\"[1989, 1990]\"\r\nNormal-3.zip,0,Normal,765,200,136,1,[200]\r\nCP-9.zip,1,CP,1370,3792,62,2,\"[3792, 3793]\"\r\nCP-13.zip,1,CP,1490,4071,166,3,\"[4071, 4072, 4073]\"\r\nCP-5.zip,1,CP,1212,3430,187,1,[3430]\r\nNCP-29.zip,2,NCP,894,2434,16,1,[2434]\r\nCP-19.zip,1,CP,1788,3199,58,8,\"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]\"\r\nCP-12.zip,1,CP,1466,4011,52,2,\"[4011, 4012]\"\r\nCP-1.zip,1,CP,1088,3223,50,4,\"[3220, 3221, 3222, 3223]\"\r\nNCP-25.zip,2,NCP,3947,5503,41,1,[5503]\r\nCP-30.zip,1,CP,3931,5632,143,4,\"[5630, 5631, 5632, 5633]\"\r\nNCP-2.zip,2,NCP,124,1389,139,2,\"[1389, 1390]\"\r\nCP-7.zip,1,CP,1307,3645,53,4,\"[3645, 3646, 3647, 3648]\"\r\nNCP-27.zip,2,NCP,823,2334,183,1,[2334]\r\nNormal-1.zip,0,Normal,1728,1016,72,4,\"[1013, 1014, 1015, 1016]\"\r\nNormal-2.zip,0,Normal,1754,1096,69,4,\"[1093, 1094, 1095, 1096]\"\r\nCP-12.zip,1,CP,1473,4026,51,3,\"[4026, 4027, 4028]\"\r\nNormal-3.zip,0,Normal,1764,1146,62,4,\"[1143, 1144, 1145, 1146]\"\r\nCP-2.zip,1,CP,1103,3321,180,1,[3321]\r\nCP-4.zip,1,CP,1181,3399,238,1,[3399]\r\nCP-19.zip,1,CP,2436,2904,138,1,[2904]\r\nCP-28.zip,1,CP,3795,5739,23,1,[5739]\r\nCP-29.zip,1,CP,3805,5749,20,1,[5749]\r\nNCP-3.zip,2,NCP,1300,2741,60,1,[2741]\r\nNCP-23.zip,2,NCP,898,2439,48,1,[2439]\r\nNormal-23.zip,0,Normal,2612,122,31,1,[122]\r\nNCP-7.zip,2,NCP,24,1180,61,2,\"[1179, 
1180]\"\r\nNormal-6.zip,0,Normal,1807,262,95,1,[262]\r\nNCP-30.zip,2,NCP,996,2551,189,2,\"[2551, 2552]\"\r\nNormal-9.zip,0,Normal,1893,348,82,1,[348]\r\nNCP-11.zip,2,NCP,290,1726,44,2,\"[1725, 1726]\"\r\nNCP-21.zip,2,NCP,80,1293,129,2,\"[1293, 1294]\"\r\nNormal-24.zip,0,Normal,2655,165,37,1,[165]\r\nNCP-30.zip,2,NCP,996,2552,218,2,\"[2551, 2552]\"\r\nCP-18.zip,1,CP,1653,4341,29,1,[4341]\r\nNCP-5.zip,2,NCP,187,1518,136,2,\"[1518, 1519]\"\r\nNCP-26.zip,2,NCP,3993,5517,39,1,[5517]\r\nNCP-10.zip,2,NCP,273,1692,128,2,\"[1692, 1693]\"\r\nNCP-5.zip,2,NCP,179,1502,122,2,\"[1503, 1502]\"\r\nNormal-26.zip,0,Normal,3887,5400,67,3,\"[5400, 5401, 5404]\"\r\nNCP-7.zip,2,NCP,234,1613,139,2,\"[1613, 1614]\"\r\nNormal-1.zip,0,Normal,1725,1006,60,1,[1006]\r\nNCP-15.zip,2,NCP,419,1992,55,2,\"[1991, 1992]\"\r\nCP-14.zip,1,CP,1523,4151,65,2,\"[4150, 4151]\"\r\nNCP-23.zip,2,NCP,938,2480,195,2,\"[2480, 2481]\"\r\nNCP-13.zip,2,NCP,342,1835,149,2,\"[1835, 1836]\"\r\nCP-24.zip,1,CP,680,3042,86,1,[3042]\r\nNCP-14.zip,2,NCP,394,1939,62,2,\"[1938, 1939]\"\r\nNCP-11.zip,2,NCP,288,1722,49,2,\"[1721, 1722]\"\r\nCP-14.zip,1,CP,1527,4162,58,3,\"[4160, 4161, 4162]\"\r\nCP-6.zip,1,CP,1241,3459,132,1,[3459]\r\nCP-10.zip,1,CP,1408,3878,198,3,\"[3878, 3879, 3880]\"\r\nNCP-14.zip,2,NCP,397,1945,66,2,\"[1944, 1945]\"\r\nCP-1.zip,1,CP,1,3145,248,5,\"[3143, 3144, 3145, 3146, 3147]\"\r\nNormal-15.zip,0,Normal,2111,566,95,1,[566]\r\nNormal-12.zip,0,Normal,2007,462,85,1,[462]\r\nNCP-6.zip,2,NCP,222,1589,52,2,\"[1588, 1589]\"\r\nNormal-25.zip,0,Normal,3856,5368,220,1,[5368]\r\nCP-6.zip,1,CP,1245,3463,306,1,[3463]\r\nCP-9.zip,1,CP,1380,3814,56,1,[3814]\r\nCP-11.zip,1,CP,1442,3955,58,3,\"[3954, 3955, 3956]\"\r\nNormal-26.zip,0,Normal,3889,5407,68,2,\"[5407, 5408]\"\r\nNormal-4.zip,0,Normal,773,208,321,1,[208]\r\nCP-23.zip,1,CP,671,3033,448,1,[3033]\r\nCP-23.zip,1,CP,674,3036,126,1,[3036]\r\nCP-19.zip,1,CP,1788,3200,54,8,\"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 
3203]\"\r\nNCP-12.zip,2,NCP,328,1804,112,2,\"[1804, 1805]\"\r\nNormal-22.zip,0,Normal,2581,91,44,1,[91]\r\nCP-7.zip,1,CP,1316,3669,62,3,\"[3667, 3668, 3669]\"\r\nNCP-1.zip,2,NCP,1046,2618,70,1,[2618]\r\nNCP-16.zip,2,NCP,456,2067,57,2,\"[2066, 2067]\"\r\nNormal-1.zip,0,Normal,1730,1020,63,5,\"[1019, 1020, 1021, 1022, 1023]\"\r\nNCP-17.zip,2,NCP,468,2092,65,2,\"[2091, 2092]\"\r\nNCP-7.zip,2,NCP,2488,2688,40,1,[2688]\r\nCP-10.zip,1,CP,1396,3852,58,3,\"[3851, 3852, 3853]\"\r\nNCP-16.zip,2,NCP,447,2049,58,2,\"[2048, 2049]\"\r\nNormal-8.zip,0,Normal,1864,319,88,1,[319]\r\nCP-15.zip,1,CP,1560,4240,63,2,\"[4239, 4240]\"\r\nCP-12.zip,1,CP,1484,4055,46,3,\"[4053, 4054, 4055]\"\r\nNormal-1.zip,0,Normal,1682,853,81,6,\"[847, 848, 852, 853, 857, 858]\"\r\nNormal-22.zip,0,Normal,2580,90,37,1,[90]\r\nCP-2.zip,1,CP,1128,3346,196,1,[3346]\r\nNCP-7.zip,2,NCP,240,1625,158,2,\"[1625, 1626]\"\r\nNormal-15.zip,0,Normal,2086,541,91,1,[541]\r\nNormal-7.zip,0,Normal,1837,292,94,1,[292]\r\nCP-1.zip,1,CP,1069,3111,77,4,\"[3108, 3109, 3110, 3111]\"\r\nCP-14.zip,1,CP,1549,4216,61,2,\"[4215, 4216]\"\r\nNormal-11.zip,0,Normal,1970,425,88,1,[425]\r\nNCP-13.zip,2,NCP,342,1836,61,2,\"[1835, 1836]\"\r\nCP-25.zip,1,CP,728,3090,86,1,[3090]\r\nNCP-21.zip,2,NCP,68,1268,115,2,\"[1268, 1269]\"\r\nCP-8.zip,1,CP,1342,3725,58,3,\"[3723, 3724, 3725]\"\r\nCP-12.zip,1,CP,1481,4046,58,3,\"[4044, 4045, 4046]\"\r\nCP-5.zip,1,CP,1210,3428,156,1,[3428]\r\nNCP-3.zip,2,NCP,136,1417,53,2,\"[1416, 1417]\"\r\nNCP-2.zip,2,NCP,125,1393,54,5,\"[1391, 1392, 1393, 1394, 1395]\"\r\nNCP-23.zip,2,NCP,97,1330,97,2,\"[1330, 1331]\"\r\nNCP-1.zip,2,NCP,1021,2588,209,4,\"[2587, 2588, 2589, 2590]\"\r\nNCP-12.zip,2,NCP,317,1782,50,2,\"[1781, 1782]\"\r\nNCP-14.zip,2,NCP,388,1926,162,2,\"[1926, 1927]\"\r\nCP-26.zip,1,CP,3641,5600,300,1,[5600]\r\nNormal-3.zip,0,Normal,760,195,117,1,[195]\r\nNCP-12.zip,2,NCP,325,1798,117,2,\"[1798, 1799]\"\r\nNormal-1.zip,0,Normal,1671,793,72,3,\"[793, 794, 
795]\"\r\nNormal-5.zip,0,Normal,807,242,132,1,[242]\r\nCP-19.zip,1,CP,1791,3211,55,4,\"[3210, 3211, 3212, 3213]\"\r\nNormal-4.zip,0,Normal,792,227,108,1,[227]\r\nCP-15.zip,1,CP,1564,4248,51,2,\"[4248, 4249]\"\r\nNCP-12.zip,2,NCP,324,1797,51,2,\"[1796, 1797]\"\r\nCP-13.zip,1,CP,1514,4130,61,2,\"[4129, 4130]\"\r\nCP-30.zip,1,CP,4013,5562,29,1,[5562]\r\nCP-7.zip,1,CP,13,3173,255,4,\"[3170, 3171, 3172, 3173]\"\r\nCP-5.zip,1,CP,1214,3432,282,1,[3432]\r\nNormal-8.zip,0,Normal,1878,333,88,1,[333]\r\nNormal-21.zip,0,Normal,2297,752,83,1,[752]\r\nCP-19.zip,1,CP,1789,3205,59,4,\"[3204, 3205, 3206, 3207]\"\r\nCP-4.zip,1,CP,1176,3394,161,1,[3394]\r\nCP-10.zip,1,CP,1397,3855,60,2,\"[3854, 3855]\"\r\nCP-16.zip,1,CP,1594,4282,26,1,[4282]\r\nCP-1.zip,1,CP,1077,3121,74,2,\"[3121, 3122]\"\r\nCP-29.zip,1,CP,3819,5763,31,1,[5763]\r\nCP-12.zip,1,CP,1468,4016,54,3,\"[4015, 4016, 4017]\"\r\nCP-3.zip,1,CP,1139,3357,332,1,[3357]\r\nNormal-14.zip,0,Normal,2070,525,104,1,[525]\r\nNormal-1.zip,0,Normal,1672,798,78,8,\"[796, 797, 798, 799, 800, 801, 802, 803]\"\r\nCP-11.zip,1,CP,1435,3939,46,2,\"[3938, 3939]\"\r\nCP-30.zip,1,CP,4019,5568,38,1,[5568]\r\nCP-18.zip,1,CP,1777,3540,67,5,\"[3540, 3541, 3542, 3543, 3544]\"\r\nCP-23.zip,1,CP,666,3028,192,1,[3028]\r\nNormal-1.zip,0,Normal,1703,959,70,2,\"[959, 960]\"\r\nCP-3.zip,1,CP,1133,3351,213,1,[3351]\r\nCP-13.zip,1,CP,1504,4107,64,1,[4107]\r\nNormal-3.zip,0,Normal,745,180,105,1,[180]\r\nNormal-26.zip,0,Normal,3869,5381,27,1,[5381]\r\nCP-18.zip,1,CP,1774,3528,58,8,\"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]\"\r\nNormal-21.zip,0,Normal,2301,756,88,1,[756]\r\nCP-18.zip,1,CP,1771,3519,51,4,\"[3518, 3519, 3520, 3521]\"\r\nCP-22.zip,1,CP,643,3005,126,1,[3005]\r\nCP-26.zip,1,CP,3723,5658,43,1,[5658]\r\nNormal-8.zip,0,Normal,1884,339,82,1,[339]\r\nCP-15.zip,1,CP,1586,4274,23,1,[4274]\r\nCP-8.zip,1,CP,1349,3743,58,3,\"[3742, 3743, 
3744]\"\r\nNormal-22.zip,0,Normal,2586,96,30,1,[96]\r\nNormal-4.zip,0,Normal,785,220,292,1,[220]\r\nCP-19.zip,1,CP,2428,2887,124,1,[2887]\r\nNCP-13.zip,2,NCP,352,1856,58,2,\"[1855, 1856]\"\r\nNCP-2.zip,2,NCP,109,1355,143,2,\"[1355, 1356]\"\r\nCP-13.zip,1,CP,1493,4080,125,3,\"[4080, 4081, 4082]\"\r\nCP-4.zip,1,CP,1191,3409,220,1,[3409]\r\nCP-17.zip,1,CP,1642,4330,25,1,[4330]\r\nCP-7.zip,1,CP,1304,3635,232,8,\"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]\"\r\nNCP-27.zip,2,NCP,1058,2635,46,1,[2635]\r\nNormal-14.zip,0,Normal,2071,526,103,1,[526]\r\nCP-26.zip,1,CP,3719,5650,55,3,\"[5649, 5650, 5651]\"\r\nNormal-24.zip,0,Normal,2663,173,48,1,[173]\r\nNCP-3.zip,2,NCP,1298,2739,60,1,[2739]\r\nCP-19.zip,1,CP,2430,2891,102,2,\"[2891, 2892]\"\r\nCP-12.zip,1,CP,1458,3993,69,3,\"[3992, 3993, 3994]\"\r\nNormal-1.zip,0,Normal,1677,823,64,4,\"[823, 824, 825, 826]\"\r\nCP-12.zip,1,CP,1469,4018,47,2,\"[4018, 4019]\"\r\nCP-7.zip,1,CP,1268,3486,336,1,[3486]\r\nNormal-18.zip,0,Normal,2203,658,75,1,[658]\r\nCP-21.zip,1,CP,593,2955,100,1,[2955]\r\nNormal-16.zip,0,Normal,2143,598,87,1,[598]\r\nNCP-20.zip,2,NCP,552,2261,146,2,\"[2261, 2262]\"\r\nNCP-11.zip,2,NCP,309,1766,69,2,\"[1766, 1765]\"\r\nNCP-19.zip,2,NCP,520,2197,55,2,\"[2196, 2197]\"\r\nCP-14.zip,1,CP,1550,4217,64,2,\"[4217, 4218]\"\r\nNCP-26.zip,2,NCP,3976,5484,32,1,[5484]\r\nNCP-31.zip,2,NCP,998,2555,44,1,[2555]\r\nNCP-2.zip,2,NCP,107,1351,146,2,\"[1351, 1352]\"\r\nNormal-16.zip,0,Normal,2136,591,83,1,[591]\r\nCP-12.zip,1,CP,1463,4006,49,2,\"[4005, 4006]\"\r\nNCP-4.zip,2,NCP,156,1457,58,2,\"[1456, 1457]\"\r\nNCP-1.zip,2,NCP,1002,2561,58,1,[2561]\r\nNormal-1.zip,0,Normal,1672,801,78,8,\"[796, 797, 798, 799, 800, 801, 802, 803]\"\r\nNormal-14.zip,0,Normal,2078,533,73,1,[533]\r\nNCP-5.zip,2,NCP,185,1514,121,2,\"[1514, 1515]\"\r\nCP-14.zip,1,CP,1530,4168,60,1,[4168]\r\nNCP-15.zip,2,NCP,413,1976,128,4,\"[1975, 1976, 1977, 
1979]\"\r\nCP-5.zip,1,CP,1224,3442,204,1,[3442]\r\nCP-5.zip,1,CP,1215,3433,165,1,[3433]\r\nNormal-26.zip,0,Normal,3886,5399,76,1,[5399]\r\nNormal-24.zip,0,Normal,2640,150,41,1,[150]\r\nNCP-28.zip,2,NCP,836,2351,52,1,[2351]\r\nNCP-4.zip,2,NCP,146,1436,123,2,\"[1436, 1437]\"\r\nNormal-17.zip,0,Normal,2155,610,89,1,[610]\r\nCP-30.zip,1,CP,3939,5547,38,1,[5547]\r\nCP-19.zip,1,CP,1784,3590,112,4,\"[3590, 3591, 3592, 3593]\"\r\nCP-10.zip,1,CP,1399,3859,45,2,\"[3858, 3859]\"\r\nNCP-19.zip,2,NCP,519,2194,126,2,\"[2194, 2195]\"\r\nNCP-11.zip,2,NCP,297,1739,144,2,\"[1739, 1741]\"\r\nNCP-22.zip,2,NCP,88,1309,170,2,\"[1309, 1310]\"\r\nCP-18.zip,1,CP,1778,3547,65,6,\"[3545, 3546, 3547, 3548, 3549, 3550]\"\r\nNCP-30.zip,2,NCP,968,2511,61,1,[2511]\r\nCP-9.zip,1,CP,1360,3769,67,3,\"[3767, 3768, 3769]\"\r\nCP-26.zip,1,CP,3638,5597,285,1,[5597]\r\nNCP-13.zip,2,NCP,353,1857,167,2,\"[1857, 1858]\"\r\nCP-30.zip,1,CP,3932,5634,71,2,\"[5634, 5635]\"\r\nNCP-21.zip,2,NCP,62,1257,144,2,\"[1257, 1258]\"\r\nCP-2.zip,1,CP,1127,3345,278,1,[3345]\r\nNCP-12.zip,2,NCP,337,1823,58,2,\"[1822, 1823]\"\r\nNCP-14.zip,2,NCP,390,1931,53,2,\"[1930, 1931]\"\r\nNCP-15.zip,2,NCP,417,1988,58,2,\"[1987, 1988]\"\r\nCP-24.zip,1,CP,689,3051,58,1,[3051]\r\nCP-9.zip,1,CP,1377,3808,58,2,\"[3808, 3809]\"\r\nCP-13.zip,1,CP,1505,4110,54,3,\"[4108, 4109, 4110]\"\r\nCP-13.zip,1,CP,1492,4078,58,3,\"[4077, 4078, 4079]\"\r\nNCP-4.zip,2,NCP,159,1463,61,2,\"[1462, 1463]\"\r\nNCP-6.zip,2,NCP,220,1585,67,2,\"[1584, 1585]\"\r\nNCP-29.zip,2,NCP,884,2421,23,1,[2421]\r\nNormal-3.zip,0,Normal,757,192,110,1,[192]\r\nCP-21.zip,1,CP,4,3505,298,4,\"[3505, 3506, 3507, 3508]\"\r\nCP-16.zip,1,CP,1608,4296,23,1,[4296]\r\nCP-4.zip,1,CP,1169,3387,171,1,[3387]\r\nNormal-4.zip,0,Normal,797,232,112,1,[232]\r\nNCP-19.zip,2,NCP,540,2238,54,2,\"[2237, 2238]\"\r\nNormal-14.zip,0,Normal,2068,523,81,1,[523]\r\nNormal-11.zip,0,Normal,1985,440,96,1,[440]\r\nCP-9.zip,1,CP,1353,3748,140,3,\"[3748, 3749, 3750]\"\r\nNCP-6.zip,2,NCP,224,1592,136,2,\"[1592, 
1593]\"\r\nCP-10.zip,1,CP,1397,3854,60,2,\"[3854, 3855]\"\r\nNCP-12.zip,2,NCP,318,1784,63,2,\"[1783, 1784]\"\r\nNCP-21.zip,2,NCP,59,1251,122,2,\"[1251, 1252]\"\r\nNormal-17.zip,0,Normal,2184,639,86,1,[639]\r\nNCP-18.zip,2,NCP,493,2143,56,2,\"[2142, 2143]\"\r\nNCP-25.zip,2,NCP,3954,5467,42,1,[5467]\r\nNormal-2.zip,0,Normal,1763,1137,70,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nCP-23.zip,1,CP,675,3037,124,1,[3037]\r\nCP-9.zip,1,CP,1365,3780,60,3,\"[3779, 3780, 3781]\"\r\nCP-6.zip,1,CP,1256,3474,140,1,[3474]\r\nNCP-16.zip,2,NCP,441,2037,49,2,\"[2036, 2037]\"\r\nNCP-7.zip,2,NCP,2484,2643,46,1,[2643]\r\nCP-20.zip,1,CP,2771,3302,37,1,[3302]\r\nNCP-10.zip,2,NCP,2714,2707,53,1,[2707]\r\nNormal-4.zip,0,Normal,772,207,363,1,[207]\r\nNCP-16.zip,2,NCP,440,2035,53,2,\"[2034, 2035]\"\r\nCP-17.zip,1,CP,1646,4334,26,1,[4334]\r\nNCP-11.zip,2,NCP,284,1713,139,2,\"[1713, 1714]\"\r\nCP-23.zip,1,CP,656,3018,575,1,[3018]\r\nCP-2.zip,1,CP,1104,3322,164,1,[3322]\r\nNCP-22.zip,2,NCP,85,1303,139,2,\"[1303, 1304]\"\r\nCP-30.zip,1,CP,3933,5637,38,2,\"[5636, 5637]\"\r\nNormal-7.zip,0,Normal,1839,294,94,1,[294]\r\nNCP-6.zip,2,NCP,223,1590,132,2,\"[1590, 1591]\"\r\nCP-2.zip,1,CP,1119,3337,157,1,[3337]\r\nCP-11.zip,1,CP,1431,3931,61,2,\"[3930, 3931]\"\r\nCP-7.zip,1,CP,1304,3634,47,8,\"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]\"\r\nNCP-11.zip,2,NCP,299,1745,58,2,\"[1744, 1745]\"\r\nNCP-15.zip,2,NCP,405,1960,60,2,\"[1959, 1960]\"\r\nNCP-20.zip,2,NCP,574,2307,58,2,\"[2306, 2307]\"\r\nCP-10.zip,1,CP,1412,3887,66,2,\"[3887, 3888]\"\r\nNCP-4.zip,2,NCP,167,1479,60,2,\"[1478, 1479]\"\r\nNCP-4.zip,2,NCP,157,1459,49,2,\"[1458, 1459]\"\r\nNCP-13.zip,2,NCP,349,1849,135,2,\"[1849, 1850]\"\r\nCP-18.zip,1,CP,1771,3520,51,4,\"[3518, 3519, 3520, 3521]\"\r\nNCP-14.zip,2,NCP,372,1895,109,2,\"[1895, 1896]\"\r\nNCP-18.zip,2,NCP,503,2162,146,2,\"[2162, 2163]\"\r\nNCP-6.zip,2,NCP,199,1543,58,2,\"[1542, 
1543]\"\r\nCP-18.zip,1,CP,1662,4350,19,1,[4350]\r\nCP-9.zip,1,CP,1377,3809,57,2,\"[3808, 3809]\"\r\nNormal-1.zip,0,Normal,1727,1009,63,4,\"[1009, 1010, 1011, 1012]\"\r\nNCP-20.zip,2,NCP,566,2290,160,2,\"[2290, 2291]\"\r\nCP-29.zip,1,CP,3821,5765,29,1,[5765]\r\nNCP-5.zip,2,NCP,190,1525,64,2,\"[1524, 1525]\"\r\nNormal-2.zip,0,Normal,1746,1064,68,2,\"[1063, 1064]\"\r\nCP-27.zip,1,CP,3744,5688,17,1,[5688]\r\nCP-2.zip,1,CP,1111,3329,204,1,[3329]\r\nNormal-10.zip,0,Normal,1948,403,98,1,[403]\r\nNCP-12.zip,2,NCP,338,1824,150,2,\"[1824, 1825]\"\r\nNCP-13.zip,2,NCP,348,1847,112,2,\"[1847, 1848]\"\r\nCP-24.zip,1,CP,700,3062,86,1,[3062]\r\nCP-18.zip,1,CP,1655,4343,23,1,[4343]\r\nCP-27.zip,1,CP,3736,5680,16,1,[5680]\r\nNormal-24.zip,0,Normal,2654,164,31,1,[164]\r\nNCP-13.zip,2,NCP,359,1869,145,2,\"[1869, 1870]\"\r\nNCP-16.zip,2,NCP,437,2027,142,2,\"[2027, 2028]\"\r\nCP-27.zip,1,CP,3741,5685,17,1,[5685]\r\nCP-24.zip,1,CP,693,3055,273,1,[3055]\r\nCP-24.zip,1,CP,682,3044,149,1,[3044]\r\nNormal-17.zip,0,Normal,2175,630,80,1,[630]\r\nNCP-6.zip,2,NCP,223,1591,56,2,\"[1590, 1591]\"\r\nNCP-2.zip,2,NCP,1051,2626,178,2,\"[2625, 2626]\"\r\nCP-11.zip,1,CP,1454,3982,125,3,\"[3982, 3983, 3984]\"\r\nNormal-20.zip,0,Normal,2253,708,70,1,[708]\r\nNormal-20.zip,0,Normal,2252,707,84,1,[707]\r\nNormal-21.zip,0,Normal,2308,763,85,1,[763]\r\nNCP-18.zip,2,NCP,516,2189,57,2,\"[2188, 2189]\"\r\nNCP-12.zip,2,NCP,313,1774,62,2,\"[1773, 1774]\"\r\nCP-2.zip,1,CP,1126,3344,204,1,[3344]\r\nNormal-20.zip,0,Normal,2257,712,83,1,[712]\r\nNCP-6.zip,2,NCP,203,1551,59,2,\"[1550, 1551]\"\r\nCP-13.zip,1,CP,1503,4106,64,3,\"[4104, 4105, 4106]\"\r\nNormal-20.zip,0,Normal,2280,735,82,1,[735]\r\nCP-19.zip,1,CP,2443,2915,112,3,\"[2915, 2916, 2917]\"\r\nCP-20.zip,1,CP,2451,2930,136,1,[2930]\r\nCP-1.zip,1,CP,1093,3311,173,1,[3311]\r\nCP-13.zip,1,CP,1518,4138,160,3,\"[4138, 4139, 4140]\"\r\nCP-20.zip,1,CP,2773,3304,30,1,[3304]\r\nNCP-15.zip,2,NCP,414,1981,51,2,\"[1980, 1981]\"\r\nNCP-23.zip,2,NCP,96,1328,145,2,\"[1328, 
1329]\"\r\nCP-11.zip,1,CP,1422,3909,59,3,\"[3908, 3909, 3910]\"\r\nNormal-20.zip,0,Normal,2258,713,74,1,[713]\r\nNCP-29.zip,2,NCP,882,2417,52,2,\"[2417, 2418]\"\r\nNormal-2.zip,0,Normal,1737,1038,79,4,\"[1037, 1038, 1039, 1040]\"\r\nNormal-13.zip,0,Normal,2025,480,101,1,[480]\r\nNCP-5.zip,2,NCP,173,1490,139,2,\"[1490, 1491]\"\r\nCP-6.zip,1,CP,1257,3475,155,1,[3475]\r\nNCP-23.zip,2,NCP,952,2495,379,1,[2495]\r\nNormal-1.zip,0,Normal,1700,954,64,2,\"[953, 954]\"\r\nNCP-17.zip,2,NCP,465,2085,31,3,\"[2084, 2085, 2086]\"\r\nNormal-16.zip,0,Normal,2122,577,85,1,[577]\r\nCP-13.zip,1,CP,1502,4102,73,2,\"[4102, 4103]\"\r\nNormal-17.zip,0,Normal,2153,608,82,1,[608]\r\nNormal-24.zip,0,Normal,2650,160,40,1,[160]\r\nNCP-27.zip,2,NCP,1031,2602,231,2,\"[2601, 2602]\"\r\nNCP-14.zip,2,NCP,393,1937,62,2,\"[1936, 1937]\"\r\nCP-5.zip,1,CP,12,3169,233,2,\"[3168, 3169]\"\r\nNormal-11.zip,0,Normal,1986,441,88,1,[441]\r\nCP-19.zip,1,CP,2433,2897,108,1,[2897]\r\nNCP-4.zip,2,NCP,151,1447,54,2,\"[1446, 1447]\"\r\nNCP-13.zip,2,NCP,370,1891,128,2,\"[1891, 1892]\"\r\nNormal-17.zip,0,Normal,2168,623,89,1,[623]\r\nNCP-29.zip,2,NCP,880,2415,312,1,[2415]\r\nNCP-12.zip,2,NCP,338,1825,63,2,\"[1824, 1825]\"\r\nNormal-23.zip,0,Normal,2634,144,37,1,[144]\r\nNCP-14.zip,2,NCP,396,1942,170,2,\"[1942, 1943]\"\r\nNCP-16.zip,2,NCP,439,2032,162,2,\"[2032, 2033]\"\r\nNCP-8.zip,2,NCP,266,1678,137,2,\"[1678, 1679]\"\r\nCP-11.zip,1,CP,1423,3911,204,3,\"[3911, 3912, 3913]\"\r\nCP-11.zip,1,CP,1454,3984,53,3,\"[3982, 3983, 3984]\"\r\nCP-28.zip,1,CP,3792,5736,20,1,[5736]\r\nNormal-1.zip,0,Normal,1727,1011,66,4,\"[1009, 1010, 1011, 1012]\"\r\nNormal-19.zip,0,Normal,2234,689,89,1,[689]\r\nNCP-13.zip,2,NCP,35,1203,58,2,\"[1202, 1203]\"\r\nNCP-18.zip,2,NCP,51,1236,59,2,\"[1235, 1236]\"\r\nNCP-2.zip,2,NCP,113,1368,58,2,\"[1367, 1368]\"\r\nNormal-2.zip,0,Normal,1757,1107,68,4,\"[1105, 1106, 1107, 1108]\"\r\nNCP-12.zip,2,NCP,319,1785,158,2,\"[1785, 
1787]\"\r\nNormal-22.zip,0,Normal,2322,777,88,1,[777]\r\nCP-21.zip,1,CP,584,2946,116,1,[2946]\r\nCP-9.zip,1,CP,1365,3781,60,3,\"[3779, 3780, 3781]\"\r\nNCP-12.zip,2,NCP,322,1792,120,2,\"[1792, 1793]\"\r\nNormal-2.zip,0,Normal,1763,1140,75,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nNCP-21.zip,2,NCP,59,1252,52,2,\"[1251, 1252]\"\r\nNCP-5.zip,2,NCP,170,1485,59,2,\"[1484, 1485]\"\r\nNCP-21.zip,2,NCP,72,1276,129,2,\"[1276, 1277]\"\r\nNCP-22.zip,2,NCP,887,2425,38,1,[2425]\r\nCP-2.zip,1,CP,1117,3335,155,1,[3335]\r\nNormal-2.zip,0,Normal,1763,1134,70,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nCP-18.zip,1,CP,1778,3550,64,6,\"[3545, 3546, 3547, 3548, 3549, 3550]\"\r\nCP-23.zip,1,CP,664,3026,78,1,[3026]\r\nCP-23.zip,1,CP,668,3030,102,1,[3030]\r\nNCP-13.zip,2,NCP,355,1862,53,2,\"[1861, 1862]\"\r\nNCP-13.zip,2,NCP,358,1867,160,2,\"[1867, 1868]\"\r\nCP-14.zip,1,CP,1550,4218,64,2,\"[4217, 4218]\"\r\nCP-26.zip,1,CP,3729,5667,207,3,\"[5665, 5666, 5667]\"\r\nCP-21.zip,1,CP,603,2965,88,1,[2965]\r\nNCP-13.zip,2,NCP,370,1892,54,2,\"[1891, 1892]\"\r\nNCP-13.zip,2,NCP,35,1202,139,2,\"[1202, 1203]\"\r\nCP-3.zip,1,CP,1155,3373,171,1,[3373]\r\nNormal-10.zip,0,Normal,1927,382,99,1,[382]\r\nCP-15.zip,1,CP,1574,4262,26,1,[4262]\r\nCP-13.zip,1,CP,1498,4096,60,2,\"[4095, 4096]\"\r\nNCP-6.zip,2,NCP,205,1555,53,2,\"[1554, 1555]\"\r\nNCP-11.zip,2,NCP,301,1748,147,2,\"[1748, 1749]\"\r\nNCP-11.zip,2,NCP,303,1752,139,2,\"[1752, 1753]\"\r\nCP-12.zip,1,CP,1468,4017,54,3,\"[4015, 4016, 4017]\"\r\nNormal-14.zip,0,Normal,2081,536,93,1,[536]\r\nNormal-2.zip,0,Normal,1763,1141,75,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nNCP-22.zip,2,NCP,859,2380,299,2,\"[2380, 2381]\"\r\nNormal-26.zip,0,Normal,3885,5398,63,1,[5398]\r\nCP-13.zip,1,CP,1505,4109,54,3,\"[4108, 4109, 4110]\"\r\nNCP-1.zip,2,NCP,103,1343,150,2,\"[1343, 1344]\"\r\nNCP-14.zip,2,NCP,396,1943,71,2,\"[1942, 
1943]\"\r\nNCP-22.zip,2,NCP,871,2402,293,2,\"[2401, 2402]\"\r\nNormal-10.zip,0,Normal,1951,406,105,1,[406]\r\nCP-11.zip,1,CP,1434,3936,63,2,\"[3936, 3937]\"\r\nCP-26.zip,1,CP,3724,5659,51,1,[5659]\r\nCP-12.zip,1,CP,1471,4022,56,2,\"[4022, 4023]\"\r\nNormal-21.zip,0,Normal,2304,759,110,1,[759]\r\nCP-28.zip,1,CP,3777,5721,26,1,[5721]\r\nNCP-28.zip,2,NCP,837,2352,57,1,[2352]\r\nNormal-2.zip,0,Normal,1763,1133,72,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nNormal-8.zip,0,Normal,1873,328,104,1,[328]\r\nCP-12.zip,1,CP,1458,3992,165,3,\"[3992, 3993, 3994]\"\r\nNCP-7.zip,2,NCP,230,1604,139,2,\"[1604, 1605]\"\r\nCP-30.zip,1,CP,4042,5591,37,1,[5591]\r\nNormal-4.zip,0,Normal,774,209,134,1,[209]\r\nNormal-19.zip,0,Normal,2228,683,85,1,[683]\r\nNormal-18.zip,0,Normal,2206,661,77,1,[661]\r\nCP-17.zip,1,CP,1628,4316,23,1,[4316]\r\nNormal-11.zip,0,Normal,1969,424,90,1,[424]\r\nNormal-20.zip,0,Normal,2259,714,97,1,[714]\r\nCP-17.zip,1,CP,1640,4328,25,1,[4328]\r\nNCP-8.zip,2,NCP,254,1654,139,2,\"[1654, 1655]\"\r\nNormal-16.zip,0,Normal,2140,595,88,1,[595]\r\nCP-6.zip,1,CP,1249,3467,144,1,[3467]\r\nNCP-23.zip,2,NCP,92,1321,37,2,\"[1320, 1321]\"\r\nCP-18.zip,1,CP,1657,4345,24,1,[4345]\r\nNCP-17.zip,2,NCP,484,2124,58,2,\"[2123, 2124]\"\r\nNormal-2.zip,0,Normal,1743,1057,73,2,\"[1056, 1057]\"\r\nCP-18.zip,1,CP,1778,3545,66,6,\"[3545, 3546, 3547, 3548, 3549, 3550]\"\r\nNCP-30.zip,2,NCP,966,2509,279,1,[2509]\r\nCP-9.zip,1,CP,1376,3807,60,2,\"[3806, 3807]\"\r\nNormal-1.zip,0,Normal,1716,987,71,2,\"[987, 988]\"\r\nCP-7.zip,1,CP,1302,3602,42,4,\"[3602, 3603, 3604, 3605]\"\r\nNCP-18.zip,2,NCP,50,1233,141,2,\"[1233, 1234]\"\r\nCP-32.zip,1,CP,1781,3572,65,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nNCP-5.zip,2,NCP,192,1528,135,2,\"[1528, 1529]\"\r\nNCP-7.zip,2,NCP,2489,2646,40,1,[2646]\r\nCP-11.zip,1,CP,1434,3937,63,2,\"[3936, 
3937]\"\r\nCP-23.zip,1,CP,645,3007,124,1,[3007]\r\nNormal-10.zip,0,Normal,1941,396,91,1,[396]\r\nNormal-12.zip,0,Normal,2001,456,86,1,[456]\r\nNormal-3.zip,0,Normal,761,196,120,1,[196]\r\nCP-7.zip,1,CP,1265,3483,166,1,[3483]\r\nNCP-3.zip,2,NCP,1287,2728,66,1,[2728]\r\nNCP-28.zip,2,NCP,835,2350,52,2,\"[2349, 2350]\"\r\nNCP-19.zip,2,NCP,543,2243,128,2,\"[2243, 2244]\"\r\nCP-21.zip,1,CP,4,3507,259,4,\"[3505, 3506, 3507, 3508]\"\r\nCP-17.zip,1,CP,1633,4321,26,1,[4321]\r\nNCP-20.zip,2,NCP,565,2289,57,2,\"[2288, 2289]\"\r\nNCP-22.zip,2,NCP,878,2412,46,2,\"[2412, 2413]\"\r\nCP-14.zip,1,CP,1520,4144,57,3,\"[4143, 4144, 4145]\"\r\nNormal-23.zip,0,Normal,2620,130,36,1,[130]\r\nNCP-23.zip,2,NCP,958,2501,133,1,[2501]\r\nCP-13.zip,1,CP,1513,4128,60,2,\"[4127, 4128]\"\r\nNCP-24.zip,2,NCP,98,1332,139,2,\"[1332, 1333]\"\r\nCP-9.zip,1,CP,1375,3804,60,2,\"[3804, 3805]\"\r\nNCP-2.zip,2,NCP,1051,2625,88,2,\"[2625, 2626]\"\r\nNCP-31.zip,2,NCP,999,2556,41,1,[2556]\r\nCP-18.zip,1,CP,1781,3575,78,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nNCP-10.zip,2,NCP,278,1703,57,2,\"[1702, 1703]\"\r\nNCP-12.zip,2,NCP,313,1773,147,2,\"[1773, 1774]\"\r\nNCP-14.zip,2,NCP,381,1915,60,2,\"[1914, 1915]\"\r\nNCP-11.zip,2,NCP,295,1735,236,2,\"[1735, 1736]\"\r\nCP-11.zip,1,CP,1440,3948,196,3,\"[3948, 3949, 3950]\"\r\nCP-19.zip,1,CP,1795,3597,41,2,\"[3596, 3597]\"\r\nCP-12.zip,1,CP,1467,4013,60,2,\"[4013, 4014]\"\r\nNCP-12.zip,2,NCP,322,1793,51,2,\"[1792, 1793]\"\r\nCP-9.zip,1,CP,1353,3750,59,3,\"[3748, 3749, 3750]\"\r\nCP-19.zip,1,CP,1784,3591,50,4,\"[3590, 3591, 3592, 3593]\"\r\nNCP-9.zip,2,NCP,2699,2665,51,1,[2665]\r\nNCP-12.zip,2,NCP,331,1810,158,2,\"[1810, 1811]\"\r\nNCP-12.zip,2,NCP,334,1817,59,2,\"[1816, 1817]\"\r\nNCP-1.zip,2,NCP,1009,2571,29,2,\"[2570, 2571]\"\r\nCP-30.zip,1,CP,4041,5590,31,1,[5590]\r\nCP-24.zip,1,CP,705,3067,168,1,[3067]\r\nNormal-24.zip,0,Normal,2665,175,33,1,[175]\r\nNCP-12.zip,2,NCP,332,1813,70,2,\"[1812, 
1813]\"\r\nCP-11.zip,1,CP,1444,3962,58,3,\"[3960, 3961, 3962]\"\r\nCP-22.zip,1,CP,614,2976,100,1,[2976]\r\nNormal-23.zip,0,Normal,2630,140,38,1,[140]\r\nNormal-8.zip,0,Normal,1876,331,97,1,[331]\r\nNCP-1.zip,2,NCP,1001,2559,141,1,[2559]\r\nNCP-22.zip,2,NCP,845,2361,148,4,\"[2360, 2361, 2362, 2363]\"\r\nCP-26.zip,1,CP,3646,5606,36,1,[5606]\r\nNormal-9.zip,0,Normal,1907,362,92,1,[362]\r\nNormal-1.zip,0,Normal,1672,800,78,8,\"[796, 797, 798, 799, 800, 801, 802, 803]\"\r\nNCP-12.zip,2,NCP,333,1815,68,2,\"[1814, 1815]\"\r\nCP-17.zip,1,CP,1634,4322,23,1,[4322]\r\nNormal-12.zip,0,Normal,2009,464,93,1,[464]\r\nCP-26.zip,1,CP,3731,5670,215,1,[5670]\r\nNormal-25.zip,0,Normal,3714,5344,22,1,[5344]\r\nNormal-19.zip,0,Normal,2231,686,85,1,[686]\r\nNCP-23.zip,2,NCP,940,2483,22,1,[2483]\r\nNormal-25.zip,0,Normal,3851,5363,201,1,[5363]\r\nNCP-6.zip,2,NCP,209,1562,139,2,\"[1562, 1563]\"\r\nNCP-13.zip,2,NCP,347,1846,53,2,\"[1845, 1846]\"\r\nNCP-11.zip,2,NCP,312,1772,62,2,\"[1771, 1772]\"\r\nCP-5.zip,1,CP,1196,3414,186,1,[3414]\r\nNCP-21.zip,2,NCP,74,1282,54,2,\"[1281, 1282]\"\r\nCP-23.zip,1,CP,662,3024,114,1,[3024]\r\nNCP-7.zip,2,NCP,23,1177,151,2,\"[1177, 1178]\"\r\nCP-16.zip,1,CP,1591,4279,23,1,[4279]\r\nNormal-12.zip,0,Normal,1995,450,95,1,[450]\r\nNormal-20.zip,0,Normal,2264,719,82,1,[719]\r\nNCP-30.zip,2,NCP,948,2491,365,1,[2491]\r\nNormal-12.zip,0,Normal,1998,453,99,1,[453]\r\nNCP-19.zip,2,NCP,522,2201,58,2,\"[2200, 2201]\"\r\nCP-13.zip,1,CP,1510,4121,60,2,\"[4121, 4122]\"\r\nNCP-15.zip,2,NCP,406,1962,61,2,\"[1961, 1962]\"\r\nNCP-4.zip,2,NCP,162,1468,148,2,\"[1468, 1469]\"\r\nCP-11.zip,1,CP,1431,3930,61,2,\"[3930, 3931]\"\r\nCP-15.zip,1,CP,1569,4257,20,1,[4257]\r\nCP-9.zip,1,CP,1379,3813,52,2,\"[3812, 3813]\"\r\nNCP-30.zip,2,NCP,981,2525,40,2,\"[2525, 2526]\"\r\nNCP-8.zip,2,NCP,2679,2650,42,1,[2650]\r\nNCP-25.zip,2,NCP,3951,5465,43,1,[5465]\r\nNCP-7.zip,2,NCP,2460,2684,36,1,[2684]\r\nCP-25.zip,1,CP,734,3096,106,1,[3096]\r\nNCP-6.zip,2,NCP,209,1563,58,2,\"[1562, 
1563]\"\r\nNormal-22.zip,0,Normal,2593,103,38,1,[103]\r\nNCP-16.zip,2,NCP,438,2029,149,2,\"[2029, 2030]\"\r\nCP-7.zip,1,CP,1304,3638,43,8,\"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]\"\r\nNormal-8.zip,0,Normal,1885,340,101,1,[340]\r\nNCP-17.zip,2,NCP,484,2123,137,2,\"[2123, 2124]\"\r\nNCP-20.zip,2,NCP,565,2288,135,2,\"[2288, 2289]\"\r\nNCP-5.zip,2,NCP,185,1515,51,2,\"[1514, 1515]\"\r\nNCP-29.zip,2,NCP,877,2411,65,1,[2411]\r\nNCP-6.zip,2,NCP,216,1577,58,2,\"[1576, 1577]\"\r\nNormal-24.zip,0,Normal,2658,168,37,1,[168]\r\nCP-28.zip,1,CP,3779,5723,26,1,[5723]\r\nNormal-15.zip,0,Normal,2090,545,83,1,[545]\r\nNormal-2.zip,0,Normal,1750,1077,69,3,\"[1074, 1077, 1078]\"\r\nNCP-24.zip,2,NCP,98,1333,58,2,\"[1332, 1333]\"\r\nCP-5.zip,1,CP,1199,3417,180,1,[3417]\r\nCP-3.zip,1,CP,1146,3364,161,1,[3364]\r\nCP-11.zip,1,CP,1449,3971,50,2,\"[3971, 3972]\"\r\nNormal-3.zip,0,Normal,1767,1154,66,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nNormal-22.zip,0,Normal,2585,95,41,1,[95]\r\nCP-29.zip,1,CP,3816,5760,29,1,[5760]\r\nNCP-21.zip,2,NCP,62,1258,60,2,\"[1257, 1258]\"\r\nNCP-2.zip,2,NCP,1056,2632,473,1,[2632]\r\nNCP-19.zip,2,NCP,525,2206,144,2,\"[2206, 2207]\"\r\nNormal-22.zip,0,Normal,2600,110,41,1,[110]\r\nCP-3.zip,1,CP,1161,3379,310,1,[3379]\r\nNCP-12.zip,2,NCP,316,1779,139,2,\"[1779, 1780]\"\r\nNCP-28.zip,2,NCP,868,2396,200,2,\"[2395, 2396]\"\r\nCP-7.zip,1,CP,1301,3600,52,4,\"[3598, 3599, 3600, 3601]\"\r\nNCP-11.zip,2,NCP,301,1749,62,2,\"[1748, 1749]\"\r\nNormal-9.zip,0,Normal,1917,372,96,1,[372]\r\nNCP-20.zip,2,NCP,571,2300,163,2,\"[2300, 2301]\"\r\nNormal-3.zip,0,Normal,1767,1152,68,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nNormal-1.zip,0,Normal,1716,988,71,2,\"[987, 988]\"\r\nNCP-28.zip,2,NCP,842,2357,42,1,[2357]\r\nNCP-27.zip,2,NCP,309,1765,162,2,\"[1766, 1765]\"\r\nCP-12.zip,1,CP,1479,4040,60,3,\"[4039, 4040, 4041]\"\r\nNCP-6.zip,2,NCP,22,1175,163,2,\"[1175, 
1176]\"\r\nNCP-28.zip,2,NCP,868,2395,51,2,\"[2395, 2396]\"\r\nCP-14.zip,1,CP,1532,4171,50,2,\"[4171, 4172]\"\r\nNormal-11.zip,0,Normal,1984,439,86,1,[439]\r\nNormal-24.zip,0,Normal,2643,153,39,1,[153]\r\nCP-20.zip,1,CP,2765,3296,42,1,[3296]\r\nNormal-2.zip,0,Normal,1763,1132,72,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nNCP-2.zip,2,NCP,109,1356,60,2,\"[1355, 1356]\"\r\nNCP-7.zip,2,NCP,241,1628,55,2,\"[1627, 1628]\"\r\nNormal-22.zip,0,Normal,2587,97,44,1,[97]\r\nCP-20.zip,1,CP,2753,3284,37,1,[3284]\r\nNormal-1.zip,0,Normal,1670,790,63,6,\"[787, 788, 789, 790, 791, 792]\"\r\nNormal-15.zip,0,Normal,2103,558,88,1,[558]\r\nCP-13.zip,1,CP,1503,4104,64,3,\"[4104, 4105, 4106]\"\r\nNormal-21.zip,0,Normal,2313,768,94,1,[768]\r\nCP-9.zip,1,CP,1382,3818,200,3,\"[3818, 3819, 3820]\"\r\nNormal-2.zip,0,Normal,1756,1102,64,4,\"[1101, 1102, 1103, 1104]\"\r\nNCP-12.zip,2,NCP,334,1816,140,2,\"[1816, 1817]\"\r\nCP-13.zip,1,CP,1518,4140,67,3,\"[4138, 4139, 4140]\"\r\nCP-13.zip,1,CP,1492,4077,139,3,\"[4077, 4078, 4079]\"\r\nNormal-11.zip,0,Normal,1982,437,99,1,[437]\r\nNCP-6.zip,2,NCP,213,1570,159,2,\"[1570, 1571]\"\r\nCP-18.zip,1,CP,1779,3551,59,2,\"[3551, 3552]\"\r\nNCP-12.zip,2,NCP,321,1790,122,2,\"[1790, 1791]\"\r\nNCP-4.zip,2,NCP,159,1462,144,2,\"[1462, 1463]\"\r\nCP-24.zip,1,CP,684,3046,161,1,[3046]\r\nCP-29.zip,1,CP,3828,5772,26,1,[5772]\r\nCP-12.zip,1,CP,1462,4004,51,3,\"[4002, 4003, 4004]\"\r\nNormal-1.zip,0,Normal,1707,969,65,2,\"[969, 970]\"\r\nCP-24.zip,1,CP,685,3047,168,1,[3047]\r\nNCP-16.zip,2,NCP,444,2043,61,2,\"[2042, 2043]\"\r\nCP-19.zip,1,CP,2430,2892,106,2,\"[2891, 2892]\"\r\nNormal-25.zip,0,Normal,3857,5369,222,1,[5369]\r\nCP-28.zip,1,CP,3774,5718,20,1,[5718]\r\nCP-21.zip,1,CP,591,2953,124,1,[2953]\r\nNormal-1.zip,0,Normal,1670,792,66,6,\"[787, 788, 789, 790, 791, 792]\"\r\nNCP-14.zip,2,NCP,387,1925,54,2,\"[1924, 1925]\"\r\nCP-10.zip,1,CP,14,3515,115,1,[3515]\r\nNCP-4.zip,2,NCP,16,1164,113,2,\"[1164, 
1165]\"\r\nNormal-17.zip,0,Normal,2162,617,96,1,[617]\r\nCP-13.zip,1,CP,1513,4127,60,2,\"[4127, 4128]\"\r\nNCP-11.zip,2,NCP,300,1746,139,2,\"[1746, 1747]\"\r\nNCP-21.zip,2,NCP,577,2312,61,2,\"[2311, 2312]\"\r\nNormal-8.zip,0,Normal,1875,330,93,1,[330]\r\nNormal-27.zip,0,Normal,3906,5439,62,1,[5439]\r\nNCP-7.zip,2,NCP,249,1645,58,2,\"[1644, 1645]\"\r\nNCP-20.zip,2,NCP,552,2262,61,2,\"[2261, 2262]\"\r\nNCP-9.zip,2,NCP,2701,2667,56,1,[2667]\r\nNCP-15.zip,2,NCP,417,1987,139,2,\"[1987, 1988]\"\r\nNCP-9.zip,2,NCP,2705,2671,56,1,[2671]\r\nNormal-3.zip,0,Normal,1767,1160,71,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nCP-15.zip,1,CP,1585,4273,23,1,[4273]\r\nCP-27.zip,1,CP,3742,5686,17,1,[5686]\r\nCP-14.zip,1,CP,1521,4146,57,2,\"[4146, 4147]\"\r\nNormal-1.zip,0,Normal,1703,960,70,2,\"[959, 960]\"\r\nCP-21.zip,1,CP,6,3510,36,1,[3510]\r\nNCP-19.zip,2,NCP,54,1242,62,2,\"[1241, 1242]\"\r\nNCP-5.zip,2,NCP,17,1166,143,2,\"[1166, 1167]\"\r\nNCP-15.zip,2,NCP,413,1977,47,4,\"[1975, 1976, 1977, 1979]\"\r\nNCP-22.zip,2,NCP,845,2360,53,4,\"[2360, 2361, 2362, 2363]\"\r\nNCP-2.zip,2,NCP,120,1381,139,2,\"[1381, 1382]\"\r\nCP-5.zip,1,CP,1207,3425,189,1,[3425]\r\nCP-27.zip,1,CP,3758,5702,23,1,[5702]\r\nCP-16.zip,1,CP,1592,4280,25,1,[4280]\r\nCP-21.zip,1,CP,4,3506,275,4,\"[3505, 3506, 3507, 3508]\"\r\nNCP-21.zip,2,NCP,72,1277,55,2,\"[1276, 1277]\"\r\nNCP-17.zip,2,NCP,475,2105,156,2,\"[2105, 2106]\"\r\nNCP-13.zip,2,NCP,358,1868,67,2,\"[1867, 1868]\"\r\nNormal-3.zip,0,Normal,764,199,130,1,[199]\r\nCP-9.zip,1,CP,1358,3763,63,3,\"[3761, 3762, 3763]\"\r\nNCP-4.zip,2,NCP,169,1483,56,2,\"[1482, 1483]\"\r\nNormal-1.zip,0,Normal,1707,970,65,2,\"[969, 970]\"\r\nNCP-18.zip,2,NCP,502,2160,140,2,\"[2160, 2161]\"\r\nCP-18.zip,1,CP,1781,3568,67,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nNCP-10.zip,2,NCP,2727,2683,44,1,[2683]\r\nCP-26.zip,1,CP,3719,5651,277,3,\"[5649, 5650, 5651]\"\r\nCP-11.zip,1,CP,1422,3910,58,3,\"[3908, 3909, 
3910]\"\r\nNCP-4.zip,2,NCP,168,1480,139,2,\"[1480, 1481]\"\r\nCP-8.zip,1,CP,1329,3695,89,3,\"[3695, 3696, 3697]\"\r\nCP-12.zip,1,CP,1463,4005,49,2,\"[4005, 4006]\"\r\nNormal-27.zip,0,Normal,3915,5458,70,1,[5458]\r\nNormal-18.zip,0,Normal,2209,664,82,1,[664]\r\nCP-13.zip,1,CP,1492,4079,58,3,\"[4077, 4078, 4079]\"\r\nCP-30.zip,1,CP,3830,5774,29,1,[5774]\r\nCP-8.zip,1,CP,1329,3696,45,3,\"[3695, 3696, 3697]\"\r\nNormal-16.zip,0,Normal,2139,594,87,1,[594]\r\nNCP-14.zip,2,NCP,393,1936,149,2,\"[1936, 1937]\"\r\nCP-21.zip,1,CP,4,3508,290,4,\"[3505, 3506, 3507, 3508]\"\r\nNormal-2.zip,0,Normal,1737,1037,79,4,\"[1037, 1038, 1039, 1040]\"\r\nNCP-25.zip,2,NCP,3708,5535,59,1,[5535]\r\nCP-7.zip,1,CP,1301,3601,276,4,\"[3598, 3599, 3600, 3601]\"\r\nNCP-7.zip,2,NCP,249,1644,139,2,\"[1644, 1645]\"\r\nNCP-12.zip,2,NCP,339,1827,51,2,\"[1826, 1827]\"\r\nNCP-2.zip,2,NCP,1275,2716,68,1,[2716]\r\nNCP-13.zip,2,NCP,354,1860,73,2,\"[1859, 1860]\"\r\nNormal-2.zip,0,Normal,1757,1105,71,4,\"[1105, 1106, 1107, 1108]\"\r\nNCP-27.zip,2,NCP,1016,2582,108,3,\"[2580, 2581, 2582]\"\r\nCP-18.zip,1,CP,1777,3541,62,5,\"[3540, 3541, 3542, 3543, 3544]\"\r\nNCP-1.zip,2,NCP,1008,2569,387,1,[2569]\r\nCP-7.zip,1,CP,1315,3665,59,2,\"[3665, 3666]\"\r\nCP-27.zip,1,CP,3737,5681,17,1,[5681]\r\nNormal-9.zip,0,Normal,1914,369,88,1,[369]\r\nNormal-1.zip,0,Normal,1672,802,75,8,\"[796, 797, 798, 799, 800, 801, 802, 803]\"\r\nNCP-8.zip,2,NCP,25,1181,129,2,\"[1181, 1183]\"\r\nCP-19.zip,1,CP,1789,3207,64,4,\"[3204, 3205, 3206, 3207]\"\r\nCP-11.zip,1,CP,1444,3960,139,3,\"[3960, 3961, 3962]\"\r\nNCP-4.zip,2,NCP,145,1435,58,2,\"[1434, 1435]\"\r\nCP-23.zip,1,CP,659,3021,594,1,[3021]\r\nNormal-25.zip,0,Normal,3716,5346,31,1,[5346]\r\nNormal-10.zip,0,Normal,1936,391,82,1,[391]\r\nNCP-22.zip,2,NCP,821,2331,30,1,[2331]\r\nCP-13.zip,1,CP,1505,4108,54,3,\"[4108, 4109, 4110]\"\r\nNCP-15.zip,2,NCP,411,1972,62,2,\"[1971, 1972]\"\r\nCP-7.zip,1,CP,1304,3633,18,8,\"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 
3639]\"\r\nNCP-27.zip,2,NCP,1048,2621,44,2,\"[2620, 2621]\"\r\nCP-21.zip,1,CP,595,2957,306,1,[2957]\r\nNCP-22.zip,2,NCP,861,2384,197,1,[2384]\r\nCP-7.zip,1,CP,1302,3604,39,4,\"[3602, 3603, 3604, 3605]\"\r\nNCP-17.zip,2,NCP,472,2099,151,2,\"[2099, 2100]\"\r\nNCP-8.zip,2,NCP,26,1185,36,2,\"[1184, 1185]\"\r\nNormal-27.zip,0,Normal,3903,5435,75,1,[5435]\r\nNormal-25.zip,0,Normal,3840,5352,210,1,[5352]\r\nNCP-8.zip,2,NCP,266,1679,58,2,\"[1678, 1679]\"\r\nNormal-16.zip,0,Normal,2120,575,84,1,[575]\r\nNormal-16.zip,0,Normal,2128,583,76,1,[583]\r\nCP-11.zip,1,CP,1449,3972,50,2,\"[3971, 3972]\"\r\nCP-7.zip,1,CP,1304,3636,47,8,\"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]\"\r\nNormal-22.zip,0,Normal,2597,107,41,1,[107]\r\nNCP-10.zip,2,NCP,2726,2682,50,1,[2682]\r\nNormal-7.zip,0,Normal,1849,304,87,1,[304]\r\nNormal-13.zip,0,Normal,2040,495,95,1,[495]\r\nNormal-16.zip,0,Normal,2125,580,83,1,[580]\r\nCP-25.zip,1,CP,740,3102,193,1,[3102]\r\nNCP-22.zip,2,NCP,871,2401,281,2,\"[2401, 2402]\"\r\nNCP-9.zip,2,NCP,2704,2670,56,1,[2670]\r\nNCP-12.zip,2,NCP,33,1198,147,2,\"[1198, 1199]\"\r\nCP-18.zip,1,CP,1663,4351,26,1,[4351]\r\nNormal-3.zip,0,Normal,1767,1157,28,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nNormal-2.zip,0,Normal,1735,1031,76,2,\"[1030, 1031]\"\r\nNormal-10.zip,0,Normal,1938,393,66,1,[393]\r\nNCP-24.zip,2,NCP,975,2518,484,1,[2518]\r\nCP-18.zip,1,CP,1774,3523,65,8,\"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]\"\r\nNCP-14.zip,2,NCP,381,1914,143,2,\"[1914, 1915]\"\r\nNCP-12.zip,2,NCP,33,1199,62,2,\"[1198, 1199]\"\r\nNCP-13.zip,2,NCP,352,1855,138,2,\"[1855, 1856]\"\r\nNCP-12.zip,2,NCP,333,1814,162,2,\"[1814, 1815]\"\r\nNCP-23.zip,2,NCP,904,2446,667,1,[2446]\r\nNCP-24.zip,2,NCP,985,2531,508,1,[2531]\r\nNCP-6.zip,2,NCP,228,1600,161,2,\"[1600, 1601]\"\r\nNCP-15.zip,2,NCP,414,1980,121,2,\"[1980, 1981]\"\r\nNCP-1.zip,2,NCP,103,1344,63,2,\"[1343, 1344]\"\r\nNormal-3.zip,0,Normal,1767,1155,66,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 
1158, 1159, 1160, 1161, 1162, 1163]\"\r\nCP-8.zip,1,CP,1349,3744,58,3,\"[3742, 3743, 3744]\"\r\nNCP-8.zip,2,NCP,261,1669,65,2,\"[1668, 1669]\"\r\nNormal-21.zip,0,Normal,2300,755,98,1,[755]\r\nNCP-13.zip,2,NCP,354,1859,177,2,\"[1859, 1860]\"\r\nCP-23.zip,1,CP,665,3027,116,1,[3027]\r\nCP-15.zip,1,CP,1561,4242,49,2,\"[4241, 4242]\"\r\nCP-9.zip,1,CP,1376,3806,60,2,\"[3806, 3807]\"\r\nNormal-1.zip,0,Normal,1727,1012,66,4,\"[1009, 1010, 1011, 1012]\"\r\nNCP-28.zip,2,NCP,835,2349,46,2,\"[2349, 2350]\"\r\nCP-8.zip,1,CP,1349,3742,142,3,\"[3742, 3743, 3744]\"\r\nNormal-20.zip,0,Normal,2277,732,95,1,[732]\r\nNCP-28.zip,2,NCP,876,2409,52,1,[2409]\r\nNormal-15.zip,0,Normal,2101,556,85,1,[556]\r\nCP-11.zip,1,CP,1444,3961,58,3,\"[3960, 3961, 3962]\"\r\nNCP-2.zip,2,NCP,1276,2717,61,1,[2717]\r\nNormal-3.zip,0,Normal,1767,1153,68,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nNormal-13.zip,0,Normal,2051,506,86,1,[506]\r\nNormal-2.zip,0,Normal,1734,1029,66,2,\"[1028, 1029]\"\r\nNormal-26.zip,0,Normal,3871,5383,22,1,[5383]\r\nNCP-1.zip,2,NCP,1009,2570,39,2,\"[2570, 2571]\"\r\nNormal-2.zip,0,Normal,1763,1139,65,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nNCP-13.zip,2,NCP,359,1870,61,2,\"[1869, 1870]\"\r\nNormal-19.zip,0,Normal,2220,675,78,1,[675]\r\nCP-9.zip,1,CP,1382,3819,60,3,\"[3818, 3819, 3820]\"\r\nCP-20.zip,1,CP,2752,3283,26,1,[3283]\r\nCP-13.zip,1,CP,1510,4122,60,2,\"[4121, 4122]\"\r\nNCP-16.zip,2,NCP,440,2034,125,2,\"[2034, 2035]\"\r\nCP-12.zip,1,CP,1458,3994,69,3,\"[3992, 3993, 3994]\"\r\nNCP-11.zip,2,NCP,284,1714,58,2,\"[1713, 1714]\"\r\nNCP-11.zip,2,NCP,303,1753,58,2,\"[1752, 1753]\"\r\nNCP-6.zip,2,NCP,205,1554,126,2,\"[1554, 1555]\"\r\nCP-14.zip,1,CP,1535,4179,53,2,\"[4178, 4179]\"\r\nNormal-27.zip,0,Normal,3910,5446,66,2,\"[5445, 5446]\"\r\nNormal-3.zip,0,Normal,742,177,107,1,[177]\r\nNormal-22.zip,0,Normal,2589,99,37,1,[99]\r\nNCP-22.zip,2,NCP,88,1310,71,2,\"[1309, 
1310]\"\r\nCP-14.zip,1,CP,1521,4147,57,2,\"[4146, 4147]\"\r\nCP-26.zip,1,CP,3729,5666,179,3,\"[5665, 5666, 5667]\"\r\nCP-28.zip,1,CP,3793,5737,29,1,[5737]\r\nNormal-3.zip,0,Normal,767,202,358,1,[202]\r\nNCP-5.zip,2,NCP,198,1540,144,2,\"[1540, 1541]\"\r\nCP-27.zip,1,CP,3738,5682,19,1,[5682]\r\nCP-27.zip,1,CP,3750,5694,28,1,[5694]\r\nCP-10.zip,1,CP,1416,3898,58,2,\"[3897, 3898]\"\r\nCP-8.zip,1,CP,1322,3680,56,2,\"[3680, 3681]\"\r\nNormal-23.zip,0,Normal,2607,117,38,1,[117]\r\nNCP-3.zip,2,NCP,138,1420,124,2,\"[1420, 1421]\"\r\nCP-11.zip,1,CP,1425,3916,185,3,\"[3916, 3917, 3918]\"\r\nCP-15.zip,1,CP,1581,4269,19,1,[4269]\r\nCP-24.zip,1,CP,706,3068,124,1,[3068]\r\nCP-18.zip,1,CP,1666,4354,23,1,[4354]\r\nNCP-4.zip,2,NCP,161,1466,135,2,\"[1466, 1467]\"\r\nNormal-7.zip,0,Normal,1847,302,102,1,[302]\r\nCP-19.zip,1,CP,1784,3593,69,4,\"[3590, 3591, 3592, 3593]\"\r\nCP-21.zip,1,CP,605,2967,157,1,[2967]\r\nCP-5.zip,1,CP,12,3168,291,2,\"[3168, 3169]\"\r\nNormal-9.zip,0,Normal,1909,364,102,1,[364]\r\nNCP-22.zip,2,NCP,850,2369,52,1,[2369]\r\nCP-24.zip,1,CP,687,3049,135,1,[3049]\r\nNCP-1.zip,2,NCP,1033,2604,39,1,[2604]\r\nNormal-2.zip,0,Normal,1750,1074,65,3,\"[1074, 1077, 1078]\"\r\nCP-9.zip,1,CP,1365,3779,200,3,\"[3779, 3780, 3781]\"\r\nNCP-18.zip,2,NCP,502,2161,59,2,\"[2160, 2161]\"\r\nNormal-3.zip,0,Normal,1767,1162,76,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nNormal-1.zip,0,Normal,1672,799,78,8,\"[796, 797, 798, 799, 800, 801, 802, 803]\"\r\nNormal-3.zip,0,Normal,747,182,100,1,[182]\r\nNCP-12.zip,2,NCP,319,1787,66,2,\"[1785, 1787]\"\r\nNCP-15.zip,2,NCP,405,1959,143,2,\"[1959, 1960]\"\r\nCP-18.zip,1,CP,1781,3574,64,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nCP-21.zip,1,CP,600,2962,202,1,[2962]\r\nCP-12.zip,1,CP,1479,4039,60,3,\"[4039, 4040, 4041]\"\r\nNCP-27.zip,2,NCP,827,2340,173,1,[2340]\r\nNCP-24.zip,2,NCP,983,2528,67,1,[2528]\r\nCP-11.zip,1,CP,1424,3915,60,2,\"[3914, 
3915]\"\r\nCP-2.zip,1,CP,1105,3323,220,1,[3323]\r\nCP-10.zip,1,CP,1412,3888,66,2,\"[3887, 3888]\"\r\nNCP-18.zip,2,NCP,495,2147,65,2,\"[2146, 2147]\"\r\nNCP-3.zip,2,NCP,134,1412,128,2,\"[1412, 1413]\"\r\nNormal-10.zip,0,Normal,1940,395,74,1,[395]\r\nNormal-17.zip,0,Normal,2163,618,89,1,[618]\r\nCP-9.zip,1,CP,1358,3761,249,3,\"[3761, 3762, 3763]\"\r\nCP-23.zip,1,CP,658,3020,273,1,[3020]\r\nNCP-12.zip,2,NCP,341,1830,129,3,\"[1830, 1832, 1834]\"\r\nCP-14.zip,1,CP,1520,4145,57,3,\"[4143, 4144, 4145]\"\r\nCP-19.zip,1,CP,1783,3588,62,2,\"[3588, 3589]\"\r\nNormal-3.zip,0,Normal,1767,1158,66,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nCP-7.zip,1,CP,1301,3598,55,4,\"[3598, 3599, 3600, 3601]\"\r\nNormal-6.zip,0,Normal,1810,265,85,1,[265]\r\nNCP-12.zip,2,NCP,321,1791,51,2,\"[1790, 1791]\"\r\nNCP-12.zip,2,NCP,341,1834,54,3,\"[1830, 1832, 1834]\"\r\nCP-11.zip,1,CP,1435,3938,46,2,\"[3938, 3939]\"\r\nNormal-26.zip,0,Normal,3876,5388,30,1,[5388]\r\nNormal-16.zip,0,Normal,2123,578,90,1,[578]\r\nNormal-6.zip,0,Normal,1816,271,76,1,[271]\r\nNCP-26.zip,2,NCP,3992,5516,48,1,[5516]\r\nCP-18.zip,1,CP,1777,3544,66,5,\"[3540, 3541, 3542, 3543, 3544]\"\r\nNCP-5.zip,2,NCP,173,1491,58,2,\"[1490, 1491]\"\r\nNCP-11.zip,2,NCP,312,1771,148,2,\"[1771, 1772]\"\r\nNCP-19.zip,2,NCP,525,2207,61,2,\"[2206, 2207]\"\r\nNormal-3.zip,0,Normal,752,187,103,1,[187]\r\nNCP-7.zip,2,NCP,23,1178,63,2,\"[1177, 1178]\"\r\nCP-27.zip,1,CP,3762,5706,26,1,[5706]\r\nCP-18.zip,1,CP,1659,4347,26,1,[4347]\r\nCP-20.zip,1,CP,2667,3248,46,3,\"[3246, 3247, 3248]\"\r\nNormal-24.zip,0,Normal,2653,163,39,1,[163]\r\nNormal-4.zip,0,Normal,801,236,107,1,[236]\r\nNormal-20.zip,0,Normal,2272,727,79,1,[727]\r\nNCP-30.zip,2,NCP,988,2539,56,2,\"[2538, 2539]\"\r\nCP-18.zip,1,CP,1774,3527,58,8,\"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]\"\r\nNormal-17.zip,0,Normal,2165,620,95,1,[620]\r\nCP-12.zip,1,CP,1479,4041,60,3,\"[4039, 4040, 
4041]\"\r\nNormal-21.zip,0,Normal,2299,754,90,1,[754]\r\nCP-22.zip,1,CP,637,2999,118,1,[2999]\r\nNCP-6.zip,2,NCP,217,1578,139,2,\"[1578, 1579]\"\r\nCP-30.zip,1,CP,3919,5544,73,4,\"[5543, 5544, 5545, 5546]\"\r\nCP-13.zip,1,CP,1511,4123,57,2,\"[4123, 4124]\"\r\nNormal-13.zip,0,Normal,2035,490,82,1,[490]\r\nCP-10.zip,1,CP,1417,3899,59,1,[3899]\r\nNCP-8.zip,2,NCP,261,1668,155,2,\"[1668, 1669]\"\r\nCP-20.zip,1,CP,2667,3247,92,3,\"[3246, 3247, 3248]\"\r\nCP-26.zip,1,CP,3636,5595,290,1,[5595]\r\nNormal-2.zip,0,Normal,1763,1136,70,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nNormal-9.zip,0,Normal,1913,368,88,1,[368]\r\nCP-9.zip,1,CP,1375,3805,58,2,\"[3804, 3805]\"\r\nCP-16.zip,1,CP,1606,4294,26,1,[4294]\r\nCP-18.zip,1,CP,1777,3543,68,5,\"[3540, 3541, 3542, 3543, 3544]\"\r\nNormal-21.zip,0,Normal,2287,742,77,1,[742]\r\nCP-11.zip,1,CP,1422,3908,140,3,\"[3908, 3909, 3910]\"\r\nNCP-22.zip,2,NCP,859,2381,268,2,\"[2380, 2381]\"\r\nNormal-24.zip,0,Normal,2645,155,38,1,[155]\r\nCP-7.zip,1,CP,1302,3605,201,4,\"[3602, 3603, 3604, 3605]\"\r\nCP-23.zip,1,CP,646,3008,128,1,[3008]\r\nCP-11.zip,1,CP,1425,3918,49,3,\"[3916, 3917, 3918]\"\r\nCP-18.zip,1,CP,1781,3569,65,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nNCP-16.zip,2,NCP,436,2025,146,2,\"[2025, 2026]\"\r\nNCP-18.zip,2,NCP,503,2163,61,2,\"[2162, 2163]\"\r\nNCP-4.zip,2,NCP,167,1478,143,2,\"[1478, 1479]\"\r\nNormal-26.zip,0,Normal,3880,5392,32,1,[5392]\r\nNCP-25.zip,2,NCP,3709,5536,65,1,[5536]\r\nNormal-2.zip,0,Normal,1734,1028,66,2,\"[1028, 1029]\"\r\nNormal-17.zip,0,Normal,2169,624,92,1,[624]\r\nNCP-20.zip,2,NCP,546,2249,134,2,\"[2249, 2250]\"\r\nNCP-4.zip,2,NCP,146,1437,52,2,\"[1436, 1437]\"\r\nNCP-26.zip,2,NCP,3995,5493,47,1,[5493]\r\nCP-20.zip,1,CP,2763,3294,119,1,[3294]\r\nNCP-13.zip,2,NCP,349,1850,57,2,\"[1849, 1850]\"\r\nCP-26.zip,1,CP,3644,5604,284,1,[5604]\r\nCP-8.zip,1,CP,1327,3690,253,3,\"[3690, 3691, 
3692]\"\r\nCP-20.zip,1,CP,2770,3301,38,1,[3301]\r\nCP-12.zip,1,CP,1471,4023,55,2,\"[4022, 4023]\"\r\nNormal-27.zip,0,Normal,3912,5453,68,1,[5453]\r\nNCP-23.zip,2,NCP,93,1322,157,2,\"[1322, 1323]\"\r\nCP-18.zip,1,CP,1781,3576,64,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nNCP-13.zip,2,NCP,347,1845,126,2,\"[1845, 1846]\"\r\nCP-20.zip,1,CP,2454,2935,120,2,\"[2935, 2936]\"\r\nNormal-1.zip,0,Normal,1670,788,58,6,\"[787, 788, 789, 790, 791, 792]\"\r\nNormal-8.zip,0,Normal,1880,335,83,1,[335]\r\nNormal-10.zip,0,Normal,1937,392,90,1,[392]\r\nCP-20.zip,1,CP,2768,3299,38,1,[3299]\r\nNormal-18.zip,0,Normal,2212,667,89,1,[667]\r\nNormal-1.zip,0,Normal,1677,826,65,4,\"[823, 824, 825, 826]\"\r\nCP-26.zip,1,CP,3721,5654,43,2,\"[5654, 5655]\"\r\nNCP-16.zip,2,NCP,439,2033,66,2,\"[2032, 2033]\"\r\nNormal-13.zip,0,Normal,2031,486,81,1,[486]\r\nCP-19.zip,1,CP,1783,3589,62,2,\"[3588, 3589]\"\r\nCP-2.zip,1,CP,1121,3339,156,1,[3339]\r\nCP-22.zip,1,CP,612,2974,84,1,[2974]\r\nNormal-26.zip,0,Normal,3867,5379,29,1,[5379]\r\nNCP-1.zip,2,NCP,102,1342,56,2,\"[1341, 1342]\"\r\nNCP-18.zip,2,NCP,493,2142,133,2,\"[2142, 2143]\"\r\nNCP-12.zip,2,NCP,339,1826,120,2,\"[1826, 1827]\"\r\nNormal-14.zip,0,Normal,2085,540,95,1,[540]\r\nNCP-27.zip,2,NCP,238,1622,57,2,\"[1621, 1622]\"\r\nNormal-2.zip,0,Normal,1737,1039,80,4,\"[1037, 1038, 1039, 1040]\"\r\nCP-30.zip,1,CP,3919,5546,70,4,\"[5543, 5544, 5545, 5546]\"\r\nNCP-1.zip,2,NCP,1012,2576,249,1,[2576]\r\nNCP-17.zip,2,NCP,463,2080,144,2,\"[2080, 2081]\"\r\nNCP-2.zip,2,NCP,127,1400,58,2,\"[1399, 1400]\"\r\nNormal-21.zip,0,Normal,2291,746,96,1,[746]\r\nNCP-8.zip,2,NCP,25,1183,45,2,\"[1181, 1183]\"\r\nCP-9.zip,1,CP,1382,3820,60,3,\"[3818, 3819, 3820]\"\r\nNCP-30.zip,2,NCP,967,2510,168,1,[2510]\r\nNormal-27.zip,0,Normal,3910,5445,66,2,\"[5445, 5446]\"\r\nNCP-4.zip,2,NCP,156,1456,138,2,\"[1456, 1457]\"\r\nCP-12.zip,1,CP,1464,4007,63,2,\"[4007, 4008]\"\r\nNCP-4.zip,2,NCP,162,1469,62,2,\"[1468, 
1469]\"\r\nCP-13.zip,1,CP,1493,4081,53,3,\"[4080, 4081, 4082]\"\r\nCP-16.zip,1,CP,1602,4290,17,1,[4290]\r\nNCP-6.zip,2,NCP,216,1576,139,2,\"[1576, 1577]\"\r\nCP-25.zip,1,CP,723,3085,104,1,[3085]\r\nNCP-15.zip,2,NCP,411,1971,149,2,\"[1971, 1972]\"\r\nNCP-15.zip,2,NCP,425,2003,139,2,\"[2003, 2004]\"\r\nCP-24.zip,1,CP,688,3050,127,1,[3050]\r\nNormal-13.zip,0,Normal,2033,488,77,1,[488]\r\nNCP-23.zip,2,NCP,96,1329,61,2,\"[1328, 1329]\"\r\nNormal-5.zip,0,Normal,803,238,343,1,[238]\r\nCP-16.zip,1,CP,1595,4283,23,1,[4283]\r\nNCP-27.zip,2,NCP,238,1621,134,2,\"[1621, 1622]\"\r\nNCP-19.zip,2,NCP,529,2214,141,3,\"[2214, 2215, 2217]\"\r\nCP-25.zip,1,CP,710,3072,78,1,[3072]\r\nNormal-19.zip,0,Normal,2243,698,86,1,[698]\r\nCP-11.zip,1,CP,1440,3949,51,3,\"[3948, 3949, 3950]\"\r\nCP-7.zip,1,CP,1260,3478,235,1,[3478]\r\nNormal-1.zip,0,Normal,1672,797,76,8,\"[796, 797, 798, 799, 800, 801, 802, 803]\"\r\nCP-26.zip,1,CP,3719,5649,52,3,\"[5649, 5650, 5651]\"\r\nNCP-23.zip,2,NCP,969,2512,68,1,[2512]\r\nNCP-5.zip,2,NCP,186,1516,113,2,\"[1516, 1517]\"\r\nCP-13.zip,1,CP,1507,4114,62,2,\"[4113, 4114]\"\r\nCP-19.zip,1,CP,2443,2916,310,3,\"[2915, 2916, 2917]\"\r\nCP-13.zip,1,CP,1503,4105,64,3,\"[4104, 4105, 4106]\"\r\nNormal-10.zip,0,Normal,1934,389,85,1,[389]\r\nCP-20.zip,1,CP,2760,3291,281,1,[3291]\r\nNormal-19.zip,0,Normal,2242,697,86,1,[697]\r\nNCP-22.zip,2,NCP,864,2388,214,2,\"[2388, 2389]\"\r\nNCP-14.zip,2,NCP,377,1906,147,2,\"[1906, 1907]\"\r\nCP-29.zip,1,CP,3818,5762,29,1,[5762]\r\nCP-23.zip,1,CP,676,3038,291,1,[3038]\r\nNCP-14.zip,2,NCP,389,1928,150,2,\"[1928, 1929]\"\r\nCP-27.zip,1,CP,3761,5705,16,1,[5705]\r\nNCP-27.zip,2,NCP,1016,2581,179,3,\"[2580, 2581, 2582]\"\r\nNormal-22.zip,0,Normal,2321,776,90,1,[776]\r\nCP-7.zip,1,CP,1304,3639,212,8,\"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]\"\r\nNCP-16.zip,2,NCP,438,2030,62,2,\"[2029, 2030]\"\r\nNCP-2.zip,2,NCP,107,1352,61,2,\"[1351, 1352]\"\r\nNCP-11.zip,2,NCP,295,1736,97,2,\"[1735, 
1736]\"\r\nCP-2.zip,1,CP,1122,3340,229,1,[3340]\r\nNormal-25.zip,0,Normal,3849,5361,205,1,[5361]\r\nCP-4.zip,1,CP,1189,3407,284,1,[3407]\r\nNCP-4.zip,2,NCP,152,1449,61,2,\"[1448, 1449]\"\r\nNormal-13.zip,0,Normal,2044,499,103,1,[499]\r\nNormal-2.zip,0,Normal,1756,1103,65,4,\"[1101, 1102, 1103, 1104]\"\r\nCP-9.zip,1,CP,1379,3812,52,2,\"[3812, 3813]\"\r\nCP-20.zip,1,CP,2454,2936,116,2,\"[2935, 2936]\"\r\nNCP-3.zip,2,NCP,1294,2735,62,1,[2735]\r\nCP-6.zip,1,CP,1230,3448,37,1,[3448]\r\nNormal-5.zip,0,Normal,815,250,120,1,[250]\r\nCP-13.zip,1,CP,1488,4066,66,3,\"[4064, 4065, 4066]\"\r\nNCP-7.zip,2,NCP,241,1627,131,2,\"[1627, 1628]\"\r\nNCP-6.zip,2,NCP,220,1584,160,2,\"[1584, 1585]\"\r\nNCP-30.zip,2,NCP,982,2527,242,1,[2527]\r\nNormal-2.zip,0,Normal,1735,1030,76,2,\"[1030, 1031]\"\r\nCP-18.zip,1,CP,1781,3573,65,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nCP-26.zip,1,CP,3642,5601,29,1,[5601]\r\nNCP-5.zip,2,NCP,186,1517,48,2,\"[1516, 1517]\"\r\nNormal-7.zip,0,Normal,1846,301,105,1,[301]\r\nCP-6.zip,1,CP,1252,3470,180,1,[3470]\r\nNCP-8.zip,2,NCP,254,1655,58,2,\"[1654, 1655]\"\r\nNCP-17.zip,2,NCP,460,2075,45,2,\"[2074, 2075]\"\r\nNCP-3.zip,2,NCP,138,1421,52,2,\"[1420, 1421]\"\r\nCP-29.zip,1,CP,3798,5742,21,1,[5742]\r\nNCP-14.zip,2,NCP,389,1929,63,2,\"[1928, 1929]\"\r\nNCP-22.zip,2,NCP,858,2379,52,1,[2379]\r\nNCP-10.zip,2,NCP,2721,2677,37,1,[2677]\r\nNCP-29.zip,2,NCP,882,2418,257,2,\"[2417, 2418]\"\r\nNCP-18.zip,2,NCP,495,2146,156,2,\"[2146, 2147]\"\r\nNormal-18.zip,0,Normal,2210,665,88,1,[665]\r\nCP-7.zip,1,CP,1304,3632,18,8,\"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]\"\r\nNCP-18.zip,2,NCP,512,2180,149,2,\"[2180, 2181]\"\r\nNormal-1.zip,0,Normal,1672,803,75,8,\"[796, 797, 798, 799, 800, 801, 802, 803]\"\r\nCP-21.zip,1,CP,2774,3305,31,1,[3305]\r\nCP-9.zip,1,CP,1372,3797,193,3,\"[3797, 3798, 3799]\"\r\nCP-22.zip,1,CP,615,2977,104,1,[2977]\r\nCP-12.zip,1,CP,1469,4019,47,2,\"[4018, 4019]\"\r\nCP-18.zip,1,CP,1774,3522,65,8,\"[3522, 
3523, 3524, 3525, 3526, 3527, 3528, 3529]\"\r\nNCP-17.zip,2,NCP,472,2100,63,2,\"[2099, 2100]\"\r\nNormal-14.zip,0,Normal,2069,524,81,1,[524]\r\nCP-18.zip,1,CP,1774,3529,58,8,\"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]\"\r\nNCP-27.zip,2,NCP,1031,2601,216,2,\"[2601, 2602]\"\r\nNCP-22.zip,2,NCP,857,2378,53,1,[2378]\r\nNormal-3.zip,0,Normal,1767,1156,139,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nNormal-24.zip,0,Normal,2641,151,41,1,[151]\r\nNCP-9.zip,2,NCP,2696,2662,44,1,[2662]\r\nCP-17.zip,1,CP,1620,4308,24,1,[4308]\r\nNCP-4.zip,2,NCP,149,1443,66,2,\"[1442, 1443]\"\r\nCP-13.zip,1,CP,1488,4064,158,3,\"[4064, 4065, 4066]\"\r\nNormal-22.zip,0,Normal,2315,770,82,1,[770]\r\nNCP-12.zip,2,NCP,316,1780,58,2,\"[1779, 1780]\"\r\nCP-9.zip,1,CP,1360,3767,67,3,\"[3767, 3768, 3769]\"\r\nNCP-18.zip,2,NCP,512,2181,62,2,\"[2180, 2181]\"\r\nNCP-20.zip,2,NCP,547,2252,66,2,\"[2251, 2252]\"\r\nNormal-10.zip,0,Normal,1942,397,81,1,[397]\r\nNCP-5.zip,2,NCP,198,1541,60,2,\"[1540, 1541]\"\r\nNCP-6.zip,2,NCP,199,1542,138,2,\"[1542, 1543]\"\r\nCP-17.zip,1,CP,1631,4319,23,1,[4319]\r\nNCP-13.zip,2,NCP,353,1858,69,2,\"[1857, 1858]\"\r\nNCP-17.zip,2,NCP,463,2081,60,2,\"[2080, 2081]\"\r\nNCP-1.zip,2,NCP,1019,2585,363,1,[2585]\r\nNCP-22.zip,2,NCP,845,2362,48,4,\"[2360, 2361, 2362, 2363]\"\r\nNCP-15.zip,2,NCP,425,2004,58,2,\"[2003, 2004]\"\r\nNCP-28.zip,2,NCP,873,2405,52,2,\"[2405, 2406]\"\r\nNCP-4.zip,2,NCP,152,1448,145,2,\"[1448, 1449]\"\r\nNCP-19.zip,2,NCP,543,2244,54,2,\"[2243, 2244]\"\r\nNormal-14.zip,0,Normal,2062,517,84,1,[517]\r\nNCP-17.zip,2,NCP,465,2086,61,3,\"[2084, 2085, 2086]\"\r\nNormal-25.zip,0,Normal,3717,5347,25,1,[5347]\r\nCP-4.zip,1,CP,1178,3396,133,1,[3396]\r\nCP-22.zip,1,CP,620,2982,64,1,[2982]\r\nNormal-1.zip,0,Normal,1677,825,65,4,\"[823, 824, 825, 826]\"\r\nNormal-9.zip,0,Normal,1908,363,81,1,[363]\r\nCP-30.zip,1,CP,3940,5646,33,1,[5646]\r\nNCP-30.zip,2,NCP,942,2485,45,1,[2485]\r\nCP-18.zip,1,CP,1781,3578,62,12,\"[3568, 3569, 
3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nCP-9.zip,1,CP,1358,3762,126,3,\"[3761, 3762, 3763]\"\r\nCP-27.zip,1,CP,3764,5708,23,1,[5708]\r\nNCP-8.zip,2,NCP,2673,2692,48,1,[2692]\r\nNCP-19.zip,2,NCP,534,2226,49,2,\"[2225, 2226]\"\r\nCP-11.zip,1,CP,1440,3950,51,3,\"[3948, 3949, 3950]\"\r\nNCP-17.zip,2,NCP,465,2084,145,3,\"[2084, 2085, 2086]\"\r\nNCP-19.zip,2,NCP,522,2200,137,2,\"[2200, 2201]\"\r\nCP-12.zip,1,CP,1468,4015,54,3,\"[4015, 4016, 4017]\"\r\nCP-13.zip,1,CP,1498,4095,60,2,\"[4095, 4096]\"\r\nCP-18.zip,1,CP,1778,3548,65,6,\"[3545, 3546, 3547, 3548, 3549, 3550]\"\r\nNormal-1.zip,0,Normal,1670,791,66,6,\"[787, 788, 789, 790, 791, 792]\"\r\nCP-16.zip,1,CP,1611,4299,19,1,[4299]\r\nNormal-14.zip,0,Normal,2080,535,100,1,[535]\r\nNCP-25.zip,2,NCP,3968,5477,44,1,[5477]\r\nNormal-3.zip,0,Normal,755,190,107,1,[190]\r\nNormal-16.zip,0,Normal,2151,606,93,1,[606]\r\nNCP-4.zip,2,NCP,168,1481,58,2,\"[1480, 1481]\"\r\nNormal-21.zip,0,Normal,2289,744,77,1,[744]\r\nNCP-6.zip,2,NCP,224,1593,57,2,\"[1592, 1593]\"\r\nCP-13.zip,1,CP,1502,4103,73,2,\"[4102, 4103]\"\r\nNCP-22.zip,2,NCP,865,2390,34,2,\"[2390, 2391]\"\r\nCP-28.zip,1,CP,3787,5731,27,1,[5731]\r\nNCP-5.zip,2,NCP,170,1484,141,2,\"[1484, 1485]\"\r\nNormal-20.zip,0,Normal,2271,726,81,1,[726]\r\nNCP-7.zip,2,NCP,2485,2644,46,1,[2644]\r\nNCP-17.zip,2,NCP,475,2106,63,2,\"[2105, 2106]\"\r\nNCP-21.zip,2,NCP,74,1281,127,2,\"[1281, 1282]\"\r\nCP-13.zip,1,CP,1507,4113,62,2,\"[4113, 4114]\"\r\nCP-18.zip,1,CP,1781,3570,62,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nCP-12.zip,1,CP,1462,4002,193,3,\"[4002, 4003, 4004]\"\r\nNormal-1.zip,0,Normal,1672,796,76,8,\"[796, 797, 798, 799, 800, 801, 802, 803]\"\r\nNCP-12.zip,2,NCP,337,1822,139,2,\"[1822, 1823]\"\r\nCP-9.zip,1,CP,1353,3749,60,3,\"[3748, 3749, 3750]\"\r\nNormal-15.zip,0,Normal,2087,542,83,1,[542]\r\nNCP-12.zip,2,NCP,331,1811,66,2,\"[1810, 
1811]\"\r\nCP-22.zip,1,CP,617,2979,110,1,[2979]\r\nCP-18.zip,1,CP,1771,3518,51,4,\"[3518, 3519, 3520, 3521]\"\r\nCP-26.zip,1,CP,3730,5668,212,2,\"[5668, 5669]\"\r\nNormal-24.zip,0,Normal,2660,170,38,1,[170]\r\nNormal-11.zip,0,Normal,1967,422,97,1,[422]\r\nNCP-4.zip,2,NCP,149,1442,159,2,\"[1442, 1443]\"\r\nCP-30.zip,1,CP,3834,5778,26,1,[5778]\r\nNCP-19.zip,2,NCP,540,2237,127,2,\"[2237, 2238]\"\r\nNormal-26.zip,0,Normal,3862,5374,188,1,[5374]\r\nNormal-7.zip,0,Normal,1842,297,77,1,[297]\r\nNormal-26.zip,0,Normal,3868,5380,30,1,[5380]\r\nNormal-12.zip,0,Normal,2003,458,85,1,[458]\r\nNCP-5.zip,2,NCP,17,1167,58,2,\"[1166, 1167]\"\r\nNCP-2.zip,2,NCP,117,1375,130,2,\"[1375, 1376]\"\r\nCP-13.zip,1,CP,1511,4124,57,2,\"[4123, 4124]\"\r\nCP-18.zip,1,CP,1778,3546,66,6,\"[3545, 3546, 3547, 3548, 3549, 3550]\"\r\nNCP-19.zip,2,NCP,529,2217,58,3,\"[2214, 2215, 2217]\"\r\nCP-14.zip,1,CP,1520,4143,57,3,\"[4143, 4144, 4145]\"\r\nNormal-16.zip,0,Normal,2131,586,95,1,[586]\r\nNCP-28.zip,2,NCP,873,2406,228,2,\"[2405, 2406]\"\r\nNCP-3.zip,2,NCP,137,1418,126,2,\"[1418, 1419]\"\r\nNCP-10.zip,2,NCP,279,1705,58,2,\"[1704, 1705]\"\r\nCP-28.zip,1,CP,3796,5740,28,1,[5740]\r\nNCP-19.zip,2,NCP,54,1241,147,2,\"[1241, 1242]\"\r\nCP-28.zip,1,CP,3768,5712,19,1,[5712]\r\nNCP-2.zip,2,NCP,120,1382,58,2,\"[1381, 1382]\"\r\nCP-16.zip,1,CP,1603,4291,22,1,[4291]\r\nCP-2.zip,1,CP,1118,3336,173,1,[3336]\r\nNCP-30.zip,2,NCP,939,2482,49,1,[2482]\r\nNormal-8.zip,0,Normal,1874,329,90,1,[329]\r\nNormal-3.zip,0,Normal,746,181,110,1,[181]\r\nCP-21.zip,1,CP,608,2970,86,1,[2970]\r\nNormal-22.zip,0,Normal,2601,111,37,1,[111]\r\nNCP-4.zip,2,NCP,16,1165,48,2,\"[1164, 1165]\"\r\nNCP-1.zip,2,NCP,1036,2607,441,1,[2607]\r\nNCP-19.zip,2,NCP,528,2213,59,2,\"[2212, 2213]\"\r\nNCP-6.zip,2,NCP,217,1579,58,2,\"[1578, 1579]\"\r\nCP-10.zip,1,CP,1416,3897,58,2,\"[3897, 3898]\"\r\nCP-30.zip,1,CP,4043,5592,41,1,[5592]\r\nCP-30.zip,1,CP,3933,5636,69,2,\"[5636, 5637]\"\r\nCP-20.zip,1,CP,2667,3246,24,3,\"[3246, 3247, 
3248]\"\r\nNormal-1.zip,0,Normal,1677,824,64,4,\"[823, 824, 825, 826]\"\r\nCP-18.zip,1,CP,1779,3552,59,2,\"[3551, 3552]\"\r\nNormal-25.zip,0,Normal,3855,5367,209,1,[5367]\r\nCP-24.zip,1,CP,691,3053,72,1,[3053]\r\nCP-6.zip,1,CP,1239,3457,134,1,[3457]\r\nCP-21.zip,1,CP,602,2964,84,1,[2964]\r\nNCP-1.zip,2,NCP,105,1348,61,2,\"[1347, 1348]\"\r\nCP-3.zip,1,CP,1151,3369,158,1,[3369]\r\nNCP-15.zip,2,NCP,413,1975,110,4,\"[1975, 1976, 1977, 1979]\"\r\nCP-8.zip,1,CP,1327,3691,64,3,\"[3690, 3691, 3692]\"\r\nCP-6.zip,1,CP,1237,3455,178,1,[3455]\r\nNormal-11.zip,0,Normal,1959,414,97,1,[414]\r\nNormal-25.zip,0,Normal,3713,5343,27,1,[5343]\r\nCP-21.zip,1,CP,597,2959,305,1,[2959]\r\nCP-9.zip,1,CP,1356,3757,60,2,\"[3756, 3757]\"\r\nNCP-7.zip,2,NCP,2483,2686,40,1,[2686]\r\nNCP-27.zip,2,NCP,1048,2620,58,2,\"[2620, 2621]\"\r\nNormal-3.zip,0,Normal,1767,1159,28,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nCP-5.zip,1,CP,1219,3437,179,1,[3437]\r\nNCP-4.zip,2,NCP,145,1434,139,2,\"[1434, 1435]\"\r\nCP-15.zip,1,CP,1575,4263,20,1,[4263]\r\nNCP-18.zip,2,NCP,516,2188,135,2,\"[2188, 2189]\"\r\nCP-9.zip,1,CP,1360,3768,67,3,\"[3767, 3768, 3769]\"\r\nCP-13.zip,1,CP,1488,4065,66,3,\"[4064, 4065, 4066]\"\r\nCP-1.zip,1,CP,1077,3122,74,2,\"[3121, 3122]\"\r\nNormal-14.zip,0,Normal,2084,539,92,1,[539]\r\nNormal-3.zip,0,Normal,1767,1163,76,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nNormal-2.zip,0,Normal,1746,1063,68,2,\"[1063, 1064]\"\r\nNCP-12.zip,2,NCP,332,1812,167,2,\"[1812, 1813]\"\r\nNormal-12.zip,0,Normal,1990,445,97,1,[445]\r\nCP-7.zip,1,CP,1301,3599,294,4,\"[3598, 3599, 3600, 3601]\"\r\nCP-1.zip,1,CP,1070,3112,104,1,[3112]\r\nCP-13.zip,1,CP,1493,4082,53,3,\"[4080, 4081, 4082]\"\r\nNCP-19.zip,2,NCP,520,2196,129,2,\"[2196, 2197]\"\r\nNCP-3.zip,2,NCP,137,1419,53,2,\"[1418, 1419]\"\r\nNCP-30.zip,2,NCP,937,2479,22,1,[2479]\r\nNCP-22.zip,2,NCP,865,2391,260,2,\"[2390, 2391]\"\r\nNCP-7.zip,2,NCP,230,1605,58,2,\"[1604, 
1605]\"\r\nCP-7.zip,1,CP,1302,3603,207,4,\"[3602, 3603, 3604, 3605]\"\r\nCP-16.zip,1,CP,1588,4276,20,1,[4276]\r\nNormal-18.zip,0,Normal,2195,650,79,1,[650]\r\nNormal-17.zip,0,Normal,2173,628,96,1,[628]\r\nNCP-22.zip,2,NCP,878,2413,117,2,\"[2412, 2413]\"\r\nNormal-18.zip,0,Normal,2188,643,88,1,[643]\r\nCP-18.zip,1,CP,1774,3526,66,8,\"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]\"\r\nNormal-6.zip,0,Normal,1815,270,91,1,[270]\r\nCP-5.zip,1,CP,1208,3426,321,1,[3426]\r\nNCP-6.zip,2,NCP,22,1176,68,2,\"[1175, 1176]\"\r\nNCP-15.zip,2,NCP,413,1979,54,4,\"[1975, 1976, 1977, 1979]\"\r\nCP-18.zip,1,CP,1771,3521,51,4,\"[3518, 3519, 3520, 3521]\"\r\nCP-4.zip,1,CP,1172,3390,195,1,[3390]\r\nCP-26.zip,1,CP,3721,5655,206,2,\"[5654, 5655]\"\r\nCP-27.zip,1,CP,3754,5698,21,1,[5698]\r\nCP-19.zip,1,CP,1784,3592,69,4,\"[3590, 3591, 3592, 3593]\"\r\nCP-9.zip,1,CP,1372,3799,49,3,\"[3797, 3798, 3799]\"\r\nNCP-2.zip,2,NCP,113,1367,137,2,\"[1367, 1368]\"\r\nNormal-22.zip,0,Normal,2318,773,105,1,[773]\r\nCP-18.zip,1,CP,1770,3517,57,1,[3517]\r\nNormal-21.zip,0,Normal,2293,748,88,1,[748]\r\nNormal-22.zip,0,Normal,2595,105,43,1,[105]\r\nNCP-18.zip,2,NCP,50,1234,59,2,\"[1233, 1234]\"\r\nNormal-2.zip,0,Normal,1757,1106,71,4,\"[1105, 1106, 1107, 1108]\"\r\nCP-8.zip,1,CP,1327,3692,64,3,\"[3690, 3691, 3692]\"\r\nCP-18.zip,1,CP,1781,3577,62,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nCP-30.zip,1,CP,3932,5635,67,2,\"[5634, 5635]\"\r\nNCP-20.zip,2,NCP,566,2291,67,2,\"[2290, 2291]\"\r\nNCP-10.zip,2,NCP,2715,2708,51,1,[2708]\r\nCP-23.zip,1,CP,660,3022,82,1,[3022]\r\nNormal-9.zip,0,Normal,1916,371,106,1,[371]\r\nCP-20.zip,1,CP,2757,3288,211,1,[3288]\r\nNormal-7.zip,0,Normal,1845,300,99,1,[300]\r\nNormal-13.zip,0,Normal,2050,505,74,1,[505]\r\nCP-1.zip,1,CP,1092,3310,216,1,[3310]\r\nNormal-2.zip,0,Normal,1763,1135,70,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 
1142]\"\r\nNormal-9.zip,0,Normal,1898,353,72,1,[353]\r\nNCP-21.zip,2,NCP,576,2310,124,1,[2310]\r\nNormal-1.zip,0,Normal,1701,956,70,2,\"[955, 956]\"\r\nNormal-17.zip,0,Normal,2178,633,85,1,[633]\r\nCP-8.zip,1,CP,1322,3681,56,2,\"[3680, 3681]\"\r\nNormal-6.zip,0,Normal,1802,257,107,1,[257]\r\nNCP-20.zip,2,NCP,547,2251,159,2,\"[2251, 2252]\"\r\nNCP-3.zip,2,NCP,1285,2726,66,1,[2726]\r\nNormal-7.zip,0,Normal,1828,283,96,1,[283]\r\nNCP-20.zip,2,NCP,546,2250,57,2,\"[2249, 2250]\"\r\nNormal-2.zip,0,Normal,1750,1078,69,3,\"[1074, 1077, 1078]\"\r\nNormal-9.zip,0,Normal,1892,347,77,1,[347]\r\nNCP-19.zip,2,NCP,534,2225,115,2,\"[2225, 2226]\"\r\nCP-29.zip,1,CP,3806,5750,20,1,[5750]\r\nNCP-13.zip,2,NCP,355,1861,125,2,\"[1861, 1862]\"\r\nNormal-6.zip,0,Normal,1813,268,80,1,[268]\r\nNormal-2.zip,0,Normal,1756,1101,66,4,\"[1101, 1102, 1103, 1104]\"\r\nCP-20.zip,1,CP,2759,3290,36,1,[3290]\r\nNormal-17.zip,0,Normal,2183,638,110,1,[638]\r\nNCP-6.zip,2,NCP,228,1601,67,2,\"[1600, 1601]\"\r\nNCP-5.zip,2,NCP,197,1539,53,2,\"[1538, 1539]\"\r\nCP-28.zip,1,CP,3766,5710,24,1,[5710]\r\nCP-10.zip,1,CP,1399,3858,45,2,\"[3858, 3859]\"\r\nNormal-14.zip,0,Normal,2074,529,82,1,[529]\r\nNormal-2.zip,0,Normal,1733,1026,71,2,\"[1026, 1027]\"\r\nNCP-11.zip,2,NCP,300,1747,58,2,\"[1746, 1747]\"\r\nCP-17.zip,1,CP,1650,4338,31,1,[4338]\r\nCP-20.zip,1,CP,2455,2937,116,1,[2937]\r\nNormal-20.zip,0,Normal,2279,734,78,1,[734]\r\nCP-8.zip,1,CP,1329,3697,45,3,\"[3695, 3696, 3697]\"\r\nNCP-16.zip,2,NCP,444,2042,146,2,\"[2042, 2043]\"\r\nNormal-12.zip,0,Normal,1999,454,78,1,[454]\r\nCP-17.zip,1,CP,1624,4312,20,1,[4312]\r\nNCP-10.zip,2,NCP,2720,2676,45,1,[2676]\r\nCP-2.zip,1,CP,1107,3325,183,1,[3325]\r\nCP-18.zip,1,CP,1777,3542,62,5,\"[3540, 3541, 3542, 3543, 3544]\"\r\nNCP-15.zip,2,NCP,403,1955,110,2,\"[1955, 1956]\"\r\nNCP-3.zip,2,NCP,134,1413,54,2,\"[1412, 
1413]\"\r\nCP-13.zip,1,CP,1500,4099,97,1,[4099]\r\nCP-25.zip,1,CP,712,3074,118,1,[3074]\r\nCP-23.zip,1,CP,648,3010,104,1,[3010]\r\nCP-19.zip,1,CP,2443,2917,98,3,\"[2915, 2916, 2917]\"\r\nNCP-16.zip,2,NCP,441,2036,115,2,\"[2036, 2037]\"\r\nNormal-23.zip,0,Normal,2628,138,34,1,[138]\r\nCP-19.zip,1,CP,1795,3596,41,2,\"[3596, 3597]\"\r\nNCP-27.zip,2,NCP,1016,2580,20,3,\"[2580, 2581, 2582]\"\r\nNormal-24.zip,0,Normal,2659,169,39,1,[169]\r\nCP-17.zip,1,CP,1619,4307,29,1,[4307]\r\nNormal-26.zip,0,Normal,3861,5373,211,1,[5373]\r\nNCP-19.zip,2,NCP,519,2195,53,2,\"[2194, 2195]\"\r\nNCP-6.zip,2,NCP,213,1571,66,2,\"[1570, 1571]\"\r\nNormal-25.zip,0,Normal,3860,5372,212,1,[5372]\r\nNCP-5.zip,2,NCP,192,1529,57,2,\"[1528, 1529]\"\r\nCP-3.zip,1,CP,1153,3371,179,1,[3371]\r\nCP-3.zip,1,CP,1159,3377,287,1,[3377]\r\nNCP-30.zip,2,NCP,931,2473,21,1,[2473]\r\nCP-6.zip,1,CP,1255,3473,107,1,[3473]\r\nNCP-4.zip,2,NCP,169,1482,133,2,\"[1482, 1483]\"\r\nNCP-12.zip,2,NCP,340,1828,128,2,\"[1828, 1829]\"\r\nCP-26.zip,1,CP,3729,5665,36,3,\"[5665, 5666, 5667]\"\r\nNormal-11.zip,0,Normal,1976,431,74,1,[431]\r\nCP-9.zip,1,CP,1372,3798,49,3,\"[3797, 3798, 3799]\"\r\nNCP-4.zip,2,NCP,161,1467,57,2,\"[1466, 1467]\"\r\nCP-22.zip,1,CP,613,2975,78,1,[2975]\r\nNCP-17.zip,2,NCP,460,2074,106,2,\"[2074, 2075]\"\r\nNCP-21.zip,2,NCP,577,2311,145,2,\"[2311, 2312]\"\r\nCP-25.zip,1,CP,741,3103,523,1,[3103]\r\nCP-14.zip,1,CP,1532,4172,50,2,\"[4171, 4172]\"\r\nNCP-11.zip,2,NCP,299,1744,139,2,\"[1744, 1745]\"\r\nNCP-14.zip,2,NCP,372,1896,45,2,\"[1895, 1896]\"\r\nCP-9.zip,1,CP,1356,3756,60,2,\"[3756, 3757]\"\r\nNormal-11.zip,0,Normal,1968,423,96,1,[423]\r\nCP-14.zip,1,CP,1525,4156,60,2,\"[4155, 4156]\"\r\nCP-22.zip,1,CP,618,2980,166,1,[2980]\r\nCP-17.zip,1,CP,1639,4327,26,1,[4327]\r\nNormal-19.zip,0,Normal,2245,700,83,1,[700]\r\nCP-13.zip,1,CP,1518,4139,67,3,\"[4138, 4139, 4140]\"\r\nNCP-11.zip,2,NCP,29,1190,132,2,\"[1190, 
1191]\"\r\nCP-16.zip,1,CP,1615,4303,29,1,[4303]\r\nCP-29.zip,1,CP,3823,5767,26,1,[5767]\r\nNCP-20.zip,2,NCP,574,2306,139,2,\"[2306, 2307]\"\r\nNCP-12.zip,2,NCP,340,1829,54,2,\"[1828, 1829]\"\r\nNormal-21.zip,0,Normal,2285,740,68,1,[740]\r\nNCP-16.zip,2,NCP,455,2065,56,2,\"[2064, 2065]\"\r\nNCP-16.zip,2,NCP,436,2026,61,2,\"[2025, 2026]\"\r\nNCP-14.zip,2,NCP,383,1918,139,2,\"[1918, 1919]\"\r\nNCP-30.zip,2,NCP,988,2538,287,2,\"[2538, 2539]\"\r\nNCP-7.zip,2,NCP,247,1641,66,2,\"[1640, 1641]\"\r\nCP-15.zip,1,CP,1571,4259,16,1,[4259]\r\nNormal-16.zip,0,Normal,2137,592,94,1,[592]\r\nCP-7.zip,1,CP,1304,3637,218,8,\"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]\"\r\nCP-6.zip,1,CP,1235,3453,155,1,[3453]\r\nNormal-4.zip,0,Normal,776,211,353,1,[211]\r\nNormal-18.zip,0,Normal,2189,644,82,1,[644]\r\nNormal-6.zip,0,Normal,1799,254,97,1,[254]\r\nNormal-15.zip,0,Normal,2113,568,93,1,[568]\r\nCP-3.zip,1,CP,1131,3349,157,1,[3349]\r\nNormal-6.zip,0,Normal,1819,274,91,1,[274]\r\nCP-18.zip,1,CP,1781,3571,62,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nNCP-16.zip,2,NCP,455,2064,132,2,\"[2064, 2065]\"\r\nNormal-8.zip,0,Normal,1888,343,99,1,[343]\r\nNCP-20.zip,2,NCP,571,2301,68,2,\"[2300, 2301]\"\r\nNCP-7.zip,2,NCP,247,1640,159,2,\"[1640, 1641]\"\r\nCP-3.zip,1,CP,1137,3355,147,1,[3355]\r\nCP-11.zip,1,CP,1423,3913,53,3,\"[3911, 3912, 3913]\"\r\nNCP-1.zip,2,NCP,105,1347,145,2,\"[1347, 1348]\"\r\nNCP-14.zip,2,NCP,377,1907,62,2,\"[1906, 1907]\"\r\nCP-14.zip,1,CP,1535,4178,53,2,\"[4178, 4179]\"\r\nNormal-9.zip,0,Normal,1900,355,93,1,[355]\r\nCP-2.zip,1,CP,1125,3343,115,1,[3343]\r\nCP-6.zip,1,CP,1243,3461,176,1,[3461]\r\nNCP-6.zip,2,NCP,203,1550,140,2,\"[1550, 1551]\"\r\nNormal-1.zip,0,Normal,1670,789,63,6,\"[787, 788, 789, 790, 791, 792]\"\r\nNCP-29.zip,2,NCP,909,2451,401,1,[2451]\r\nNCP-25.zip,2,NCP,3949,5463,35,1,[5463]\r\nNormal-26.zip,0,Normal,3879,5391,28,1,[5391]\r\nNCP-11.zip,2,NCP,29,1191,56,2,\"[1190, 
1191]\"\r\nCP-3.zip,1,CP,1129,3347,158,1,[3347]\r\nNCP-15.zip,2,NCP,406,1961,146,2,\"[1961, 1962]\"\r\nNCP-4.zip,2,NCP,151,1446,129,2,\"[1446, 1447]\"\r\nCP-19.zip,1,CP,1789,3206,64,4,\"[3204, 3205, 3206, 3207]\"\r\nNCP-6.zip,2,NCP,227,1599,61,2,\"[1598, 1599]\"\r\nCP-12.zip,1,CP,1462,4003,51,3,\"[4002, 4003, 4004]\"\r\nCP-3.zip,1,CP,1147,3365,164,1,[3365]\r\nNormal-23.zip,0,Normal,2629,139,36,1,[139]\r\nNormal-1.zip,0,Normal,1700,953,64,2,\"[953, 954]\"\r\nCP-15.zip,1,CP,1561,4241,49,2,\"[4241, 4242]\"\r\nNCP-16.zip,2,NCP,437,2028,60,2,\"[2027, 2028]\"\r\nCP-18.zip,1,CP,1654,4342,23,1,[4342]\r\nNormal-20.zip,0,Normal,2273,728,75,1,[728]\r\nNormal-14.zip,0,Normal,2067,522,94,1,[522]\r\nNCP-29.zip,2,NCP,911,2453,48,1,[2453]\r\nNormal-2.zip,0,Normal,1756,1104,65,4,\"[1101, 1102, 1103, 1104]\"\r\nNormal-11.zip,0,Normal,1989,444,105,1,[444]\r\nNCP-15.zip,2,NCP,403,1956,47,2,\"[1955, 1956]\"\r\nNCP-13.zip,2,NCP,348,1848,48,2,\"[1847, 1848]\"\r\nNCP-28.zip,2,NCP,844,2359,594,1,[2359]\r\nNCP-18.zip,2,NCP,51,1235,141,2,\"[1235, 1236]\"\r\nCP-28.zip,1,CP,3789,5733,26,1,[5733]\r\nNormal-2.zip,0,Normal,1763,1138,65,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nNCP-10.zip,2,NCP,278,1702,137,2,\"[1702, 1703]\"\r\nCP-28.zip,1,CP,3770,5714,23,1,[5714]\r\nNCP-23.zip,2,NCP,93,1323,66,2,\"[1322, 1323]\"\r\nNCP-14.zip,2,NCP,390,1930,126,2,\"[1930, 1931]\"\r\nNCP-8.zip,2,NCP,26,1184,82,2,\"[1184, 1185]\"\r\nNormal-2.zip,0,Normal,1763,1142,71,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nNCP-6.zip,2,NCP,201,1547,62,2,\"[1546, 1547]\"\r\nNormal-23.zip,0,Normal,2626,136,33,1,[136]\r\nNCP-25.zip,2,NCP,3707,5534,50,1,[5534]\r\nNormal-21.zip,0,Normal,2305,760,104,1,[760]\r\nNormal-6.zip,0,Normal,1818,273,87,1,[273]\r\nCP-22.zip,1,CP,641,3003,136,1,[3003]\r\nNormal-7.zip,0,Normal,1836,291,104,1,[291]\r\nNormal-27.zip,0,Normal,3894,5417,287,1,[5417]\r\nNCP-30.zip,2,NCP,981,2526,23,2,\"[2525, 
2526]\"\r\nNCP-1.zip,2,NCP,102,1341,132,2,\"[1341, 1342]\"\r\nNCP-14.zip,2,NCP,387,1924,128,2,\"[1924, 1925]\"\r\nNCP-2.zip,2,NCP,117,1376,55,2,\"[1375, 1376]\"\r\nNCP-5.zip,2,NCP,190,1524,152,2,\"[1524, 1525]\"\r\nCP-26.zip,1,CP,3639,5598,241,1,[5598]\r\nNormal-1.zip,0,Normal,1670,787,58,6,\"[787, 788, 789, 790, 791, 792]\"\r\nNormal-2.zip,0,Normal,1757,1108,68,4,\"[1105, 1106, 1107, 1108]\"\r\nNormal-13.zip,0,Normal,2043,498,84,1,[498]\r\nCP-2.zip,1,CP,1099,3317,198,1,[3317]\r\nCP-7.zip,1,CP,1318,3673,56,1,[3673]\r\nNormal-9.zip,0,Normal,1899,354,88,1,[354]\r\nCP-12.zip,1,CP,1467,4014,60,2,\"[4013, 4014]\"\r\nNCP-5.zip,2,NCP,197,1538,124,2,\"[1538, 1539]\"\r\nCP-26.zip,1,CP,3730,5669,202,2,\"[5668, 5669]\"\r\nNCP-22.zip,2,NCP,845,2363,428,4,\"[2360, 2361, 2362, 2363]\"\r\nNCP-2.zip,2,NCP,127,1399,139,2,\"[1399, 1400]\"\r\nNormal-26.zip,0,Normal,3893,5416,63,1,[5416]\r\nNCP-8.zip,2,NCP,2669,2689,37,1,[2689]\r\nCP-18.zip,1,CP,1778,3549,64,6,\"[3545, 3546, 3547, 3548, 3549, 3550]\"\r\nCP-25.zip,1,CP,722,3084,70,1,[3084]\r\nNCP-4.zip,2,NCP,157,1458,114,2,\"[1458, 1459]\"\r\nNCP-23.zip,2,NCP,92,1320,87,2,\"[1320, 1321]\"\r\nCP-11.zip,1,CP,1424,3914,60,2,\"[3914, 3915]\"\r\nNCP-19.zip,2,NCP,529,2215,33,3,\"[2214, 2215, 2217]\"\r\nCP-24.zip,1,CP,704,3066,417,1,[3066]\r\nNCP-6.zip,2,NCP,201,1546,149,2,\"[1546, 1547]\"\r\nNormal-17.zip,0,Normal,2177,632,88,1,[632]\r\nNCP-14.zip,2,NCP,383,1919,58,2,\"[1918, 1919]\"\r\nNormal-2.zip,0,Normal,1737,1040,80,4,\"[1037, 1038, 1039, 1040]\"\r\nNormal-26.zip,0,Normal,3881,5393,22,1,[5393]\r\nNormal-3.zip,0,Normal,1767,1161,71,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nCP-14.zip,1,CP,1525,4155,60,2,\"[4155, 4156]\"\r\nNCP-12.zip,2,NCP,341,1832,55,3,\"[1830, 1832, 1834]\"\r\nNormal-13.zip,0,Normal,2034,489,91,1,[489]\r\nNCP-26.zip,2,NCP,3978,5485,49,1,[5485]\r\nNCP-22.zip,2,NCP,864,2389,221,2,\"[2388, 
2389]\"\r\nNCP-9.zip,2,NCP,2682,2652,47,1,[2652]\r\nNCP-7.zip,2,NCP,2461,2642,42,1,[2642]\r\nNormal-21.zip,0,Normal,2303,758,110,1,[758]\r\nNCP-8.zip,2,NCP,2670,2690,41,1,[2690]\r\nCP-7.zip,1,CP,1315,3666,59,2,\"[3665, 3666]\"\r\nCP-19.zip,1,CP,2449,2927,118,1,[2927]\r\nCP-19.zip,1,CP,1789,3204,59,4,\"[3204, 3205, 3206, 3207]\"\r\nNormal-6.zip,0,Normal,1803,258,100,1,[258]\r\nNormal-1.zip,0,Normal,1675,812,73,1,[812]\r\nNCP-25.zip,2,NCP,3705,5532,63,1,[5532]\r\nNormal-1.zip,0,Normal,1727,1010,63,4,\"[1009, 1010, 1011, 1012]\"\r\nNCP-3.zip,2,NCP,1283,2724,70,1,[2724]\r\nCP-18.zip,1,CP,1774,3524,66,8,\"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]\"\r\nCP-18.zip,1,CP,1774,3525,66,8,\"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]\"\r\nCP-30.zip,1,CP,3919,5543,66,4,\"[5543, 5544, 5545, 5546]\"\r\nNCP-22.zip,2,NCP,85,1304,58,2,\"[1303, 1304]\"\r\nNormal-18.zip,0,Normal,2192,647,79,1,[647]\r\nCP-30.zip,1,CP,3935,5641,70,1,[5641]\r\nNCP-6.zip,2,NCP,227,1598,146,2,\"[1598, 1599]\"\r\nNormal-20.zip,0,Normal,2250,705,76,1,[705]\r\nCP-12.zip,1,CP,1464,4008,63,2,\"[4007, 4008]\"\r\nCP-29.zip,1,CP,3807,5751,20,1,[5751]\r\nNormal-12.zip,0,Normal,1993,448,97,1,[448]\r\nNCP-19.zip,2,NCP,528,2212,140,2,\"[2212, 2213]\"\r\nNCP-26.zip,2,NCP,3987,5511,60,1,[5511]\r\nNCP-25.zip,2,NCP,3969,5478,50,1,[5478]\r\nCP-17.zip,1,CP,1638,4326,25,1,[4326]\r\nCP-17.zip,1,CP,1643,4331,24,1,[4331]\r\nCP-17.zip,1,CP,1629,4317,23,1,[4317]\r\nCP-11.zip,1,CP,1423,3912,53,3,\"[3911, 3912, 3913]\"\r\nNormal-2.zip,0,Normal,1743,1056,73,2,\"[1056, 1057]\"\r\nNormal-9.zip,0,Normal,1915,370,91,1,[370]\r\nNormal-22.zip,0,Normal,2590,100,41,1,[100]\r\nNCP-11.zip,2,NCP,297,1741,60,2,\"[1739, 1741]\"\r\nCP-30.zip,1,CP,3919,5545,70,4,\"[5543, 5544, 5545, 5546]\"\r\nNCP-25.zip,2,NCP,3971,5480,50,1,[5480]\r\nCP-11.zip,1,CP,1454,3983,53,3,\"[3982, 3983, 3984]\"\r\nNormal-21.zip,0,Normal,2282,737,69,1,[737]\r\nNCP-12.zip,2,NCP,318,1783,150,2,\"[1783, 1784]\"\r\nNCP-10.zip,2,NCP,279,1704,139,2,\"[1704, 
1705]\"\r\nCP-2.zip,1,CP,1108,3326,135,1,[3326]\r\nNormal-2.zip,0,Normal,1733,1027,71,2,\"[1026, 1027]\"\r\nCP-32.zip,1,CP,1781,3567,67,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nNCP-10.zip,2,NCP,2725,2681,51,1,[2681]\r\nCP-11.zip,1,CP,1425,3917,49,3,\"[3916, 3917, 3918]\"\r\nNormal-1.zip,0,Normal,1701,955,70,2,\"[955, 956]\"\r\nCP-19.zip,1,CP,1787,3195,59,1,[3195]\r\n"
  },
  {
    "path": "Finetune/CC-CCII/csv/CC_CCII_fold1_valid.csv",
    "content": "zip_file,target,label,patient_id,scan_id,n_slice,scan_count,all_scan_ids\r\nNormal-2.zip,0,Normal,1740,1050,21,8,\"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]\"\r\nCP-10.zip,1,CP,1387,3830,51,2,\"[3829, 3830]\"\r\nNCP-10.zip,2,NCP,2719,2675,44,1,[2675]\r\nCP-1.zip,1,CP,1065,3104,58,1,[3104]\r\nCP-10.zip,1,CP,1392,3843,62,2,\"[3843, 3844]\"\r\nCP-13.zip,1,CP,1508,4117,57,3,\"[4115, 4116, 4117]\"\r\nNCP-22.zip,2,NCP,863,2387,282,2,\"[2386, 2387]\"\r\nNormal-3.zip,0,Normal,763,198,102,1,[198]\r\nNormal-23.zip,0,Normal,2635,145,27,1,[145]\r\nNCP-20.zip,2,NCP,572,2303,58,2,\"[2302, 2303]\"\r\nNormal-1.zip,0,Normal,1683,862,65,6,\"[861, 862, 864, 865, 868, 869]\"\r\nCP-10.zip,1,CP,1398,3856,44,2,\"[3856, 3857]\"\r\nCP-15.zip,1,CP,1566,4252,54,2,\"[4252, 4253]\"\r\nNCP-10.zip,2,NCP,280,1707,51,2,\"[1706, 1707]\"\r\nCP-19.zip,1,CP,1785,3187,67,6,\"[3186, 3187, 3188, 3189, 3190, 3191]\"\r\nCP-15.zip,1,CP,1570,4258,22,1,[4258]\r\nCP-10.zip,1,CP,1413,3890,66,2,\"[3889, 3890]\"\r\nCP-7.zip,1,CP,1303,3618,42,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-19.zip,1,CP,2435,2903,295,3,\"[2901, 2902, 2903]\"\r\nNCP-22.zip,2,NCP,860,2382,212,2,\"[2382, 2383]\"\r\nNCP-22.zip,2,NCP,883,2419,52,2,\"[2419, 2420]\"\r\nNormal-2.zip,0,Normal,1751,1079,61,6,\"[1079, 1080, 1081, 1082, 1083, 1084]\"\r\nNormal-25.zip,0,Normal,3852,5364,195,1,[5364]\r\nNCP-20.zip,2,NCP,559,2275,127,2,\"[2275, 2276]\"\r\nNCP-18.zip,2,NCP,498,2153,58,2,\"[2152, 2153]\"\r\nNormal-27.zip,0,Normal,3911,5448,64,6,\"[5447, 5448, 5449, 5450, 5451, 5452]\"\r\nNormal-17.zip,0,Normal,2158,613,100,1,[613]\r\nNCP-7.zip,2,NCP,246,1639,58,2,\"[1638, 1639]\"\r\nNCP-17.zip,2,NCP,473,2102,61,2,\"[2101, 2102]\"\r\nNormal-2.zip,0,Normal,1732,1025,73,1,[1025]\r\nNCP-10.zip,2,NCP,271,1688,146,2,\"[1688, 1689]\"\r\nCP-7.zip,1,CP,1303,3627,252,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 
3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNCP-11.zip,2,NCP,286,1717,121,2,\"[1717, 1718]\"\r\nNormal-19.zip,0,Normal,2223,678,95,1,[678]\r\nNCP-22.zip,2,NCP,822,2333,31,2,\"[2332, 2333]\"\r\nNCP-28.zip,2,NCP,870,2400,47,2,\"[2399, 2400]\"\r\nNCP-21.zip,2,NCP,75,1284,54,2,\"[1283, 1284]\"\r\nNCP-17.zip,2,NCP,469,2094,66,2,\"[2093, 2094]\"\r\nNCP-8.zip,2,NCP,255,1656,139,2,\"[1656, 1657]\"\r\nNCP-6.zip,2,NCP,211,1566,137,2,\"[1566, 1567]\"\r\nNCP-25.zip,2,NCP,3966,5476,43,1,[5476]\r\nNCP-21.zip,2,NCP,575,2309,61,2,\"[2308, 2309]\"\r\nNormal-2.zip,0,Normal,1740,1045,102,8,\"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]\"\r\nNormal-1.zip,0,Normal,1681,845,69,1,[845]\r\nNCP-11.zip,2,NCP,310,1768,70,2,\"[1767, 1768]\"\r\nNCP-22.zip,2,NCP,87,1307,145,2,\"[1307, 1308]\"\r\nNormal-4.zip,0,Normal,786,221,124,1,[221]\r\nNormal-20.zip,0,Normal,2270,725,86,1,[725]\r\nNCP-18.zip,2,NCP,515,2187,58,2,\"[2186, 2187]\"\r\nNCP-5.zip,2,NCP,172,1488,139,2,\"[1488, 1489]\"\r\nNCP-20.zip,2,NCP,551,2260,65,2,\"[2259, 2260]\"\r\nNCP-21.zip,2,NCP,61,1256,60,2,\"[1255, 1256]\"\r\nCP-13.zip,1,CP,1508,4116,57,3,\"[4115, 4116, 4117]\"\r\nNCP-22.zip,2,NCP,863,2386,228,2,\"[2386, 2387]\"\r\nCP-10.zip,1,CP,1413,3889,67,2,\"[3889, 3890]\"\r\nNormal-2.zip,0,Normal,1740,1047,60,8,\"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]\"\r\nCP-19.zip,1,CP,1785,3188,67,6,\"[3186, 3187, 3188, 3189, 3190, 3191]\"\r\nCP-7.zip,1,CP,1303,3624,224,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNormal-1.zip,0,Normal,1683,868,64,6,\"[861, 862, 864, 865, 868, 869]\"\r\nCP-7.zip,1,CP,1303,3611,257,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-7.zip,1,CP,1303,3610,51,26,\"[3606, 3607, 3608, 3609, 
3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNCP-21.zip,2,NCP,61,1255,142,2,\"[1255, 1256]\"\r\nNormal-1.zip,0,Normal,1683,865,72,6,\"[861, 862, 864, 865, 868, 869]\"\r\nCP-7.zip,1,CP,1303,3630,49,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-10.zip,1,CP,1392,3844,62,2,\"[3843, 3844]\"\r\nCP-15.zip,1,CP,1566,4253,54,2,\"[4252, 4253]\"\r\nNormal-27.zip,0,Normal,3911,5447,65,6,\"[5447, 5448, 5449, 5450, 5451, 5452]\"\r\nNormal-27.zip,0,Normal,3911,5449,64,6,\"[5447, 5448, 5449, 5450, 5451, 5452]\"\r\nNCP-22.zip,2,NCP,87,1308,61,2,\"[1307, 1308]\"\r\nNormal-1.zip,0,Normal,1683,861,65,6,\"[861, 862, 864, 865, 868, 869]\"\r\nCP-7.zip,1,CP,1303,3613,232,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNCP-20.zip,2,NCP,551,2259,154,2,\"[2259, 2260]\"\r\nCP-19.zip,1,CP,1785,3191,79,6,\"[3186, 3187, 3188, 3189, 3190, 3191]\"\r\nNCP-18.zip,2,NCP,515,2186,139,2,\"[2186, 2187]\"\r\nNormal-2.zip,0,Normal,1740,1048,60,8,\"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]\"\r\nCP-7.zip,1,CP,1303,3626,51,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-7.zip,1,CP,1303,3606,49,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-19.zip,1,CP,2435,2902,100,3,\"[2901, 2902, 2903]\"\r\nCP-7.zip,1,CP,1303,3612,49,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-7.zip,1,CP,1303,3619,213,26,\"[3606, 3607, 
3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNCP-5.zip,2,NCP,172,1489,59,2,\"[1488, 1489]\"\r\nCP-7.zip,1,CP,1303,3617,27,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-10.zip,1,CP,1398,3857,44,2,\"[3856, 3857]\"\r\nCP-7.zip,1,CP,1303,3608,55,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNCP-28.zip,2,NCP,870,2399,247,2,\"[2399, 2400]\"\r\nNCP-22.zip,2,NCP,883,2420,200,2,\"[2419, 2420]\"\r\nCP-7.zip,1,CP,1303,3609,271,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-10.zip,1,CP,1387,3829,51,2,\"[3829, 3830]\"\r\nNCP-8.zip,2,NCP,255,1657,58,2,\"[1656, 1657]\"\r\nNormal-2.zip,0,Normal,1740,1051,59,8,\"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]\"\r\nNormal-2.zip,0,Normal,1751,1081,62,6,\"[1079, 1080, 1081, 1082, 1083, 1084]\"\r\nNCP-17.zip,2,NCP,469,2093,159,2,\"[2093, 2094]\"\r\nCP-7.zip,1,CP,1303,3621,230,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-7.zip,1,CP,1303,3607,247,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNormal-27.zip,0,Normal,3911,5452,65,6,\"[5447, 5448, 5449, 5450, 5451, 5452]\"\r\nNCP-17.zip,2,NCP,473,2101,145,2,\"[2101, 2102]\"\r\nCP-19.zip,1,CP,2435,2901,104,3,\"[2901, 2902, 2903]\"\r\nNormal-2.zip,0,Normal,1740,1049,21,8,\"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]\"\r\nCP-19.zip,1,CP,1785,3189,67,6,\"[3186, 3187, 3188, 3189, 3190, 
3191]\"\r\nNCP-10.zip,2,NCP,271,1689,61,2,\"[1688, 1689]\"\r\nCP-7.zip,1,CP,1303,3629,244,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-7.zip,1,CP,1303,3631,242,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNormal-27.zip,0,Normal,3911,5451,65,6,\"[5447, 5448, 5449, 5450, 5451, 5452]\"\r\nNCP-22.zip,2,NCP,822,2332,36,2,\"[2332, 2333]\"\r\nCP-7.zip,1,CP,1303,3622,28,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-19.zip,1,CP,1785,3190,79,6,\"[3186, 3187, 3188, 3189, 3190, 3191]\"\r\nNCP-11.zip,2,NCP,310,1767,169,2,\"[1767, 1768]\"\r\nNormal-2.zip,0,Normal,1751,1080,61,6,\"[1079, 1080, 1081, 1082, 1083, 1084]\"\r\nNCP-21.zip,2,NCP,575,2308,144,2,\"[2308, 2309]\"\r\nCP-19.zip,1,CP,1785,3186,67,6,\"[3186, 3187, 3188, 3189, 3190, 3191]\"\r\nNCP-21.zip,2,NCP,75,1283,128,2,\"[1283, 1284]\"\r\nNCP-11.zip,2,NCP,286,1718,51,2,\"[1717, 1718]\"\r\nCP-7.zip,1,CP,1303,3628,50,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNormal-2.zip,0,Normal,1740,1046,300,8,\"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]\"\r\nCP-7.zip,1,CP,1303,3620,45,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-7.zip,1,CP,1303,3614,27,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNormal-27.zip,0,Normal,3911,5450,68,6,\"[5447, 5448, 5449, 5450, 5451, 
5452]\"\r\nNormal-1.zip,0,Normal,1683,864,72,6,\"[861, 862, 864, 865, 868, 869]\"\r\nNCP-20.zip,2,NCP,572,2302,138,2,\"[2302, 2303]\"\r\nNormal-2.zip,0,Normal,1751,1084,67,6,\"[1079, 1080, 1081, 1082, 1083, 1084]\"\r\nCP-7.zip,1,CP,1303,3625,32,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNormal-2.zip,0,Normal,1740,1052,59,8,\"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]\"\r\nNCP-10.zip,2,NCP,280,1706,121,2,\"[1706, 1707]\"\r\nNCP-18.zip,2,NCP,498,2152,139,2,\"[2152, 2153]\"\r\nCP-7.zip,1,CP,1303,3623,45,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-7.zip,1,CP,1303,3615,44,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-7.zip,1,CP,1303,3616,209,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNCP-6.zip,2,NCP,211,1567,58,2,\"[1566, 1567]\"\r\nNormal-2.zip,0,Normal,1751,1083,67,6,\"[1079, 1080, 1081, 1082, 1083, 1084]\"\r\nNCP-22.zip,2,NCP,860,2383,183,2,\"[2382, 2383]\"\r\nNCP-20.zip,2,NCP,559,2276,54,2,\"[2275, 2276]\"\r\nNormal-2.zip,0,Normal,1751,1082,62,6,\"[1079, 1080, 1081, 1082, 1083, 1084]\"\r\nCP-13.zip,1,CP,1508,4115,57,3,\"[4115, 4116, 4117]\"\r\nNormal-1.zip,0,Normal,1683,869,64,6,\"[861, 862, 864, 865, 868, 869]\"\r\nNCP-7.zip,2,NCP,246,1638,139,2,\"[1638, 1639]\"\r\nNCP-5.zip,2,NCP,18,1169,57,2,\"[1168, 1169]\"\r\nNormal-15.zip,0,Normal,2096,551,93,1,[551]\r\nCP-21.zip,1,CP,2776,3307,31,1,[3307]\r\nNCP-16.zip,2,NCP,449,2053,61,2,\"[2052, 2053]\"\r\nNCP-15.zip,2,NCP,404,1958,46,2,\"[1957, 1958]\"\r\nNCP-6.zip,2,NCP,210,1565,55,2,\"[1564, 
1565]\"\r\nCP-3.zip,1,CP,1144,3362,159,1,[3362]\r\nNormal-8.zip,0,Normal,1879,334,88,1,[334]\r\nNormal-1.zip,0,Normal,1721,1000,75,4,\"[1000, 997, 998, 999]\"\r\nNCP-21.zip,2,NCP,583,2323,147,2,\"[2323, 2324]\"\r\nNCP-1.zip,2,NCP,1039,2610,45,1,[2610]\r\nNormal-8.zip,0,Normal,1882,337,86,1,[337]\r\nNormal-21.zip,0,Normal,2307,762,80,1,[762]\r\nCP-14.zip,1,CP,1528,4163,61,2,\"[4163, 4164]\"\r\nCP-11.zip,1,CP,1443,3958,58,3,\"[3957, 3958, 3959]\"\r\nNCP-18.zip,2,NCP,496,2149,70,2,\"[2148, 2149]\"\r\nCP-7.zip,1,CP,1270,3489,204,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nNormal-7.zip,0,Normal,1834,289,82,1,[289]\r\nNCP-13.zip,2,NCP,351,1853,145,2,\"[1853, 1854]\"\r\nCP-18.zip,1,CP,1782,3584,71,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]\"\r\nNormal-1.zip,0,Normal,1676,816,65,7,\"[816, 817, 818, 819, 820, 821, 822]\"\r\nCP-11.zip,1,CP,1428,3923,221,3,\"[3923, 3924, 3925]\"\r\nCP-8.zip,1,CP,1330,3699,58,3,\"[3698, 3699, 3700]\"\r\nNormal-19.zip,0,Normal,2233,688,76,1,[688]\r\nNCP-18.zip,2,NCP,514,2184,160,2,\"[2184, 2185]\"\r\nNormal-6.zip,0,Normal,1804,259,102,1,[259]\r\nNormal-22.zip,0,Normal,2598,108,38,1,[108]\r\nCP-14.zip,1,CP,1534,4176,58,2,\"[4176, 4177]\"\r\nCP-5.zip,1,CP,1217,3435,320,1,[3435]\r\nNCP-14.zip,2,NCP,378,1908,168,2,\"[1908, 1909]\"\r\nCP-18.zip,1,CP,1782,3582,71,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]\"\r\nNCP-25.zip,2,NCP,3963,5474,56,1,[5474]\r\nNCP-22.zip,2,NCP,82,1298,55,2,\"[1297, 1298]\"\r\nNCP-2.zip,2,NCP,1274,2715,55,1,[2715]\r\nCP-22.zip,1,CP,619,2981,102,1,[2981]\r\nNormal-24.zip,0,Normal,2661,171,31,1,[171]\r\nCP-14.zip,1,CP,1540,4192,58,3,\"[4191, 4192, 4193]\"\r\nNCP-10.zip,2,NCP,2724,2680,43,1,[2680]\r\nNormal-2.zip,0,Normal,1742,1055,60,1,[1055]\r\nCP-12.zip,1,CP,1486,4060,63,2,\"[4059, 4060]\"\r\nNCP-19.zip,2,NCP,527,2211,48,2,\"[2210, 2211]\"\r\nCP-10.zip,1,CP,1393,3846,60,2,\"[3845, 3846]\"\r\nNormal-1.zip,0,Normal,1721,997,68,4,\"[1000, 
997, 998, 999]\"\r\nNormal-25.zip,0,Normal,3839,5351,220,1,[5351]\r\nNormal-12.zip,0,Normal,1991,446,306,1,[446]\r\nCP-19.zip,1,CP,1794,3595,38,2,\"[3594, 3595]\"\r\nNormal-1.zip,0,Normal,1669,785,54,5,\"[782, 783, 784, 785, 786]\"\r\nCP-18.zip,1,CP,1782,3580,71,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]\"\r\nNormal-11.zip,0,Normal,1963,418,95,1,[418]\r\nCP-11.zip,1,CP,1428,3924,56,3,\"[3923, 3924, 3925]\"\r\nNormal-9.zip,0,Normal,1918,373,85,1,[373]\r\nNormal-16.zip,0,Normal,2118,573,89,1,[573]\r\nNCP-4.zip,2,NCP,140,1424,128,2,\"[1424, 1425]\"\r\nNormal-16.zip,0,Normal,2142,597,84,1,[597]\r\nNCP-15.zip,2,NCP,410,1969,143,2,\"[1969, 1970]\"\r\nNormal-3.zip,0,Normal,749,184,89,1,[184]\r\nNormal-1.zip,0,Normal,1718,991,66,2,\"[991, 992]\"\r\nNCP-5.zip,2,NCP,176,1497,53,2,\"[1496, 1497]\"\r\nNCP-8.zip,2,NCP,265,1677,50,2,\"[1676, 1677]\"\r\nCP-7.zip,1,CP,1270,3495,148,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nNCP-26.zip,2,NCP,3982,5489,34,1,[5489]\r\nNCP-8.zip,2,NCP,2677,2695,51,1,[2695]\r\nNCP-13.zip,2,NCP,357,1866,63,2,\"[1865, 1866]\"\r\nNCP-13.zip,2,NCP,346,1843,139,2,\"[1843, 1844]\"\r\nNormal-1.zip,0,Normal,1676,820,72,7,\"[816, 817, 818, 819, 820, 821, 822]\"\r\nNCP-14.zip,2,NCP,379,1911,62,2,\"[1910, 1911]\"\r\nNCP-1.zip,2,NCP,104,1345,139,2,\"[1345, 1346]\"\r\nNCP-2.zip,2,NCP,116,1373,127,2,\"[1373, 1374]\"\r\nNCP-17.zip,2,NCP,466,2087,145,2,\"[2087, 2088]\"\r\nCP-11.zip,1,CP,1443,3957,139,3,\"[3957, 3958, 3959]\"\r\nNCP-5.zip,2,NCP,181,1507,58,2,\"[1506, 1507]\"\r\nNCP-18.zip,2,NCP,496,2148,168,2,\"[2148, 2149]\"\r\nNCP-8.zip,2,NCP,265,1676,119,2,\"[1676, 1677]\"\r\nNormal-1.zip,0,Normal,1669,782,62,5,\"[782, 783, 784, 785, 786]\"\r\nCP-7.zip,1,CP,1270,3501,420,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nNormal-1.zip,0,Normal,1676,822,69,7,\"[816, 817, 818, 819, 820, 821, 822]\"\r\nNCP-21.zip,2,NCP,583,2324,62,2,\"[2323, 
2324]\"\r\nNCP-19.zip,2,NCP,527,2210,114,2,\"[2210, 2211]\"\r\nNCP-15.zip,2,NCP,404,1957,108,2,\"[1957, 1958]\"\r\nNCP-17.zip,2,NCP,466,2088,61,2,\"[2087, 2088]\"\r\nNCP-4.zip,2,NCP,140,1425,54,2,\"[1424, 1425]\"\r\nNCP-13.zip,2,NCP,346,1844,58,2,\"[1843, 1844]\"\r\nCP-7.zip,1,CP,1270,3494,129,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nCP-7.zip,1,CP,1270,3497,133,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nCP-18.zip,1,CP,1782,3579,71,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]\"\r\nNormal-1.zip,0,Normal,1676,818,65,7,\"[816, 817, 818, 819, 820, 821, 822]\"\r\nCP-11.zip,1,CP,1428,3925,56,3,\"[3923, 3924, 3925]\"\r\nCP-7.zip,1,CP,1270,3488,287,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nCP-7.zip,1,CP,1270,3500,160,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nNormal-1.zip,0,Normal,1669,784,196,5,\"[782, 783, 784, 785, 786]\"\r\nNormal-1.zip,0,Normal,1669,783,62,5,\"[782, 783, 784, 785, 786]\"\r\nCP-18.zip,1,CP,1782,3586,69,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]\"\r\nNormal-1.zip,0,Normal,1721,998,68,4,\"[1000, 997, 998, 999]\"\r\nNormal-1.zip,0,Normal,1676,817,65,7,\"[816, 817, 818, 819, 820, 821, 822]\"\r\nCP-14.zip,1,CP,1540,4193,58,3,\"[4191, 4192, 4193]\"\r\nNormal-1.zip,0,Normal,1676,821,72,7,\"[816, 817, 818, 819, 820, 821, 822]\"\r\nNormal-1.zip,0,Normal,1676,819,65,7,\"[816, 817, 818, 819, 820, 821, 822]\"\r\nNCP-22.zip,2,NCP,82,1297,129,2,\"[1297, 1298]\"\r\nNormal-1.zip,0,Normal,1718,992,66,2,\"[991, 992]\"\r\nCP-7.zip,1,CP,1270,3496,154,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nNCP-5.zip,2,NCP,181,1506,139,2,\"[1506, 1507]\"\r\nCP-7.zip,1,CP,1270,3492,137,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 
3501, 3502]\"\r\nNCP-13.zip,2,NCP,357,1865,150,2,\"[1865, 1866]\"\r\nCP-11.zip,1,CP,1443,3959,58,3,\"[3957, 3958, 3959]\"\r\nNormal-1.zip,0,Normal,1669,786,54,5,\"[782, 783, 784, 785, 786]\"\r\nNCP-6.zip,2,NCP,210,1564,131,2,\"[1564, 1565]\"\r\nCP-19.zip,1,CP,1794,3594,38,2,\"[3594, 3595]\"\r\nNCP-15.zip,2,NCP,410,1970,60,2,\"[1969, 1970]\"\r\nNCP-14.zip,2,NCP,379,1910,147,2,\"[1910, 1911]\"\r\nCP-7.zip,1,CP,1270,3491,142,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nNormal-1.zip,0,Normal,1721,999,75,4,\"[1000, 997, 998, 999]\"\r\nCP-14.zip,1,CP,1540,4191,221,3,\"[4191, 4192, 4193]\"\r\nCP-12.zip,1,CP,1486,4059,63,2,\"[4059, 4060]\"\r\nCP-14.zip,1,CP,1528,4164,61,2,\"[4163, 4164]\"\r\nNCP-16.zip,2,NCP,449,2052,145,2,\"[2052, 2053]\"\r\nNCP-13.zip,2,NCP,351,1854,61,2,\"[1853, 1854]\"\r\nCP-7.zip,1,CP,1270,3498,247,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nCP-8.zip,1,CP,1330,3698,58,3,\"[3698, 3699, 3700]\"\r\nNCP-2.zip,2,NCP,116,1374,54,2,\"[1373, 1374]\"\r\nNCP-18.zip,2,NCP,514,2185,67,2,\"[2184, 2185]\"\r\nCP-18.zip,1,CP,1782,3587,69,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]\"\r\nCP-8.zip,1,CP,1330,3700,58,3,\"[3698, 3699, 3700]\"\r\nNCP-14.zip,2,NCP,378,1909,69,2,\"[1908, 1909]\"\r\nNCP-1.zip,2,NCP,104,1346,58,2,\"[1345, 1346]\"\r\nCP-14.zip,1,CP,1534,4177,58,2,\"[4176, 4177]\"\r\nCP-7.zip,1,CP,1270,3490,237,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nCP-7.zip,1,CP,1270,3493,193,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nCP-18.zip,1,CP,1782,3583,71,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]\"\r\nCP-7.zip,1,CP,1270,3502,21,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nCP-7.zip,1,CP,1270,3499,363,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 
3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nNCP-5.zip,2,NCP,18,1168,135,2,\"[1168, 1169]\"\r\nCP-18.zip,1,CP,1782,3585,71,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]\"\r\nNCP-5.zip,2,NCP,176,1496,126,2,\"[1496, 1497]\"\r\nCP-18.zip,1,CP,1782,3581,71,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]\"\r\nCP-10.zip,1,CP,1393,3845,60,2,\"[3845, 3846]\"\r\nNormal-12.zip,0,Normal,2015,470,94,1,[470]\r\nNCP-6.zip,2,NCP,206,1557,58,2,\"[1556, 1557]\"\r\nCP-1.zip,1,CP,1096,3314,196,1,[3314]\r\nNCP-16.zip,2,NCP,43,1220,65,2,\"[1219, 1220]\"\r\nNCP-18.zip,2,NCP,499,2155,58,2,\"[2154, 2155]\"\r\nCP-10.zip,1,CP,1409,3881,66,2,\"[3881, 3882]\"\r\nNormal-4.zip,0,Normal,777,212,83,1,[212]\r\nNCP-9.zip,2,NCP,2708,2701,59,1,[2701]\r\nCP-11.zip,1,CP,1432,3933,60,2,\"[3932, 3933]\"\r\nNCP-4.zip,2,NCP,141,1426,129,2,\"[1426, 1427]\"\r\nCP-23.zip,1,CP,673,3035,76,1,[3035]\r\nNCP-29.zip,2,NCP,879,2414,173,1,[2414]\r\nNCP-19.zip,2,NCP,536,2229,145,2,\"[2229, 2230]\"\r\nNCP-18.zip,2,NCP,504,2165,65,2,\"[2164, 2165]\"\r\nNormal-1.zip,0,Normal,1678,829,34,6,\"[827, 828, 829, 830, 831, 832]\"\r\nNCP-8.zip,2,NCP,264,1674,179,2,\"[1674, 1675]\"\r\nNCP-4.zip,2,NCP,155,1454,139,2,\"[1454, 1455]\"\r\nCP-11.zip,1,CP,1418,3900,180,3,\"[3900, 3901, 3902]\"\r\nNCP-5.zip,2,NCP,194,1532,133,2,\"[1532, 1533]\"\r\nNCP-13.zip,2,NCP,361,1873,143,2,\"[1873, 1874]\"\r\nNormal-1.zip,0,Normal,1710,976,78,2,\"[975, 976]\"\r\nNormal-15.zip,0,Normal,2091,546,106,1,[546]\r\nNCP-19.zip,2,NCP,518,2192,135,2,\"[2192, 2193]\"\r\nNormal-18.zip,0,Normal,2190,645,90,1,[645]\r\nNormal-12.zip,0,Normal,2013,468,87,1,[468]\r\nNCP-11.zip,2,NCP,302,1751,62,2,\"[1750, 1751]\"\r\nNormal-15.zip,0,Normal,2109,564,103,1,[564]\r\nNCP-8.zip,2,NCP,264,1675,75,2,\"[1674, 1675]\"\r\nCP-23.zip,1,CP,653,3015,285,1,[3015]\r\nNCP-7.zip,2,NCP,235,1615,139,2,\"[1615, 1616]\"\r\nCP-19.zip,1,CP,1786,3194,77,3,\"[3192, 3193, 3194]\"\r\nCP-1.zip,1,CP,0,3137,37,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 
3140]\"\r\nNCP-15.zip,2,NCP,423,1999,133,2,\"[1999, 2000]\"\r\nCP-6.zip,1,CP,1232,3450,91,1,[3450]\r\nCP-14.zip,1,CP,1526,4158,51,3,\"[4157, 4158, 4159]\"\r\nCP-4.zip,1,CP,1184,3402,193,1,[3402]\r\nNCP-17.zip,2,NCP,483,2122,56,2,\"[2121, 2122]\"\r\nCP-12.zip,1,CP,1459,3996,69,3,\"[3995, 3996, 3997]\"\r\nCP-17.zip,1,CP,1637,4325,20,1,[4325]\r\nCP-10.zip,1,CP,1411,3885,66,2,\"[3885, 3886]\"\r\nNCP-9.zip,2,NCP,2707,2673,44,1,[2673]\r\nNCP-29.zip,2,NCP,892,2431,20,1,[2431]\r\nCP-26.zip,1,CP,3720,5653,243,2,\"[5652, 5653]\"\r\nNormal-13.zip,0,Normal,2023,478,96,1,[478]\r\nCP-11.zip,1,CP,1439,3947,62,2,\"[3946, 3947]\"\r\nNormal-6.zip,0,Normal,1801,256,89,1,[256]\r\nNCP-16.zip,2,NCP,442,2038,131,2,\"[2038, 2039]\"\r\nNormal-9.zip,0,Normal,1920,375,100,1,[375]\r\nCP-13.zip,1,CP,1489,4067,457,4,\"[4067, 4068, 4069, 4070]\"\r\nCP-9.zip,1,CP,1378,3811,50,2,\"[3810, 3811]\"\r\nNCP-12.zip,2,NCP,336,1821,50,2,\"[1820, 1821]\"\r\nNCP-3.zip,2,NCP,1295,2736,61,1,[2736]\r\nNormal-20.zip,0,Normal,2268,723,85,1,[723]\r\nNormal-20.zip,0,Normal,2281,736,84,1,[736]\r\nCP-1.zip,1,CP,1083,3128,71,2,\"[3128, 3129]\"\r\nCP-14.zip,1,CP,1545,4207,65,2,\"[4206, 4207]\"\r\nNormal-21.zip,0,Normal,2306,761,103,1,[761]\r\nNCP-13.zip,2,NCP,350,1852,47,2,\"[1851, 1852]\"\r\nCP-8.zip,1,CP,1326,3688,53,2,\"[3688, 3689]\"\r\nNCP-7.zip,2,NCP,236,1617,283,2,\"[1617, 1618]\"\r\nNormal-1.zip,0,Normal,1722,1001,73,2,\"[1001, 1002]\"\r\nNCP-5.zip,2,NCP,177,1498,139,2,\"[1498, 1499]\"\r\nCP-20.zip,1,CP,2668,3259,52,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nNormal-1.zip,0,Normal,1708,971,74,2,\"[971, 972]\"\r\nNCP-8.zip,2,NCP,2680,2651,46,1,[2651]\r\nCP-11.zip,1,CP,1436,3940,45,2,\"[3940, 3941]\"\r\nNCP-20.zip,2,NCP,570,2298,139,2,\"[2298, 2299]\"\r\nNormal-1.zip,0,Normal,1723,1004,77,2,\"[1003, 1004]\"\r\nNCP-10.zip,2,NCP,2723,2679,40,1,[2679]\r\nNCP-13.zip,2,NCP,364,1880,56,2,\"[1879, 
1880]\"\r\nNormal-21.zip,0,Normal,2302,757,96,1,[757]\r\nNormal-18.zip,0,Normal,2199,654,85,1,[654]\r\nCP-9.zip,1,CP,1369,3790,67,2,\"[3790, 3791]\"\r\nNormal-25.zip,0,Normal,3858,5370,234,1,[5370]\r\nNormal-21.zip,0,Normal,2286,741,84,1,[741]\r\nNCP-21.zip,2,NCP,65,1263,128,2,\"[1263, 1264]\"\r\nCP-23.zip,1,CP,661,3023,116,1,[3023]\r\nCP-30.zip,1,CP,3937,5643,66,2,\"[5643, 5644]\"\r\nCP-25.zip,1,CP,8,3514,36,2,\"[3513, 3514]\"\r\nNormal-1.zip,0,Normal,1720,995,74,2,\"[995, 996]\"\r\nNCP-15.zip,2,NCP,421,1996,67,2,\"[1995, 1996]\"\r\nCP-25.zip,1,CP,738,3100,110,1,[3100]\r\nNCP-11.zip,2,NCP,304,1755,67,2,\"[1754, 1755]\"\r\nNCP-22.zip,2,NCP,834,2348,226,2,\"[2347, 2348]\"\r\nNormal-3.zip,0,Normal,769,204,138,1,[204]\r\nNormal-1.zip,0,Normal,1680,840,66,6,\"[839, 840, 841, 842, 843, 844]\"\r\nCP-13.zip,1,CP,1519,4141,68,2,\"[4141, 4142]\"\r\nNCP-12.zip,2,NCP,315,1777,107,2,\"[1777, 1778]\"\r\nNormal-2.zip,0,Normal,1753,1088,66,7,\"[1086, 1087, 1088, 1089, 1090, 1091, 1092]\"\r\nCP-8.zip,1,CP,1341,3722,57,1,[3722]\r\nCP-13.zip,1,CP,1491,4075,48,3,\"[4074, 4075, 4076]\"\r\nNCP-9.zip,2,NCP,2687,2654,51,1,[2654]\r\nCP-28.zip,1,CP,3785,5729,28,1,[5729]\r\nNCP-6.zip,2,NCP,212,1568,165,2,\"[1568, 1569]\"\r\nCP-12.zip,1,CP,1477,4035,54,2,\"[4035, 4036]\"\r\nCP-16.zip,1,CP,1605,4293,23,1,[4293]\r\nNCP-29.zip,2,NCP,926,2468,24,1,[2468]\r\nCP-10.zip,1,CP,1394,3847,62,2,\"[3847, 3848]\"\r\nNCP-21.zip,2,NCP,580,2318,58,2,\"[2317, 2318]\"\r\nNCP-19.zip,2,NCP,526,2208,137,2,\"[2208, 2209]\"\r\nCP-13.zip,1,CP,1494,4085,65,3,\"[4083, 4084, 4085]\"\r\nNormal-27.zip,0,Normal,3895,5421,71,4,\"[5418, 5419, 5420, 5421]\"\r\nNCP-8.zip,2,NCP,267,1680,129,2,\"[1680, 1681]\"\r\nNormal-16.zip,0,Normal,2124,579,101,1,[579]\r\nNCP-18.zip,2,NCP,49,1232,61,2,\"[1231, 1232]\"\r\nCP-21.zip,1,CP,589,2951,300,1,[2951]\r\nCP-25.zip,1,CP,8,3513,42,2,\"[3513, 3514]\"\r\nNCP-6.zip,2,NCP,206,1556,139,2,\"[1556, 1557]\"\r\nCP-27.zip,1,CP,3765,5709,20,1,[5709]\r\nNCP-4.zip,2,NCP,147,1438,173,2,\"[1438, 
1439]\"\r\nNormal-20.zip,0,Normal,2256,711,86,1,[711]\r\nNormal-27.zip,0,Normal,3904,5436,82,1,[5436]\r\nNCP-14.zip,2,NCP,384,1921,54,2,\"[1920, 1921]\"\r\nCP-18.zip,1,CP,1780,3560,69,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nCP-14.zip,1,CP,1522,4148,61,2,\"[4148, 4149]\"\r\nNCP-8.zip,2,NCP,256,1658,139,2,\"[1658, 1659]\"\r\nCP-10.zip,1,CP,1406,3874,60,2,\"[3874, 3875]\"\r\nCP-4.zip,1,CP,1177,3395,210,1,[3395]\r\nNormal-1.zip,0,Normal,1673,804,291,6,\"[804, 805, 806, 807, 808, 809]\"\r\nNCP-2.zip,2,NCP,122,1385,149,2,\"[1385, 1386]\"\r\nCP-9.zip,1,CP,1354,3752,46,3,\"[3751, 3752, 3753]\"\r\nNCP-23.zip,2,NCP,922,2464,240,1,[2464]\r\nCP-20.zip,1,CP,2668,3251,58,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nNormal-6.zip,0,Normal,1796,251,96,1,[251]\r\nCP-9.zip,1,CP,1374,3803,50,2,\"[3802, 3803]\"\r\nNCP-7.zip,2,NCP,237,1620,61,2,\"[1619, 1620]\"\r\nNCP-13.zip,2,NCP,363,1878,58,2,\"[1877, 1878]\"\r\nCP-1.zip,1,CP,1084,3130,67,1,[3130]\r\nNormal-14.zip,0,Normal,2082,537,78,1,[537]\r\nCP-18.zip,1,CP,1656,4344,26,1,[4344]\r\nNCP-18.zip,2,NCP,491,2138,149,2,\"[2138, 2139]\"\r\nCP-22.zip,1,CP,609,2971,76,1,[2971]\r\nNormal-18.zip,0,Normal,2198,653,88,1,[653]\r\nNCP-6.zip,2,NCP,212,1569,69,2,\"[1568, 1569]\"\r\nCP-21.zip,1,CP,607,2969,178,1,[2969]\r\nNCP-9.zip,2,NCP,269,1685,64,2,\"[1684, 1685]\"\r\nCP-9.zip,1,CP,1364,3777,56,3,\"[3776, 3777, 3778]\"\r\nCP-17.zip,1,CP,1622,4310,27,1,[4310]\r\nCP-16.zip,1,CP,1601,4289,19,1,[4289]\r\nCP-10.zip,1,CP,1388,3832,51,2,\"[3831, 3832]\"\r\nNormal-27.zip,0,Normal,3908,5442,56,1,[5442]\r\nCP-25.zip,1,CP,732,3094,159,1,[3094]\r\nNCP-14.zip,2,NCP,40,1212,149,2,\"[1212, 1213]\"\r\nNCP-21.zip,2,NCP,65,1264,54,2,\"[1263, 1264]\"\r\nCP-12.zip,1,CP,1477,4036,54,2,\"[4035, 
4036]\"\r\nNormal-10.zip,0,Normal,1953,408,94,1,[408]\r\nCP-15.zip,1,CP,1577,4265,22,1,[4265]\r\nNormal-14.zip,0,Normal,2055,510,91,1,[510]\r\nNormal-17.zip,0,Normal,2154,609,94,1,[609]\r\nNormal-27.zip,0,Normal,3895,5418,61,4,\"[5418, 5419, 5420, 5421]\"\r\nNormal-19.zip,0,Normal,2227,682,73,1,[682]\r\nNormal-11.zip,0,Normal,1975,430,101,1,[430]\r\nCP-15.zip,1,CP,1584,4272,20,1,[4272]\r\nNormal-20.zip,0,Normal,2262,717,84,1,[717]\r\nCP-14.zip,1,CP,1543,4200,190,3,\"[4200, 4201, 4202]\"\r\nNormal-3.zip,0,Normal,753,188,300,1,[188]\r\nCP-12.zip,1,CP,1475,4032,50,2,\"[4031, 4032]\"\r\nNCP-16.zip,2,NCP,458,2071,55,2,\"[2070, 2071]\"\r\nNCP-5.zip,2,NCP,180,1504,136,2,\"[1504, 1505]\"\r\nCP-30.zip,1,CP,3938,5645,94,1,[5645]\r\nCP-9.zip,1,CP,1364,3778,56,3,\"[3776, 3777, 3778]\"\r\nNormal-23.zip,0,Normal,2632,142,39,1,[142]\r\nNormal-5.zip,0,Normal,810,245,324,1,[245]\r\nNCP-5.zip,2,NCP,174,1493,56,2,\"[1492, 1493]\"\r\nCP-17.zip,1,CP,1632,4320,23,1,[4320]\r\nNCP-2.zip,2,NCP,112,1366,56,2,\"[1365, 1366]\"\r\nCP-10.zip,1,CP,1411,3886,66,2,\"[3885, 3886]\"\r\nCP-18.zip,1,CP,1780,3554,67,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nCP-20.zip,1,CP,2668,3252,51,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nNormal-6.zip,0,Normal,1820,275,83,1,[275]\r\nNormal-1.zip,0,Normal,1673,809,57,6,\"[804, 805, 806, 807, 808, 809]\"\r\nNormal-18.zip,0,Normal,2204,659,94,1,[659]\r\nCP-14.zip,1,CP,1531,4169,59,2,\"[4169, 4170]\"\r\nCP-12.zip,1,CP,1474,4030,62,2,\"[4029, 4030]\"\r\nNormal-18.zip,0,Normal,2215,670,80,1,[670]\r\nNCP-21.zip,2,NCP,579,2315,150,2,\"[2315, 2316]\"\r\nNCP-28.zip,2,NCP,854,2374,265,1,[2374]\r\nNormal-25.zip,0,Normal,3838,5350,201,1,[5350]\r\nCP-9.zip,1,CP,1352,3747,61,1,[3747]\r\nNormal-1.zip,0,Normal,1719,994,76,2,\"[993, 994]\"\r\nCP-11.zip,1,CP,1418,3901,54,3,\"[3900, 3901, 3902]\"\r\nNCP-28.zip,2,NCP,852,2372,47,2,\"[2371, 
2372]\"\r\nNormal-19.zip,0,Normal,2225,680,94,1,[680]\r\nNormal-16.zip,0,Normal,2148,603,86,1,[603]\r\nNCP-19.zip,2,NCP,544,2245,147,2,\"[2245, 2246]\"\r\nCP-29.zip,1,CP,3826,5770,26,1,[5770]\r\nNCP-7.zip,2,NCP,229,1602,156,2,\"[1602, 1603]\"\r\nNormal-1.zip,0,Normal,1673,807,283,6,\"[804, 805, 806, 807, 808, 809]\"\r\nNormal-6.zip,0,Normal,1823,278,85,1,[278]\r\nNCP-27.zip,2,NCP,824,2335,259,1,[2335]\r\nCP-18.zip,1,CP,1776,3535,64,6,\"[3534, 3535, 3536, 3537, 3538, 3539]\"\r\nNCP-18.zip,2,NCP,513,2183,68,2,\"[2182, 2183]\"\r\nCP-30.zip,1,CP,3934,5639,77,3,\"[5638, 5639, 5640]\"\r\nCP-4.zip,1,CP,1168,3386,203,1,[3386]\r\nNCP-12.zip,2,NCP,323,1794,116,2,\"[1794, 1795]\"\r\nCP-8.zip,1,CP,1340,3720,64,2,\"[3720, 3721]\"\r\nCP-5.zip,1,CP,1223,3441,232,1,[3441]\r\nNCP-4.zip,2,NCP,166,1477,58,2,\"[1476, 1477]\"\r\nNCP-6.zip,2,NCP,219,1583,65,2,\"[1582, 1583]\"\r\nNCP-4.zip,2,NCP,155,1455,58,2,\"[1454, 1455]\"\r\nNCP-1.zip,2,NCP,101,1340,57,2,\"[1339, 1340]\"\r\nNCP-11.zip,2,NCP,298,1742,145,2,\"[1742, 1743]\"\r\nNormal-1.zip,0,Normal,1684,874,71,5,\"[870, 871, 873, 874, 875]\"\r\nCP-14.zip,1,CP,1554,4227,41,2,\"[4226, 4227]\"\r\nNCP-18.zip,2,NCP,489,2134,139,2,\"[2134, 2135]\"\r\nNormal-23.zip,0,Normal,2615,125,36,1,[125]\r\nNCP-8.zip,2,NCP,2674,2693,45,1,[2693]\r\nNCP-6.zip,2,NCP,226,1596,142,2,\"[1596, 1597]\"\r\nNCP-10.zip,2,NCP,274,1695,67,2,\"[1694, 1695]\"\r\nNormal-10.zip,0,Normal,1944,399,97,1,[399]\r\nCP-6.zip,1,CP,1236,3454,159,1,[3454]\r\nCP-20.zip,1,CP,2668,3257,53,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nCP-23.zip,1,CP,670,3032,78,1,[3032]\r\nNCP-20.zip,2,NCP,548,2253,144,2,\"[2253, 2254]\"\r\nCP-18.zip,1,CP,1769,3516,23,1,[3516]\r\nNormal-3.zip,0,Normal,754,189,308,1,[189]\r\nNCP-7.zip,2,NCP,239,1623,146,2,\"[1623, 1624]\"\r\nNCP-14.zip,2,NCP,392,1935,58,2,\"[1934, 1935]\"\r\nNormal-6.zip,0,Normal,1824,279,86,1,[279]\r\nNormal-2.zip,0,Normal,1753,1087,77,7,\"[1086, 1087, 1088, 1089, 1090, 1091, 
1092]\"\r\nNCP-30.zip,2,NCP,997,2554,49,2,\"[2553, 2554]\"\r\nCP-26.zip,1,CP,3727,5663,42,1,[5663]\r\nCP-11.zip,1,CP,1433,3934,62,2,\"[3934, 3935]\"\r\nNormal-18.zip,0,Normal,2187,642,92,1,[642]\r\nNCP-2.zip,2,NCP,112,1365,133,2,\"[1365, 1366]\"\r\nNCP-6.zip,2,NCP,219,1582,156,2,\"[1582, 1583]\"\r\nNormal-10.zip,0,Normal,1939,394,93,1,[394]\r\nCP-18.zip,1,CP,1775,3532,57,4,\"[3530, 3531, 3532, 3533]\"\r\nCP-2.zip,1,CP,11,3165,268,7,\"[3161, 3162, 3163, 3164, 3165, 3166, 3167]\"\r\nNormal-12.zip,0,Normal,2012,467,102,1,[467]\r\nCP-21.zip,1,CP,587,2949,151,1,[2949]\r\nNormal-15.zip,0,Normal,2116,571,92,1,[571]\r\nCP-1.zip,1,CP,10,3156,289,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]\"\r\nNormal-27.zip,0,Normal,3895,5419,61,4,\"[5418, 5419, 5420, 5421]\"\r\nNormal-25.zip,0,Normal,3854,5366,197,1,[5366]\r\nNormal-4.zip,0,Normal,771,206,306,1,[206]\r\nNCP-3.zip,2,NCP,129,1403,132,2,\"[1403, 1404]\"\r\nNormal-13.zip,0,Normal,2042,497,90,1,[497]\r\nNormal-2.zip,0,Normal,1753,1090,296,7,\"[1086, 1087, 1088, 1089, 1090, 1091, 1092]\"\r\nNCP-17.zip,2,NCP,478,2111,145,2,\"[2111, 2112]\"\r\nNormal-17.zip,0,Normal,2171,626,92,1,[626]\r\nCP-10.zip,1,CP,1410,3884,51,2,\"[3883, 3884]\"\r\nCP-3.zip,1,CP,1140,3358,370,1,[3358]\r\nNCP-22.zip,2,NCP,885,2422,52,2,\"[2422, 2423]\"\r\nNCP-27.zip,2,NCP,1050,2624,428,2,\"[2623, 2624]\"\r\nNCP-17.zip,2,NCP,478,2112,61,2,\"[2111, 2112]\"\r\nCP-20.zip,1,CP,2668,3254,47,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nNCP-16.zip,2,NCP,433,2019,120,2,\"[2019, 2020]\"\r\nNCP-19.zip,2,NCP,517,2191,58,2,\"[2190, 2191]\"\r\nNormal-24.zip,0,Normal,2657,167,27,1,[167]\r\nCP-8.zip,1,CP,1339,3718,59,2,\"[3718, 3719]\"\r\nNCP-17.zip,2,NCP,482,2119,139,2,\"[2119, 2120]\"\r\nCP-17.zip,1,CP,1635,4323,27,1,[4323]\r\nNormal-10.zip,0,Normal,1930,385,98,1,[385]\r\nNormal-1.zip,0,Normal,1679,837,70,6,\"[833, 834, 835, 836, 837, 838]\"\r\nNCP-19.zip,2,NCP,536,2230,61,2,\"[2229, 
2230]\"\r\nNCP-25.zip,2,NCP,3942,5539,37,1,[5539]\r\nNormal-17.zip,0,Normal,2180,635,95,1,[635]\r\nNormal-1.zip,0,Normal,1680,839,66,6,\"[839, 840, 841, 842, 843, 844]\"\r\nNormal-1.zip,0,Normal,1705,965,69,2,\"[965, 966]\"\r\nNCP-5.zip,2,NCP,174,1492,134,2,\"[1492, 1493]\"\r\nNCP-14.zip,2,NCP,386,1923,62,1,[1923]\r\nCP-22.zip,1,CP,625,2987,100,1,[2987]\r\nCP-20.zip,1,CP,2450,2929,90,2,\"[2928, 2929]\"\r\nNormal-10.zip,0,Normal,1949,404,92,1,[404]\r\nCP-14.zip,1,CP,1546,4208,58,2,\"[4208, 4209]\"\r\nNCP-21.zip,2,NCP,63,1260,58,2,\"[1259, 1260]\"\r\nNormal-23.zip,0,Normal,2624,134,38,1,[134]\r\nNCP-10.zip,2,NCP,272,1690,153,2,\"[1690, 1691]\"\r\nCP-5.zip,1,CP,1209,3427,313,1,[3427]\r\nNCP-11.zip,2,NCP,293,1731,122,2,\"[1731, 1732]\"\r\nCP-9.zip,1,CP,1383,3822,71,2,\"[3821, 3822]\"\r\nNormal-4.zip,0,Normal,793,228,94,1,[228]\r\nNCP-2.zip,2,NCP,1057,2633,570,1,[2633]\r\nNormal-1.zip,0,Normal,1679,835,67,6,\"[833, 834, 835, 836, 837, 838]\"\r\nCP-4.zip,1,CP,1185,3403,131,1,[3403]\r\nCP-11.zip,1,CP,1446,3965,63,2,\"[3965, 3966]\"\r\nCP-15.zip,1,CP,1576,4264,23,1,[4264]\r\nCP-12.zip,1,CP,1487,4062,68,3,\"[4061, 4062, 4063]\"\r\nCP-9.zip,1,CP,1381,3817,66,3,\"[3815, 3816, 3817]\"\r\nCP-28.zip,1,CP,3767,5711,17,1,[5711]\r\nNormal-23.zip,0,Normal,2610,120,41,1,[120]\r\nCP-10.zip,1,CP,1394,3848,62,2,\"[3847, 3848]\"\r\nNCP-4.zip,2,NCP,160,1465,61,2,\"[1464, 1465]\"\r\nCP-14.zip,1,CP,1543,4201,57,3,\"[4200, 4201, 4202]\"\r\nCP-23.zip,1,CP,652,3014,277,1,[3014]\r\nCP-16.zip,1,CP,1607,4295,17,1,[4295]\r\nNormal-18.zip,0,Normal,2213,668,84,1,[668]\r\nNormal-16.zip,0,Normal,2121,576,87,1,[576]\r\nNormal-23.zip,0,Normal,2627,137,41,1,[137]\r\nNCP-21.zip,2,NCP,582,2322,54,2,\"[2321, 2322]\"\r\nCP-19.zip,1,CP,2431,2893,361,1,[2893]\r\nNormal-1.zip,0,Normal,1717,989,67,2,\"[989, 990]\"\r\nCP-10.zip,1,CP,1385,3825,64,2,\"[3825, 3826]\"\r\nCP-5.zip,1,CP,1198,3416,162,1,[3416]\r\nNCP-21.zip,2,NCP,578,2314,55,2,\"[2313, 2314]\"\r\nNCP-20.zip,2,NCP,56,1246,68,2,\"[1245, 
1246]\"\r\nNCP-19.zip,2,NCP,532,2222,139,2,\"[2222, 2223]\"\r\nNormal-21.zip,0,Normal,2283,738,87,1,[738]\r\nNormal-19.zip,0,Normal,2222,677,78,1,[677]\r\nCP-9.zip,1,CP,1361,3770,50,2,\"[3770, 3771]\"\r\nNCP-15.zip,2,NCP,420,1993,177,2,\"[1993, 1994]\"\r\nCP-18.zip,1,CP,1776,3538,76,6,\"[3534, 3535, 3536, 3537, 3538, 3539]\"\r\nNormal-1.zip,0,Normal,1706,968,64,2,\"[967, 968]\"\r\nCP-20.zip,1,CP,2668,3253,51,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nNCP-5.zip,2,NCP,171,1486,143,2,\"[1486, 1487]\"\r\nNormal-3.zip,0,Normal,750,185,281,1,[185]\r\nCP-18.zip,1,CP,1780,3565,80,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nNCP-13.zip,2,NCP,362,1876,63,2,\"[1875, 1876]\"\r\nCP-6.zip,1,CP,1234,3452,191,1,[3452]\r\nNormal-1.zip,0,Normal,1684,873,133,5,\"[870, 871, 873, 874, 875]\"\r\nNormal-6.zip,0,Normal,1812,267,99,1,[267]\r\nNCP-17.zip,2,NCP,474,2103,114,2,\"[2103, 2104]\"\r\nNormal-7.zip,0,Normal,1857,312,80,1,[312]\r\nNormal-12.zip,0,Normal,1992,447,104,1,[447]\r\nCP-18.zip,1,CP,1664,4352,20,1,[4352]\r\nNormal-27.zip,0,Normal,3895,5420,71,4,\"[5418, 5419, 5420, 5421]\"\r\nNCP-19.zip,2,NCP,517,2190,139,2,\"[2190, 2191]\"\r\nNormal-23.zip,0,Normal,2625,135,39,1,[135]\r\nNormal-5.zip,0,Normal,811,246,124,1,[246]\r\nCP-4.zip,1,CP,1162,3380,212,1,[3380]\r\nCP-22.zip,1,CP,611,2973,76,1,[2973]\r\nCP-9.zip,1,CP,1381,3815,261,3,\"[3815, 3816, 3817]\"\r\nCP-9.zip,1,CP,1371,3794,200,3,\"[3794, 3795, 3796]\"\r\nNCP-16.zip,2,NCP,432,2017,128,2,\"[2017, 2018]\"\r\nNormal-20.zip,0,Normal,2278,733,90,1,[733]\r\nNormal-19.zip,0,Normal,2240,695,78,1,[695]\r\nCP-28.zip,1,CP,3786,5730,29,1,[5730]\r\nNormal-15.zip,0,Normal,2097,552,89,1,[552]\r\nNCP-18.zip,2,NCP,500,2156,162,2,\"[2156, 2157]\"\r\nCP-9.zip,1,CP,1374,3802,50,2,\"[3802, 
3803]\"\r\nNormal-23.zip,0,Normal,2606,116,33,1,[116]\r\nCP-26.zip,1,CP,3651,5550,395,1,[5550]\r\nNormal-9.zip,0,Normal,1912,367,92,1,[367]\r\nNCP-25.zip,2,NCP,3953,5466,44,1,[5466]\r\nCP-25.zip,1,CP,724,3086,100,1,[3086]\r\nNormal-21.zip,0,Normal,2292,747,82,1,[747]\r\nCP-7.zip,1,CP,1262,3480,384,1,[3480]\r\nCP-13.zip,1,CP,1489,4068,229,4,\"[4067, 4068, 4069, 4070]\"\r\nNormal-10.zip,0,Normal,1931,386,80,1,[386]\r\nNCP-20.zip,2,NCP,563,2284,141,2,\"[2284, 2285]\"\r\nCP-2.zip,1,CP,1123,3341,213,1,[3341]\r\nNCP-17.zip,2,NCP,486,2127,153,2,\"[2127, 2128]\"\r\nCP-26.zip,1,CP,3733,5673,32,3,\"[5673, 5674, 5675]\"\r\nCP-3.zip,1,CP,1152,3370,69,1,[3370]\r\nNCP-28.zip,2,NCP,838,2353,89,1,[2353]\r\nNormal-1.zip,0,Normal,1717,990,67,2,\"[989, 990]\"\r\nNCP-30.zip,2,NCP,997,2553,54,2,\"[2553, 2554]\"\r\nNCP-17.zip,2,NCP,48,1230,61,2,\"[1229, 1230]\"\r\nNCP-17.zip,2,NCP,467,2089,138,2,\"[2089, 2090]\"\r\nNCP-20.zip,2,NCP,564,2286,143,2,\"[2286, 2287]\"\r\nNormal-1.zip,0,Normal,1722,1002,73,2,\"[1001, 1002]\"\r\nNormal-7.zip,0,Normal,1854,309,82,1,[309]\r\nNormal-2.zip,0,Normal,1747,1065,60,1,[1065]\r\nNCP-19.zip,2,NCP,535,2228,47,2,\"[2227, 2228]\"\r\nNCP-26.zip,2,NCP,3974,5508,52,1,[5508]\r\nCP-14.zip,1,CP,1526,4157,124,3,\"[4157, 4158, 4159]\"\r\nNormal-7.zip,0,Normal,1829,284,92,1,[284]\r\nNormal-1.zip,0,Normal,1673,808,57,6,\"[804, 805, 806, 807, 808, 809]\"\r\nNCP-2.zip,2,NCP,1271,2712,56,1,[2712]\r\nCP-30.zip,1,CP,3934,5638,59,3,\"[5638, 5639, 5640]\"\r\nNCP-26.zip,2,NCP,3979,5486,52,1,[5486]\r\nNCP-20.zip,2,NCP,554,2265,128,2,\"[2265, 2266]\"\r\nNCP-6.zip,2,NCP,221,1587,53,2,\"[1586, 1587]\"\r\nNCP-20.zip,2,NCP,558,2273,119,2,\"[2273, 2274]\"\r\nCP-8.zip,1,CP,1321,3678,58,2,\"[3678, 3679]\"\r\nNCP-6.zip,2,NCP,226,1597,60,2,\"[1596, 1597]\"\r\nNCP-21.zip,2,NCP,76,1286,51,2,\"[1285, 1286]\"\r\nNCP-1.zip,2,NCP,1042,2613,143,2,\"[2613, 2614]\"\r\nNCP-13.zip,2,NCP,366,1884,67,2,\"[1883, 1884]\"\r\nNCP-18.zip,2,NCP,490,2136,147,2,\"[2136, 
2137]\"\r\nNCP-28.zip,2,NCP,856,2376,227,2,\"[2376, 2377]\"\r\nCP-19.zip,1,CP,2445,2920,283,2,\"[2920, 2921]\"\r\nNormal-1.zip,0,Normal,1673,806,59,6,\"[804, 805, 806, 807, 808, 809]\"\r\nCP-25.zip,1,CP,9,3151,72,4,\"[3148, 3149, 3150, 3151]\"\r\nNormal-25.zip,0,Normal,3847,5359,219,1,[5359]\r\nNormal-12.zip,0,Normal,2005,460,77,1,[460]\r\nCP-30.zip,1,CP,3936,5642,59,1,[5642]\r\nNCP-12.zip,2,NCP,326,1800,117,2,\"[1800, 1801]\"\r\nNormal-13.zip,0,Normal,2045,500,85,1,[500]\r\nCP-15.zip,1,CP,1583,4271,18,1,[4271]\r\nNormal-20.zip,0,Normal,2261,716,83,1,[716]\r\nNormal-20.zip,0,Normal,2276,731,91,1,[731]\r\nCP-18.zip,1,CP,1776,3536,75,6,\"[3534, 3535, 3536, 3537, 3538, 3539]\"\r\nNCP-27.zip,2,NCP,1034,2605,19,1,[2605]\r\nNCP-16.zip,2,NCP,445,2044,139,2,\"[2044, 2045]\"\r\nCP-12.zip,1,CP,1461,4001,53,2,\"[4000, 4001]\"\r\nCP-12.zip,1,CP,1485,4056,114,3,\"[4056, 4057, 4058]\"\r\nNCP-7.zip,2,NCP,231,1606,139,2,\"[1606, 1607]\"\r\nNCP-13.zip,2,NCP,343,1838,55,2,\"[1837, 1838]\"\r\nNCP-6.zip,2,NCP,202,1548,161,2,\"[1548, 1549]\"\r\nNormal-17.zip,0,Normal,2160,615,96,1,[615]\r\nCP-28.zip,1,CP,3780,5724,27,1,[5724]\r\nCP-9.zip,1,CP,1354,3753,46,3,\"[3751, 3752, 3753]\"\r\nCP-16.zip,1,CP,1598,4286,23,1,[4286]\r\nCP-19.zip,1,CP,2445,2921,119,2,\"[2920, 2921]\"\r\nCP-9.zip,1,CP,1361,3771,50,2,\"[3770, 3771]\"\r\nNCP-15.zip,2,NCP,412,1974,54,2,\"[1973, 1974]\"\r\nNormal-8.zip,0,Normal,1861,316,76,1,[316]\r\nNormal-3.zip,0,Normal,1766,1150,57,3,\"[1149, 1150, 1151]\"\r\nNormal-17.zip,0,Normal,2182,637,96,1,[637]\r\nNormal-7.zip,0,Normal,1833,288,102,1,[288]\r\nNormal-9.zip,0,Normal,1894,349,99,1,[349]\r\nNormal-22.zip,0,Normal,2319,774,101,1,[774]\r\nNormal-1.zip,0,Normal,1680,844,64,6,\"[839, 840, 841, 842, 843, 844]\"\r\nCP-24.zip,1,CP,679,3041,94,1,[3041]\r\nCP-13.zip,1,CP,1489,4069,58,4,\"[4067, 4068, 4069, 
4070]\"\r\nCP-30.zip,1,CP,3832,5776,23,1,[5776]\r\nCP-25.zip,1,CP,720,3082,84,1,[3082]\r\nNormal-19.zip,0,Normal,2235,690,89,1,[690]\r\nCP-11.zip,1,CP,1429,3927,52,2,\"[3926, 3927]\"\r\nNormal-7.zip,0,Normal,1835,290,83,1,[290]\r\nNCP-7.zip,2,NCP,239,1624,61,2,\"[1623, 1624]\"\r\nNormal-27.zip,0,Normal,3899,5430,76,2,\"[5429, 5430]\"\r\nCP-4.zip,1,CP,1165,3383,151,1,[3383]\r\nNCP-3.zip,2,NCP,1297,2738,56,1,[2738]\r\nCP-1.zip,1,CP,0,3134,37,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nNCP-22.zip,2,NCP,832,2345,25,1,[2345]\r\nNCP-25.zip,2,NCP,3952,5505,46,1,[5505]\r\nNCP-26.zip,2,NCP,3977,5509,56,1,[5509]\r\nCP-16.zip,1,CP,1609,4297,20,1,[4297]\r\nNormal-21.zip,0,Normal,2294,749,103,1,[749]\r\nNCP-25.zip,2,NCP,3967,5507,46,1,[5507]\r\nCP-13.zip,1,CP,1495,4089,48,4,\"[4086, 4087, 4088, 4089]\"\r\nCP-7.zip,1,CP,1317,3672,58,3,\"[3670, 3671, 3672]\"\r\nNormal-26.zip,0,Normal,3877,5389,25,1,[5389]\r\nCP-20.zip,1,CP,2766,3297,41,1,[3297]\r\nCP-18.zip,1,CP,1661,4349,32,1,[4349]\r\nNCP-19.zip,2,NCP,535,2227,112,2,\"[2227, 2228]\"\r\nCP-2.zip,1,CP,1120,3338,159,1,[3338]\r\nNCP-2.zip,2,NCP,118,1377,142,2,\"[1377, 1378]\"\r\nNormal-7.zip,0,Normal,1843,298,96,1,[298]\r\nNCP-15.zip,2,NCP,400,1950,155,1,[1950]\r\nNCP-25.zip,2,NCP,3704,5531,60,1,[5531]\r\nNormal-15.zip,0,Normal,2095,550,99,1,[550]\r\nNormal-1.zip,0,Normal,1684,870,68,5,\"[870, 871, 873, 874, 875]\"\r\nNCP-16.zip,2,NCP,44,1222,52,2,\"[1221, 1222]\"\r\nNCP-11.zip,2,NCP,31,1194,137,2,\"[1194, 1195]\"\r\nNCP-15.zip,2,NCP,409,1968,64,2,\"[1967, 1968]\"\r\nNCP-16.zip,2,NCP,451,2057,48,3,\"[2056, 2057, 2058]\"\r\nNormal-2.zip,0,Normal,1753,1086,77,7,\"[1086, 1087, 1088, 1089, 1090, 1091, 1092]\"\r\nNCP-8.zip,2,NCP,262,1670,139,2,\"[1670, 1671]\"\r\nNormal-10.zip,0,Normal,1955,410,93,1,[410]\r\nNormal-6.zip,0,Normal,1826,281,104,1,[281]\r\nNCP-28.zip,2,NCP,852,2371,47,2,\"[2371, 
2372]\"\r\nNCP-27.zip,2,NCP,1000,2558,39,1,[2558]\r\nCP-1.zip,1,CP,1072,3115,52,1,[3115]\r\nNCP-5.zip,2,NCP,177,1499,58,2,\"[1498, 1499]\"\r\nNormal-13.zip,0,Normal,2052,507,71,1,[507]\r\nCP-7.zip,1,CP,1314,3663,30,2,\"[3663, 3664]\"\r\nNCP-13.zip,2,NCP,350,1851,109,2,\"[1851, 1852]\"\r\nNCP-21.zip,2,NCP,67,1267,70,2,\"[1266, 1267]\"\r\nNCP-3.zip,2,NCP,132,1409,117,1,[1409]\r\nNormal-18.zip,0,Normal,2205,660,91,1,[660]\r\nNormal-1.zip,0,Normal,1678,827,58,6,\"[827, 828, 829, 830, 831, 832]\"\r\nNormal-14.zip,0,Normal,2054,509,88,1,[509]\r\nNormal-5.zip,0,Normal,809,244,114,1,[244]\r\nCP-1.zip,1,CP,1083,3129,71,2,\"[3128, 3129]\"\r\nNCP-27.zip,2,NCP,1029,2599,39,1,[2599]\r\nNCP-26.zip,2,NCP,3972,5481,58,1,[5481]\r\nNormal-13.zip,0,Normal,2026,481,85,1,[481]\r\nNCP-17.zip,2,NCP,47,1227,139,2,\"[1227, 1228]\"\r\nCP-27.zip,1,CP,3763,5707,20,1,[5707]\r\nNormal-6.zip,0,Normal,1798,253,93,1,[253]\r\nNCP-9.zip,2,NCP,2703,2669,41,1,[2669]\r\nCP-1.zip,1,CP,1071,3113,57,2,\"[3113, 3114]\"\r\nNCP-16.zip,2,NCP,430,2014,64,2,\"[2013, 2014]\"\r\nNCP-4.zip,2,NCP,144,1432,139,2,\"[1432, 1433]\"\r\nNormal-4.zip,0,Normal,780,215,116,1,[215]\r\nNormal-12.zip,0,Normal,2020,475,88,1,[475]\r\nNCP-13.zip,2,NCP,366,1883,161,2,\"[1883, 1884]\"\r\nNormal-2.zip,0,Normal,1761,1127,18,5,\"[1125, 1126, 1127, 1128, 1129]\"\r\nNCP-29.zip,2,NCP,899,2441,42,2,\"[2440, 2441]\"\r\nCP-16.zip,1,CP,1612,4300,26,1,[4300]\r\nNCP-15.zip,2,NCP,412,1973,129,2,\"[1973, 1974]\"\r\nNCP-10.zip,2,NCP,2717,2710,42,1,[2710]\r\nCP-19.zip,1,CP,1792,3214,71,2,\"[3214, 3215]\"\r\nNormal-20.zip,0,Normal,2269,724,113,1,[724]\r\nCP-11.zip,1,CP,1451,3976,51,2,\"[3975, 3976]\"\r\nNormal-11.zip,0,Normal,1978,433,94,1,[433]\r\nNCP-3.zip,2,NCP,1282,2723,70,1,[2723]\r\nCP-23.zip,1,CP,654,3016,74,1,[3016]\r\nNCP-13.zip,2,NCP,345,1842,62,2,\"[1841, 1842]\"\r\nCP-22.zip,1,CP,610,2972,70,1,[2972]\r\nCP-29.zip,1,CP,3799,5743,23,1,[5743]\r\nCP-1.zip,1,CP,0,3140,269,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 
3140]\"\r\nNCP-18.zip,2,NCP,506,2168,124,2,\"[2168, 2169]\"\r\nNormal-19.zip,0,Normal,2218,673,84,1,[673]\r\nNCP-7.zip,2,NCP,243,1632,31,3,\"[1631, 1632, 1633]\"\r\nNCP-25.zip,2,NCP,3948,5504,50,1,[5504]\r\nCP-7.zip,1,CP,1312,3658,65,2,\"[3658, 3659]\"\r\nNCP-16.zip,2,NCP,451,2058,23,3,\"[2056, 2057, 2058]\"\r\nCP-12.zip,1,CP,1461,4000,53,2,\"[4000, 4001]\"\r\nCP-1.zip,1,CP,10,3154,297,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]\"\r\nCP-10.zip,1,CP,1388,3831,51,2,\"[3831, 3832]\"\r\nNormal-1.zip,0,Normal,1702,957,69,2,\"[957, 958]\"\r\nNormal-17.zip,0,Normal,2181,636,100,1,[636]\r\nNCP-19.zip,2,NCP,521,2198,139,2,\"[2198, 2199]\"\r\nNormal-9.zip,0,Normal,1922,377,87,1,[377]\r\nNormal-8.zip,0,Normal,1872,327,86,1,[327]\r\nCP-9.zip,1,CP,1369,3791,67,2,\"[3790, 3791]\"\r\nCP-29.zip,1,CP,3815,5759,23,1,[5759]\r\nNCP-2.zip,2,NCP,118,1378,60,2,\"[1377, 1378]\"\r\nCP-19.zip,1,CP,1793,3216,69,1,[3216]\r\nNCP-5.zip,2,NCP,178,1501,52,2,\"[1500, 1501]\"\r\nCP-13.zip,1,CP,1495,4087,50,4,\"[4086, 4087, 4088, 4089]\"\r\nCP-18.zip,1,CP,1780,3566,41,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nCP-9.zip,1,CP,1378,3810,50,2,\"[3810, 3811]\"\r\nCP-8.zip,1,CP,1323,3682,62,2,\"[3682, 3683]\"\r\nCP-20.zip,1,CP,2754,3285,30,1,[3285]\r\nNormal-26.zip,0,Normal,3865,5377,24,1,[5377]\r\nNormal-23.zip,0,Normal,2614,124,37,1,[124]\r\nCP-12.zip,1,CP,1465,4009,67,2,\"[4009, 4010]\"\r\nCP-14.zip,1,CP,1537,4183,53,3,\"[4182, 4183, 4184]\"\r\nNormal-1.zip,0,Normal,1719,993,76,2,\"[993, 994]\"\r\nNCP-3.zip,2,NCP,128,1401,122,2,\"[1401, 1402]\"\r\nCP-28.zip,1,CP,3778,5722,25,1,[5722]\r\nNCP-1.zip,2,NCP,1018,2584,252,1,[2584]\r\nNCP-9.zip,2,NCP,27,1187,33,2,\"[1186, 1187]\"\r\nCP-13.zip,1,CP,1494,4084,65,3,\"[4083, 4084, 4085]\"\r\nNCP-13.zip,2,NCP,344,1839,152,2,\"[1839, 1840]\"\r\nCP-21.zip,1,CP,604,2966,134,1,[2966]\r\nNCP-1.zip,2,NCP,1037,2608,32,1,[2608]\r\nCP-12.zip,1,CP,1485,4057,49,3,\"[4056, 4057, 
4058]\"\r\nNCP-16.zip,2,NCP,45,1223,152,2,\"[1223, 1224]\"\r\nNormal-14.zip,0,Normal,2058,513,95,1,[513]\r\nNCP-12.zip,2,NCP,323,1795,49,2,\"[1794, 1795]\"\r\nNCP-26.zip,2,NCP,3999,5496,52,1,[5496]\r\nNormal-15.zip,0,Normal,2107,562,92,1,[562]\r\nCP-12.zip,1,CP,1478,4038,53,2,\"[4037, 4038]\"\r\nNormal-15.zip,0,Normal,2099,554,85,1,[554]\r\nNCP-21.zip,2,NCP,64,1261,132,2,\"[1261, 1262]\"\r\nCP-9.zip,1,CP,1384,3824,66,2,\"[3823, 3824]\"\r\nNCP-18.zip,2,NCP,511,2178,132,2,\"[2178, 2179]\"\r\nCP-6.zip,1,CP,1227,3445,307,1,[3445]\r\nNormal-23.zip,0,Normal,2633,143,40,1,[143]\r\nNCP-10.zip,2,NCP,2722,2678,53,1,[2678]\r\nNCP-15.zip,2,NCP,427,2008,56,2,\"[2007, 2008]\"\r\nNCP-23.zip,2,NCP,94,1324,153,2,\"[1324, 1325]\"\r\nCP-19.zip,1,CP,2446,2922,690,1,[2922]\r\nCP-26.zip,1,CP,3728,5664,229,1,[5664]\r\nCP-20.zip,1,CP,2668,3249,45,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nNormal-27.zip,0,Normal,3899,5429,75,2,\"[5429, 5430]\"\r\nNormal-9.zip,0,Normal,1902,357,93,1,[357]\r\nNCP-9.zip,2,NCP,27,1186,75,2,\"[1186, 1187]\"\r\nNCP-18.zip,2,NCP,508,2172,145,2,\"[2172, 2173]\"\r\nNormal-8.zip,0,Normal,1862,317,91,1,[317]\r\nNCP-3.zip,2,NCP,128,1402,52,2,\"[1401, 1402]\"\r\nNCP-8.zip,2,NCP,257,1660,152,2,\"[1660, 1661]\"\r\nNCP-30.zip,2,NCP,973,2516,57,1,[2516]\r\nCP-9.zip,1,CP,1357,3759,61,3,\"[3758, 3759, 3760]\"\r\nNormal-26.zip,0,Normal,3864,5376,178,1,[5376]\r\nCP-25.zip,1,CP,727,3089,104,1,[3089]\r\nNCP-8.zip,2,NCP,259,1664,155,2,\"[1664, 1665]\"\r\nCP-10.zip,1,CP,1390,3838,56,3,\"[3836, 3837, 3838]\"\r\nNormal-21.zip,0,Normal,2295,750,79,1,[750]\r\nNCP-18.zip,2,NCP,49,1231,146,2,\"[1231, 1232]\"\r\nCP-10.zip,1,CP,1391,3840,59,4,\"[3839, 3840, 3841, 3842]\"\r\nNCP-17.zip,2,NCP,48,1229,145,2,\"[1229, 1230]\"\r\nNCP-21.zip,2,NCP,73,1278,130,3,\"[1278, 1279, 1280]\"\r\nNCP-11.zip,2,NCP,296,1738,58,2,\"[1737, 1738]\"\r\nNCP-3.zip,2,NCP,129,1404,56,2,\"[1403, 1404]\"\r\nNCP-12.zip,2,NCP,330,1808,153,2,\"[1808, 
1809]\"\r\nCP-14.zip,1,CP,1529,4165,100,3,\"[4165, 4166, 4167]\"\r\nCP-4.zip,1,CP,1187,3405,325,1,[3405]\r\nNCP-11.zip,2,NCP,307,1761,136,2,\"[1761, 1762]\"\r\nCP-26.zip,1,CP,3725,5661,258,2,\"[5660, 5661]\"\r\nNormal-10.zip,0,Normal,1950,405,102,1,[405]\r\nCP-15.zip,1,CP,1563,4247,61,3,\"[4245, 4246, 4247]\"\r\nNCP-4.zip,2,NCP,144,1433,58,2,\"[1432, 1433]\"\r\nNCP-28.zip,2,NCP,855,2375,39,1,[2375]\r\nNormal-1.zip,0,Normal,1726,1008,69,2,\"[1007, 1008]\"\r\nCP-22.zip,1,CP,629,2991,304,1,[2991]\r\nNCP-4.zip,2,NCP,142,1428,141,2,\"[1428, 1429]\"\r\nCP-21.zip,1,CP,592,2954,104,1,[2954]\r\nCP-1.zip,1,CP,10,3159,293,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]\"\r\nCP-8.zip,1,CP,1326,3689,53,2,\"[3688, 3689]\"\r\nCP-9.zip,1,CP,1357,3760,61,3,\"[3758, 3759, 3760]\"\r\nNormal-24.zip,0,Normal,2648,158,32,1,[158]\r\nNCP-9.zip,2,NCP,269,1684,153,2,\"[1684, 1685]\"\r\nNormal-15.zip,0,Normal,2108,563,101,1,[563]\r\nCP-25.zip,1,CP,9,3148,290,4,\"[3148, 3149, 3150, 3151]\"\r\nNCP-13.zip,2,NCP,364,1879,132,2,\"[1879, 1880]\"\r\nNormal-23.zip,0,Normal,2605,115,35,1,[115]\r\nNCP-10.zip,2,NCP,282,1711,51,2,\"[1710, 1711]\"\r\nCP-14.zip,1,CP,1546,4209,58,2,\"[4208, 4209]\"\r\nNCP-29.zip,2,NCP,925,2467,22,1,[2467]\r\nNormal-21.zip,0,Normal,2296,751,102,1,[751]\r\nCP-2.zip,1,CP,1114,3332,361,1,[3332]\r\nNCP-5.zip,2,NCP,19,1171,61,2,\"[1170, 1171]\"\r\nNCP-13.zip,2,NCP,363,1877,139,2,\"[1877, 1878]\"\r\nCP-12.zip,1,CP,1475,4031,50,2,\"[4031, 4032]\"\r\nNCP-14.zip,2,NCP,399,1949,62,2,\"[1948, 1949]\"\r\nCP-17.zip,1,CP,1626,4314,26,1,[4314]\r\nCP-18.zip,1,CP,1780,3556,60,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nNormal-19.zip,0,Normal,2236,691,83,1,[691]\r\nCP-15.zip,1,CP,1572,4260,19,1,[4260]\r\nCP-6.zip,1,CP,1240,3458,137,1,[3458]\r\nNCP-21.zip,2,NCP,76,1285,121,2,\"[1285, 
1286]\"\r\nCP-22.zip,1,CP,623,2985,463,1,[2985]\r\nCP-27.zip,1,CP,3760,5704,23,1,[5704]\r\nCP-23.zip,1,CP,672,3034,86,1,[3034]\r\nNCP-1.zip,2,NCP,1026,2596,21,1,[2596]\r\nCP-22.zip,1,CP,635,2997,106,1,[2997]\r\nNCP-14.zip,2,NCP,375,1901,115,3,\"[1901, 1902, 1903]\"\r\nNCP-11.zip,2,NCP,304,1754,161,2,\"[1754, 1755]\"\r\nNCP-15.zip,2,NCP,408,1965,131,2,\"[1965, 1966]\"\r\nNCP-9.zip,2,NCP,2702,2668,41,1,[2668]\r\nCP-11.zip,1,CP,1452,3978,56,2,\"[3977, 3978]\"\r\nNCP-29.zip,2,NCP,891,2430,22,1,[2430]\r\nNCP-16.zip,2,NCP,458,2070,131,2,\"[2070, 2071]\"\r\nNormal-2.zip,0,Normal,1753,1092,60,7,\"[1086, 1087, 1088, 1089, 1090, 1091, 1092]\"\r\nNormal-1.zip,0,Normal,1702,958,69,2,\"[957, 958]\"\r\nNormal-2.zip,0,Normal,1761,1126,45,5,\"[1125, 1126, 1127, 1128, 1129]\"\r\nCP-12.zip,1,CP,1487,4063,68,3,\"[4061, 4062, 4063]\"\r\nNCP-25.zip,2,NCP,3958,5471,38,1,[5471]\r\nCP-15.zip,1,CP,1556,4231,40,2,\"[4230, 4231]\"\r\nNCP-16.zip,2,NCP,431,2015,160,2,\"[2015, 2016]\"\r\nNormal-2.zip,0,Normal,1745,1060,298,3,\"[1060, 1061, 1062]\"\r\nNCP-23.zip,2,NCP,906,2448,55,1,[2448]\r\nCP-2.zip,1,CP,11,3163,265,7,\"[3161, 3162, 3163, 3164, 3165, 3166, 3167]\"\r\nNCP-17.zip,2,NCP,487,2130,70,2,\"[2129, 2130]\"\r\nCP-16.zip,1,CP,1600,4288,19,1,[4288]\r\nNCP-21.zip,2,NCP,580,2317,139,2,\"[2317, 2318]\"\r\nNormal-1.zip,0,Normal,1673,805,59,6,\"[804, 805, 806, 807, 808, 809]\"\r\nCP-29.zip,1,CP,3801,5745,26,1,[5745]\r\nNormal-1.zip,0,Normal,1726,1007,69,2,\"[1007, 1008]\"\r\nNCP-29.zip,2,NCP,893,2432,25,2,\"[2432, 2433]\"\r\nCP-3.zip,1,CP,1143,3361,177,1,[3361]\r\nCP-8.zip,1,CP,1343,3726,56,2,\"[3726, 3727]\"\r\nNCP-2.zip,2,NCP,115,1371,118,2,\"[1371, 1372]\"\r\nNCP-11.zip,2,NCP,31,1195,57,2,\"[1194, 1195]\"\r\nCP-1.zip,1,CP,1071,3114,57,2,\"[3113, 3114]\"\r\nNCP-23.zip,2,NCP,951,2494,38,1,[2494]\r\nNormal-1.zip,0,Normal,1706,967,64,2,\"[967, 968]\"\r\nCP-1.zip,1,CP,0,3133,290,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nNCP-8.zip,2,NCP,262,1671,58,2,\"[1670, 
1671]\"\r\nNormal-10.zip,0,Normal,1943,398,94,1,[398]\r\nNCP-8.zip,2,NCP,257,1661,64,2,\"[1660, 1661]\"\r\nNormal-24.zip,0,Normal,2644,154,39,1,[154]\r\nNCP-15.zip,2,NCP,407,1964,52,2,\"[1963, 1964]\"\r\nNormal-26.zip,0,Normal,3883,5395,61,1,[5395]\r\nNCP-9.zip,2,NCP,2685,2698,52,1,[2698]\r\nNCP-30.zip,2,NCP,992,2545,213,1,[2545]\r\nCP-21.zip,1,CP,596,2958,255,1,[2958]\r\nCP-7.zip,1,CP,1314,3664,30,2,\"[3663, 3664]\"\r\nNCP-16.zip,2,NCP,432,2018,54,2,\"[2017, 2018]\"\r\nNCP-14.zip,2,NCP,371,1894,59,2,\"[1893, 1894]\"\r\nNCP-7.zip,2,NCP,2482,2685,45,1,[2685]\r\nNormal-1.zip,0,Normal,1679,834,66,6,\"[833, 834, 835, 836, 837, 838]\"\r\nCP-29.zip,1,CP,3824,5768,23,1,[5768]\r\nNormal-2.zip,0,Normal,1753,1089,66,7,\"[1086, 1087, 1088, 1089, 1090, 1091, 1092]\"\r\nNormal-7.zip,0,Normal,1859,314,85,1,[314]\r\nNCP-21.zip,2,NCP,578,2313,130,2,\"[2313, 2314]\"\r\nCP-10.zip,1,CP,1402,3866,55,3,\"[3865, 3866, 3867]\"\r\nNormal-4.zip,0,Normal,791,226,138,1,[226]\r\nNormal-13.zip,0,Normal,2039,494,101,1,[494]\r\nNormal-15.zip,0,Normal,2115,570,94,1,[570]\r\nCP-12.zip,1,CP,1470,4021,54,2,\"[4020, 4021]\"\r\nCP-24.zip,1,CP,695,3057,201,1,[3057]\r\nNormal-12.zip,0,Normal,1994,449,95,1,[449]\r\nNormal-5.zip,0,Normal,804,239,325,1,[239]\r\nCP-17.zip,1,CP,1623,4311,23,1,[4311]\r\nNormal-18.zip,0,Normal,2208,663,95,1,[663]\r\nNCP-19.zip,2,NCP,526,2209,58,2,\"[2208, 2209]\"\r\nNCP-16.zip,2,NCP,45,1224,64,2,\"[1223, 1224]\"\r\nNormal-1.zip,0,Normal,1679,838,70,6,\"[833, 834, 835, 836, 837, 838]\"\r\nCP-2.zip,1,CP,11,3161,244,7,\"[3161, 3162, 3163, 3164, 3165, 3166, 3167]\"\r\nNormal-19.zip,0,Normal,2239,694,89,1,[694]\r\nNCP-7.zip,2,NCP,243,1631,145,3,\"[1631, 1632, 1633]\"\r\nNCP-7.zip,2,NCP,243,1633,61,3,\"[1631, 1632, 1633]\"\r\nCP-18.zip,1,CP,1780,3561,63,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nCP-11.zip,1,CP,1429,3926,52,2,\"[3926, 3927]\"\r\nNCP-7.zip,2,NCP,237,1619,146,2,\"[1619, 1620]\"\r\nCP-7.zip,1,CP,1319,3674,61,2,\"[3674, 
3675]\"\r\nNCP-28.zip,2,NCP,829,2342,36,1,[2342]\r\nNormal-18.zip,0,Normal,2186,641,84,1,[641]\r\nNCP-4.zip,2,NCP,141,1427,54,2,\"[1426, 1427]\"\r\nNormal-16.zip,0,Normal,2127,582,84,1,[582]\r\nNormal-1.zip,0,Normal,1723,1003,77,2,\"[1003, 1004]\"\r\nCP-5.zip,1,CP,1197,3415,191,1,[3415]\r\nCP-10.zip,1,CP,1414,3893,63,3,\"[3891, 3892, 3893]\"\r\nNCP-14.zip,2,NCP,384,1920,127,2,\"[1920, 1921]\"\r\nCP-7.zip,1,CP,1317,3671,116,3,\"[3670, 3671, 3672]\"\r\nNCP-22.zip,2,NCP,81,1295,125,2,\"[1295, 1296]\"\r\nCP-3.zip,1,CP,1156,3374,173,1,[3374]\r\nNormal-2.zip,0,Normal,1761,1129,60,5,\"[1125, 1126, 1127, 1128, 1129]\"\r\nNCP-8.zip,2,NCP,252,1651,58,2,\"[1650, 1651]\"\r\nNCP-25.zip,2,NCP,3959,5472,44,1,[5472]\r\nNormal-11.zip,0,Normal,1988,443,90,1,[443]\r\nCP-30.zip,1,CP,3833,5777,23,1,[5777]\r\nNCP-26.zip,2,NCP,3985,5491,50,1,[5491]\r\nCP-20.zip,1,CP,2668,3255,28,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nNormal-14.zip,0,Normal,2077,532,92,1,[532]\r\nNormal-14.zip,0,Normal,2059,514,95,1,[514]\r\nCP-29.zip,1,CP,3829,5773,26,1,[5773]\r\nNCP-15.zip,2,NCP,402,1954,62,2,\"[1953, 1954]\"\r\nCP-29.zip,1,CP,3800,5744,29,1,[5744]\r\nCP-9.zip,1,CP,1383,3821,71,2,\"[3821, 3822]\"\r\nNCP-6.zip,2,NCP,225,1594,135,2,\"[1594, 1595]\"\r\nCP-27.zip,1,CP,3759,5703,23,1,[5703]\r\nNCP-15.zip,2,NCP,423,2000,56,2,\"[1999, 2000]\"\r\nCP-4.zip,1,CP,1190,3408,173,1,[3408]\r\nNCP-11.zip,2,NCP,302,1750,152,2,\"[1750, 1751]\"\r\nNCP-29.zip,2,NCP,889,2427,38,2,\"[2427, 2428]\"\r\nNCP-20.zip,2,NCP,570,2299,58,2,\"[2298, 2299]\"\r\nNCP-14.zip,2,NCP,375,1902,40,3,\"[1901, 1902, 1903]\"\r\nNormal-19.zip,0,Normal,2238,693,91,1,[693]\r\nNCP-2.zip,2,NCP,1273,2714,56,1,[2714]\r\nNCP-18.zip,2,NCP,497,2151,53,2,\"[2150, 2151]\"\r\nCP-25.zip,1,CP,715,3077,609,1,[3077]\r\nCP-7.zip,1,CP,1264,3482,126,1,[3482]\r\nCP-1.zip,1,CP,10,3157,46,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 
3160]\"\r\nNormal-20.zip,0,Normal,2266,721,94,1,[721]\r\nCP-11.zip,1,CP,1433,3935,62,2,\"[3934, 3935]\"\r\nNCP-18.zip,2,NCP,511,2179,56,2,\"[2178, 2179]\"\r\nCP-3.zip,1,CP,1138,3356,158,1,[3356]\r\nNormal-20.zip,0,Normal,2249,704,66,1,[704]\r\nNormal-6.zip,0,Normal,1809,264,94,1,[264]\r\nCP-14.zip,1,CP,1547,4210,142,3,\"[4210, 4211, 4212]\"\r\nCP-21.zip,1,CP,586,2948,174,1,[2948]\r\nCP-23.zip,1,CP,650,3012,102,1,[3012]\r\nCP-12.zip,1,CP,1459,3995,164,3,\"[3995, 3996, 3997]\"\r\nCP-14.zip,1,CP,1522,4149,61,2,\"[4148, 4149]\"\r\nNCP-8.zip,2,NCP,250,1646,144,2,\"[1646, 1647]\"\r\nNormal-26.zip,0,Normal,3884,5397,298,2,\"[5396, 5397]\"\r\nCP-28.zip,1,CP,3773,5717,20,1,[5717]\r\nNormal-21.zip,0,Normal,2309,764,88,1,[764]\r\nNCP-12.zip,2,NCP,326,1801,50,2,\"[1800, 1801]\"\r\nNormal-1.zip,0,Normal,1729,1017,74,2,\"[1017, 1018]\"\r\nNormal-1.zip,0,Normal,1684,871,68,5,\"[870, 871, 873, 874, 875]\"\r\nCP-15.zip,1,CP,1567,4254,118,2,\"[4254, 4255]\"\r\nNCP-4.zip,2,NCP,163,1470,154,2,\"[1470, 1471]\"\r\nNormal-1.zip,0,Normal,1705,966,69,2,\"[965, 966]\"\r\nCP-11.zip,1,CP,1446,3966,63,2,\"[3965, 3966]\"\r\nNCP-6.zip,2,NCP,225,1595,57,2,\"[1594, 1595]\"\r\nNCP-11.zip,2,NCP,293,1732,52,2,\"[1731, 1732]\"\r\nNCP-28.zip,2,NCP,839,2354,209,1,[2354]\r\nNCP-18.zip,2,NCP,513,2182,163,2,\"[2182, 2183]\"\r\nNormal-8.zip,0,Normal,1889,344,87,1,[344]\r\nCP-2.zip,1,CP,1112,3330,154,1,[3330]\r\nNormal-26.zip,0,Normal,3874,5386,28,1,[5386]\r\nCP-29.zip,1,CP,3813,5757,21,1,[5757]\r\nCP-7.zip,1,CP,1317,3670,229,3,\"[3670, 3671, 3672]\"\r\nNCP-20.zip,2,NCP,553,2264,58,2,\"[2263, 2264]\"\r\nCP-29.zip,1,CP,3820,5764,31,1,[5764]\r\nNCP-17.zip,2,NCP,482,2120,58,2,\"[2119, 2120]\"\r\nNCP-7.zip,2,NCP,233,1610,86,2,\"[1610, 1612]\"\r\nNCP-18.zip,2,NCP,500,2157,68,2,\"[2156, 2157]\"\r\nNormal-4.zip,0,Normal,799,234,118,1,[234]\r\nNCP-16.zip,2,NCP,442,2039,53,2,\"[2038, 2039]\"\r\nNCP-23.zip,2,NCP,94,1325,64,2,\"[1324, 1325]\"\r\nCP-18.zip,1,CP,1780,3563,60,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 
3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nNCP-23.zip,2,NCP,902,2444,45,1,[2444]\r\nCP-2.zip,1,CP,11,3162,260,7,\"[3161, 3162, 3163, 3164, 3165, 3166, 3167]\"\r\nNCP-3.zip,2,NCP,135,1415,58,2,\"[1414, 1415]\"\r\nCP-8.zip,1,CP,1350,3745,55,1,[3745]\r\nNormal-14.zip,0,Normal,2065,520,81,1,[520]\r\nNCP-5.zip,2,NCP,188,1521,57,2,\"[1520, 1521]\"\r\nNormal-2.zip,0,Normal,1745,1061,60,3,\"[1060, 1061, 1062]\"\r\nNCP-15.zip,2,NCP,424,2002,64,2,\"[2001, 2002]\"\r\nNormal-4.zip,0,Normal,790,225,126,1,[225]\r\nNCP-4.zip,2,NCP,142,1429,59,2,\"[1428, 1429]\"\r\nCP-7.zip,1,CP,1310,3653,51,2,\"[3653, 3654]\"\r\nCP-14.zip,1,CP,1537,4182,53,3,\"[4182, 4183, 4184]\"\r\nCP-17.zip,1,CP,1625,4313,26,1,[4313]\r\nNormal-1.zip,0,Normal,1680,843,64,6,\"[839, 840, 841, 842, 843, 844]\"\r\nNCP-11.zip,2,NCP,311,1769,134,2,\"[1769, 1770]\"\r\nCP-1.zip,1,CP,0,3136,290,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nCP-1.zip,1,CP,1075,3118,553,2,\"[3118, 3119]\"\r\nNormal-4.zip,0,Normal,770,205,116,1,[205]\r\nCP-7.zip,1,CP,1311,3655,160,3,\"[3655, 3656, 3657]\"\r\nNormal-1.zip,0,Normal,1724,1005,55,1,[1005]\r\nNCP-20.zip,2,NCP,563,2285,59,2,\"[2284, 2285]\"\r\nNCP-4.zip,2,NCP,163,1471,65,2,\"[1470, 1471]\"\r\nNormal-15.zip,0,Normal,2114,569,101,1,[569]\r\nNormal-12.zip,0,Normal,2016,471,89,1,[471]\r\nCP-23.zip,1,CP,657,3019,343,1,[3019]\r\nNormal-1.zip,0,Normal,1729,1018,74,2,\"[1017, 1018]\"\r\nCP-18.zip,1,CP,1780,3558,73,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nNCP-5.zip,2,NCP,183,1511,52,2,\"[1510, 1511]\"\r\nCP-1.zip,1,CP,1074,3117,61,1,[3117]\r\nNormal-8.zip,0,Normal,1870,325,88,1,[325]\r\nCP-6.zip,1,CP,1254,3472,125,1,[3472]\r\nCP-21.zip,1,CP,2775,3306,43,1,[3306]\r\nCP-16.zip,1,CP,1587,4275,20,1,[4275]\r\nNCP-26.zip,2,NCP,3984,5490,54,1,[5490]\r\nCP-27.zip,1,CP,3747,5691,20,1,[5691]\r\nCP-13.zip,1,CP,1495,4088,48,4,\"[4086, 4087, 4088, 4089]\"\r\nCP-9.zip,1,CP,1384,3823,66,2,\"[3823, 
3824]\"\r\nNCP-1.zip,2,NCP,100,1338,58,2,\"[1337, 1338]\"\r\nNCP-27.zip,2,NCP,1025,2595,252,1,[2595]\r\nNCP-18.zip,2,NCP,510,2177,43,2,\"[2176, 2177]\"\r\nNCP-11.zip,2,NCP,298,1743,61,2,\"[1742, 1743]\"\r\nNormal-17.zip,0,Normal,2174,629,88,1,[629]\r\nCP-23.zip,1,CP,677,3039,309,1,[3039]\r\nNormal-21.zip,0,Normal,2284,739,80,1,[739]\r\nNormal-18.zip,0,Normal,2193,648,85,1,[648]\r\nCP-1.zip,1,CP,0,3135,269,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nNCP-27.zip,2,NCP,1015,2579,39,1,[2579]\r\nNCP-6.zip,2,NCP,214,1572,144,2,\"[1572, 1573]\"\r\nCP-6.zip,1,CP,1248,3466,141,1,[3466]\r\nNormal-27.zip,0,Normal,3901,5433,66,1,[5433]\r\nCP-13.zip,1,CP,1519,4142,68,2,\"[4141, 4142]\"\r\nNCP-14.zip,2,NCP,385,1922,64,1,[1922]\r\nCP-7.zip,1,CP,1311,3657,67,3,\"[3655, 3656, 3657]\"\r\nCP-14.zip,1,CP,1547,4212,58,3,\"[4210, 4211, 4212]\"\r\nCP-4.zip,1,CP,1186,3404,204,1,[3404]\r\nCP-14.zip,1,CP,1526,4159,51,3,\"[4157, 4158, 4159]\"\r\nNCP-4.zip,2,NCP,165,1474,131,2,\"[1474, 1475]\"\r\nCP-1.zip,1,CP,10,3160,297,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]\"\r\nCP-3.zip,1,CP,1157,3375,204,1,[3375]\r\nNCP-11.zip,2,NCP,307,1762,57,2,\"[1761, 1762]\"\r\nCP-11.zip,1,CP,1441,3952,53,3,\"[3951, 3952, 3953]\"\r\nNCP-21.zip,2,NCP,63,1259,139,2,\"[1259, 1260]\"\r\nNormal-6.zip,0,Normal,1806,261,100,1,[261]\r\nCP-1.zip,1,CP,0,3131,285,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nCP-17.zip,1,CP,1627,4315,26,1,[4315]\r\nNormal-14.zip,0,Normal,2064,519,91,1,[519]\r\nNCP-5.zip,2,NCP,180,1505,57,2,\"[1504, 1505]\"\r\nNormal-16.zip,0,Normal,2134,589,72,1,[589]\r\nNormal-14.zip,0,Normal,2063,518,99,1,[518]\r\nCP-11.zip,1,CP,1451,3975,51,2,\"[3975, 3976]\"\r\nNormal-24.zip,0,Normal,2647,157,34,1,[157]\r\nNCP-21.zip,2,NCP,66,1265,58,1,[1265]\r\nNormal-25.zip,0,Normal,3843,5355,180,1,[5355]\r\nNCP-12.zip,2,NCP,336,1820,117,2,\"[1820, 1821]\"\r\nCP-25.zip,1,CP,729,3091,106,1,[3091]\r\nCP-20.zip,1,CP,2668,3256,53,11,\"[3249, 3250, 3251, 
3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nNCP-6.zip,2,NCP,200,1544,123,2,\"[1544, 1545]\"\r\nNormal-1.zip,0,Normal,1685,879,65,4,\"[877, 878, 879, 880]\"\r\nNCP-24.zip,2,NCP,972,2515,120,1,[2515]\r\nCP-14.zip,1,CP,1547,4211,58,3,\"[4210, 4211, 4212]\"\r\nCP-18.zip,1,CP,1775,3530,58,4,\"[3530, 3531, 3532, 3533]\"\r\nCP-11.zip,1,CP,1427,3921,43,2,\"[3921, 3922]\"\r\nCP-18.zip,1,CP,1776,3534,64,6,\"[3534, 3535, 3536, 3537, 3538, 3539]\"\r\nNCP-13.zip,2,NCP,368,1888,54,2,\"[1887, 1888]\"\r\nCP-23.zip,1,CP,644,3006,134,1,[3006]\r\nCP-7.zip,1,CP,1312,3659,65,2,\"[3658, 3659]\"\r\nNCP-4.zip,2,NCP,139,1422,132,2,\"[1422, 1423]\"\r\nNCP-15.zip,2,NCP,422,1998,63,2,\"[1997, 1998]\"\r\nCP-10.zip,1,CP,1391,3842,59,4,\"[3839, 3840, 3841, 3842]\"\r\nCP-11.zip,1,CP,1441,3953,53,3,\"[3951, 3952, 3953]\"\r\nNCP-4.zip,2,NCP,154,1452,110,2,\"[1452, 1453]\"\r\nNCP-6.zip,2,NCP,202,1549,67,2,\"[1548, 1549]\"\r\nCP-11.zip,1,CP,1436,3941,45,2,\"[3940, 3941]\"\r\nNCP-16.zip,2,NCP,431,2016,67,2,\"[2015, 2016]\"\r\nNormal-26.zip,0,Normal,3870,5382,30,1,[5382]\r\nNormal-17.zip,0,Normal,2159,614,89,1,[614]\r\nCP-11.zip,1,CP,1427,3922,43,2,\"[3921, 3922]\"\r\nNCP-7.zip,2,NCP,235,1616,58,2,\"[1615, 1616]\"\r\nCP-11.zip,1,CP,1418,3902,54,3,\"[3900, 3901, 3902]\"\r\nCP-6.zip,1,CP,1228,3446,307,1,[3446]\r\nNCP-15.zip,2,NCP,422,1997,156,2,\"[1997, 1998]\"\r\nNormal-1.zip,0,Normal,1679,836,67,6,\"[833, 834, 835, 836, 837, 838]\"\r\nCP-16.zip,1,CP,1604,4292,22,1,[4292]\r\nCP-4.zip,1,CP,1179,3397,153,1,[3397]\r\nNCP-6.zip,2,NCP,221,1586,125,2,\"[1586, 1587]\"\r\nCP-18.zip,1,CP,1780,3564,41,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nNCP-4.zip,2,NCP,139,1423,56,2,\"[1422, 1423]\"\r\nNormal-1.zip,0,Normal,1685,880,65,4,\"[877, 878, 879, 880]\"\r\nCP-18.zip,1,CP,1780,3557,73,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 
3566]\"\r\nCP-7.zip,1,CP,1269,3487,172,1,[3487]\r\nNormal-1.zip,0,Normal,1680,841,69,6,\"[839, 840, 841, 842, 843, 844]\"\r\nCP-13.zip,1,CP,1491,4074,113,3,\"[4074, 4075, 4076]\"\r\nNCP-13.zip,2,NCP,344,1840,63,2,\"[1839, 1840]\"\r\nNCP-17.zip,2,NCP,476,2108,53,2,\"[2107, 2108]\"\r\nNormal-12.zip,0,Normal,1997,452,104,1,[452]\r\nNormal-2.zip,0,Normal,1745,1062,60,3,\"[1060, 1061, 1062]\"\r\nNormal-19.zip,0,Normal,2224,679,82,1,[679]\r\nCP-2.zip,1,CP,1101,3319,187,1,[3319]\r\nNormal-26.zip,0,Normal,3873,5385,25,1,[5385]\r\nCP-15.zip,1,CP,1578,4266,22,1,[4266]\r\nNormal-22.zip,0,Normal,2591,101,37,1,[101]\r\nNormal-11.zip,0,Normal,1966,421,90,1,[421]\r\nNCP-17.zip,2,NCP,480,2115,139,2,\"[2115, 2116]\"\r\nCP-19.zip,1,CP,2,3503,34,1,[3503]\r\nNCP-7.zip,2,NCP,236,1618,119,2,\"[1617, 1618]\"\r\nCP-16.zip,1,CP,1616,4304,29,1,[4304]\r\nCP-11.zip,1,CP,1439,3946,62,2,\"[3946, 3947]\"\r\nCP-10.zip,1,CP,1410,3883,51,2,\"[3883, 3884]\"\r\nCP-24.zip,1,CP,701,3063,66,1,[3063]\r\nNCP-6.zip,2,NCP,200,1545,52,2,\"[1544, 1545]\"\r\nCP-1.zip,1,CP,10,3155,297,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]\"\r\nNCP-4.zip,2,NCP,160,1464,146,2,\"[1464, 1465]\"\r\nNormal-8.zip,0,Normal,1890,345,99,1,[345]\r\nNCP-9.zip,2,NCP,2694,2660,39,1,[2660]\r\nCP-30.zip,1,CP,3930,5628,62,2,\"[5628, 5629]\"\r\nCP-25.zip,1,CP,9,3149,290,4,\"[3148, 3149, 3150, 3151]\"\r\nNormal-13.zip,0,Normal,2022,477,92,1,[477]\r\nNormal-1.zip,0,Normal,1680,842,69,6,\"[839, 840, 841, 842, 843, 844]\"\r\nNCP-7.zip,2,NCP,229,1603,65,2,\"[1602, 1603]\"\r\nNormal-1.zip,0,Normal,1712,979,70,1,[979]\r\nNormal-12.zip,0,Normal,2002,457,96,1,[457]\r\nCP-6.zip,1,CP,1233,3451,150,1,[3451]\r\nNCP-18.zip,2,NCP,489,2135,58,2,\"[2134, 2135]\"\r\nCP-7.zip,1,CP,1310,3654,51,2,\"[3653, 3654]\"\r\nCP-22.zip,1,CP,636,2998,102,1,[2998]\r\nNCP-21.zip,2,NCP,70,1273,51,2,\"[1272, 1273]\"\r\nNormal-23.zip,0,Normal,2603,113,41,1,[113]\r\nCP-8.zip,1,CP,1323,3683,62,2,\"[3682, 
3683]\"\r\nNormal-20.zip,0,Normal,2274,729,85,1,[729]\r\nNCP-29.zip,2,NCP,889,2428,121,2,\"[2427, 2428]\"\r\nNCP-1.zip,2,NCP,1040,2611,113,1,[2611]\r\nCP-1.zip,1,CP,0,3139,39,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nNormal-21.zip,0,Normal,2298,753,80,1,[753]\r\nCP-19.zip,1,CP,1792,3215,71,2,\"[3214, 3215]\"\r\nNormal-27.zip,0,Normal,3916,5459,77,1,[5459]\r\nNormal-21.zip,0,Normal,2311,766,91,1,[766]\r\nCP-1.zip,1,CP,0,3132,42,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nNormal-1.zip,0,Normal,1708,972,74,2,\"[971, 972]\"\r\nNCP-13.zip,2,NCP,343,1837,130,2,\"[1837, 1838]\"\r\nNCP-26.zip,2,NCP,3989,5513,45,1,[5513]\r\nCP-12.zip,1,CP,1459,3997,69,3,\"[3995, 3996, 3997]\"\r\nCP-13.zip,1,CP,1495,4086,112,4,\"[4086, 4087, 4088, 4089]\"\r\nNormal-5.zip,0,Normal,812,247,126,1,[247]\r\nNormal-15.zip,0,Normal,2098,553,84,1,[553]\r\nNormal-16.zip,0,Normal,2119,574,93,1,[574]\r\nCP-25.zip,1,CP,731,3093,82,1,[3093]\r\nCP-16.zip,1,CP,1597,4285,23,1,[4285]\r\nCP-26.zip,1,CP,3726,5662,232,1,[5662]\r\nCP-4.zip,1,CP,1183,3401,294,1,[3401]\r\nCP-10.zip,1,CP,1391,3839,59,4,\"[3839, 3840, 3841, 3842]\"\r\nNCP-23.zip,2,NCP,901,2443,320,1,[2443]\r\nNormal-11.zip,0,Normal,1957,412,78,1,[412]\r\nNCP-18.zip,2,NCP,504,2164,155,2,\"[2164, 2165]\"\r\nNCP-17.zip,2,NCP,474,2104,48,2,\"[2103, 2104]\"\r\nNCP-9.zip,2,NCP,2698,2664,57,1,[2664]\r\nNCP-7.zip,2,NCP,233,1612,45,2,\"[1610, 1612]\"\r\nNCP-9.zip,2,NCP,2686,2699,48,1,[2699]\r\nCP-18.zip,1,CP,1776,3537,75,6,\"[3534, 3535, 3536, 3537, 3538, 3539]\"\r\nCP-3.zip,1,CP,1158,3376,193,1,[3376]\r\nCP-27.zip,1,CP,3755,5699,23,1,[5699]\r\nCP-13.zip,1,CP,1509,4120,59,3,\"[4118, 4119, 4120]\"\r\nNCP-29.zip,2,NCP,910,2452,76,1,[2452]\r\nCP-2.zip,1,CP,11,3166,274,7,\"[3161, 3162, 3163, 3164, 3165, 3166, 3167]\"\r\nNCP-16.zip,2,NCP,433,2020,51,2,\"[2019, 
2020]\"\r\nNormal-26.zip,0,Normal,3863,5375,231,1,[5375]\r\nNormal-7.zip,0,Normal,1851,306,102,1,[306]\r\nNCP-23.zip,2,NCP,917,2459,272,1,[2459]\r\nNCP-26.zip,2,NCP,3986,5492,42,1,[5492]\r\nCP-12.zip,1,CP,1478,4037,53,2,\"[4037, 4038]\"\r\nNCP-2.zip,2,NCP,115,1372,50,2,\"[1371, 1372]\"\r\nNCP-13.zip,2,NCP,362,1875,151,2,\"[1875, 1876]\"\r\nNormal-22.zip,0,Normal,2592,102,39,1,[102]\r\nCP-9.zip,1,CP,1357,3758,61,3,\"[3758, 3759, 3760]\"\r\nNormal-6.zip,0,Normal,1825,280,81,1,[280]\r\nNormal-4.zip,0,Normal,775,210,134,1,[210]\r\nNCP-13.zip,2,NCP,365,1881,117,2,\"[1881, 1882]\"\r\nCP-24.zip,1,CP,709,3071,302,1,[3071]\r\nCP-17.zip,1,CP,1630,4318,23,1,[4318]\r\nCP-15.zip,1,CP,1557,4232,43,2,\"[4232, 4233]\"\r\nNCP-23.zip,2,NCP,956,2499,156,1,[2499]\r\nCP-2.zip,1,CP,1106,3324,164,1,[3324]\r\nNormal-9.zip,0,Normal,1895,350,92,1,[350]\r\nCP-21.zip,1,CP,599,2961,68,1,[2961]\r\nNormal-1.zip,0,Normal,1720,996,74,2,\"[995, 996]\"\r\nNCP-16.zip,2,NCP,448,2051,58,2,\"[2050, 2051]\"\r\nCP-5.zip,1,CP,1206,3424,176,1,[3424]\r\nCP-26.zip,1,CP,3648,5540,170,1,[5540]\r\nCP-1.zip,1,CP,1091,3309,354,1,[3309]\r\nNCP-10.zip,2,NCP,2713,2706,39,1,[2706]\r\nNCP-30.zip,2,NCP,949,2492,42,1,[2492]\r\nNCP-17.zip,2,NCP,480,2116,58,2,\"[2115, 2116]\"\r\nCP-7.zip,1,CP,1306,3643,48,3,\"[3642, 3643, 3644]\"\r\nNormal-7.zip,0,Normal,1840,295,108,1,[295]\r\nCP-18.zip,1,CP,1780,3562,63,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nNCP-1.zip,2,NCP,1011,2575,111,2,\"[2574, 2575]\"\r\nNormal-16.zip,0,Normal,2132,587,97,1,[587]\r\nCP-29.zip,1,CP,3814,5758,29,1,[5758]\r\nCP-18.zip,1,CP,1768,3175,175,1,[3175]\r\nNormal-13.zip,0,Normal,2028,483,89,1,[483]\r\nNCP-16.zip,2,NCP,454,2062,139,2,\"[2062, 2063]\"\r\nCP-8.zip,1,CP,1333,3706,52,2,\"[3705, 
3706]\"\r\nCP-25.zip,1,CP,737,3099,84,1,[3099]\r\nNCP-9.zip,2,NCP,2683,2653,46,1,[2653]\r\nNormal-11.zip,0,Normal,1958,413,90,1,[413]\r\nNormal-7.zip,0,Normal,1855,310,86,1,[310]\r\nNCP-10.zip,2,NCP,282,1710,120,2,\"[1710, 1711]\"\r\nNCP-8.zip,2,NCP,252,1650,139,2,\"[1650, 1651]\"\r\nNCP-3.zip,2,NCP,133,1411,41,2,\"[1410, 1411]\"\r\nCP-21.zip,1,CP,588,2950,116,1,[2950]\r\nNormal-15.zip,0,Normal,2094,549,78,1,[549]\r\nNCP-20.zip,2,NCP,562,2282,113,2,\"[2282, 2283]\"\r\nNormal-5.zip,0,Normal,806,241,104,1,[241]\r\nCP-3.zip,1,CP,1145,3363,169,1,[3363]\r\nNCP-28.zip,2,NCP,847,2365,53,1,[2365]\r\nNCP-4.zip,2,NCP,143,1431,54,2,\"[1430, 1431]\"\r\nCP-19.zip,1,CP,1786,3192,81,3,\"[3192, 3193, 3194]\"\r\nNCP-15.zip,2,NCP,407,1963,124,2,\"[1963, 1964]\"\r\nNormal-6.zip,0,Normal,1817,272,85,1,[272]\r\nCP-32.zip,1,CP,1089,3224,90,1,[3224]\r\nNCP-22.zip,2,NCP,834,2347,194,2,\"[2347, 2348]\"\r\nCP-9.zip,1,CP,1381,3816,66,3,\"[3815, 3816, 3817]\"\r\nNormal-8.zip,0,Normal,1866,321,75,1,[321]\r\nNCP-22.zip,2,NCP,86,1306,50,2,\"[1305, 1306]\"\r\nCP-26.zip,1,CP,3725,5660,251,2,\"[5660, 5661]\"\r\nNCP-18.zip,2,NCP,497,2150,126,2,\"[2150, 2151]\"\r\nNCP-27.zip,2,NCP,1043,2615,45,1,[2615]\r\nCP-4.zip,1,CP,1167,3385,149,1,[3385]\r\nNormal-4.zip,0,Normal,782,217,340,1,[217]\r\nNCP-15.zip,2,NCP,421,1995,161,2,\"[1995, 1996]\"\r\nNormal-9.zip,0,Normal,1897,352,88,1,[352]\r\nNCP-13.zip,2,NCP,365,1882,50,2,\"[1881, 1882]\"\r\nCP-1.zip,1,CP,1067,3106,62,1,[3106]\r\nCP-22.zip,1,CP,642,3004,128,1,[3004]\r\nCP-20.zip,1,CP,2668,3258,52,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nCP-10.zip,1,CP,1406,3875,60,2,\"[3874, 3875]\"\r\nCP-1.zip,1,CP,10,3158,285,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]\"\r\nNCP-21.zip,2,NCP,60,1254,59,2,\"[1253, 1254]\"\r\nNormal-26.zip,0,Normal,3884,5396,62,2,\"[5396, 5397]\"\r\nNCP-25.zip,2,NCP,3710,5537,66,1,[5537]\r\nCP-9.zip,1,CP,1371,3795,60,3,\"[3794, 3795, 3796]\"\r\nCP-20.zip,1,CP,2450,2928,92,2,\"[2928, 
2929]\"\r\nNCP-4.zip,2,NCP,166,1476,139,2,\"[1476, 1477]\"\r\nNCP-20.zip,2,NCP,554,2266,54,2,\"[2265, 2266]\"\r\nNCP-18.zip,2,NCP,491,2139,62,2,\"[2138, 2139]\"\r\nCP-2.zip,1,CP,1098,3316,171,1,[3316]\r\nCP-12.zip,1,CP,1465,4010,67,2,\"[4009, 4010]\"\r\nNCP-20.zip,2,NCP,548,2254,61,2,\"[2253, 2254]\"\r\nNormal-16.zip,0,Normal,2150,605,88,1,[605]\r\nNormal-1.zip,0,Normal,1678,830,34,6,\"[827, 828, 829, 830, 831, 832]\"\r\nNCP-16.zip,2,NCP,451,2056,51,3,\"[2056, 2057, 2058]\"\r\nNormal-11.zip,0,Normal,1965,420,88,1,[420]\r\nNCP-1.zip,2,NCP,101,1339,136,2,\"[1339, 1340]\"\r\nNormal-12.zip,0,Normal,2008,463,92,1,[463]\r\nCP-10.zip,1,CP,1402,3867,55,3,\"[3865, 3866, 3867]\"\r\nNCP-2.zip,2,NCP,122,1386,62,2,\"[1385, 1386]\"\r\nCP-20.zip,1,CP,2457,2941,108,1,[2941]\r\nNCP-14.zip,2,NCP,38,1208,137,2,\"[1208, 1209]\"\r\nNormal-10.zip,0,Normal,1933,388,103,1,[388]\r\nCP-1.zip,1,CP,10,3152,297,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]\"\r\nNCP-20.zip,2,NCP,562,2283,48,2,\"[2282, 2283]\"\r\nNCP-12.zip,2,NCP,335,1819,55,2,\"[1818, 1819]\"\r\nNCP-21.zip,2,NCP,579,2316,63,2,\"[2315, 2316]\"\r\nNormal-7.zip,0,Normal,1856,311,80,1,[311]\r\nNCP-18.zip,2,NCP,506,2169,51,2,\"[2168, 2169]\"\r\nCP-8.zip,1,CP,1339,3719,59,2,\"[3718, 3719]\"\r\nCP-18.zip,1,CP,1652,4340,25,1,[4340]\r\nNCP-11.zip,2,NCP,296,1737,139,2,\"[1737, 1738]\"\r\nNormal-8.zip,0,Normal,1886,341,84,1,[341]\r\nNCP-8.zip,2,NCP,250,1647,60,2,\"[1646, 1647]\"\r\nCP-26.zip,1,CP,3720,5652,48,2,\"[5652, 5653]\"\r\nCP-14.zip,1,CP,1537,4184,53,3,\"[4182, 4183, 4184]\"\r\nNCP-17.zip,2,NCP,486,2128,64,2,\"[2127, 2128]\"\r\nCP-8.zip,1,CP,1335,3711,62,3,\"[3709, 3710, 3711]\"\r\nCP-27.zip,1,CP,3739,5683,19,1,[5683]\r\nNCP-25.zip,2,NCP,3950,5464,41,1,[5464]\r\nCP-12.zip,1,CP,1474,4029,62,2,\"[4029, 4030]\"\r\nNormal-10.zip,0,Normal,1946,401,93,1,[401]\r\nCP-19.zip,1,CP,1786,3193,81,3,\"[3192, 3193, 3194]\"\r\nNCP-30.zip,2,NCP,947,2490,41,1,[2490]\r\nNCP-14.zip,2,NCP,371,1893,141,2,\"[1893, 
1894]\"\r\nNCP-8.zip,2,NCP,2676,2694,54,1,[2694]\r\nNCP-1.zip,2,NCP,1011,2574,117,2,\"[2574, 2575]\"\r\nNormal-9.zip,0,Normal,1906,361,93,1,[361]\r\nNCP-4.zip,2,NCP,147,1439,72,2,\"[1438, 1439]\"\r\nCP-12.zip,1,CP,1485,4058,49,3,\"[4056, 4057, 4058]\"\r\nNormal-7.zip,0,Normal,1838,293,86,1,[293]\r\nCP-25.zip,1,CP,9,3150,72,4,\"[3148, 3149, 3150, 3151]\"\r\nNCP-12.zip,2,NCP,330,1809,64,2,\"[1808, 1809]\"\r\nNCP-8.zip,2,NCP,267,1681,54,2,\"[1680, 1681]\"\r\nNCP-20.zip,2,NCP,553,2263,137,2,\"[2263, 2264]\"\r\nNCP-29.zip,2,NCP,893,2433,24,2,\"[2432, 2433]\"\r\nNCP-21.zip,2,NCP,582,2321,128,2,\"[2321, 2322]\"\r\nNormal-24.zip,0,Normal,2642,152,38,1,[152]\r\nCP-25.zip,1,CP,726,3088,183,1,[3088]\r\nNCP-5.zip,2,NCP,171,1487,60,2,\"[1486, 1487]\"\r\nCP-22.zip,1,CP,632,2994,132,1,[2994]\r\nNormal-7.zip,0,Normal,1850,305,99,1,[305]\r\nNCP-30.zip,2,NCP,945,2488,45,1,[2488]\r\nNormal-19.zip,0,Normal,2244,699,98,1,[699]\r\nCP-1.zip,1,CP,1073,3116,52,1,[3116]\r\nNormal-21.zip,0,Normal,2310,765,91,1,[765]\r\nCP-1.zip,1,CP,10,3153,297,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]\"\r\nCP-1.zip,1,CP,1075,3119,70,2,\"[3118, 3119]\"\r\nCP-12.zip,1,CP,1470,4020,54,2,\"[4020, 4021]\"\r\nNCP-26.zip,2,NCP,3997,5519,56,1,[5519]\r\nNCP-10.zip,2,NCP,274,1694,160,2,\"[1694, 1695]\"\r\nNormal-15.zip,0,Normal,2089,544,98,1,[544]\r\nCP-24.zip,1,CP,681,3043,102,1,[3043]\r\nNCP-20.zip,2,NCP,573,2305,63,2,\"[2304, 2305]\"\r\nCP-15.zip,1,CP,1557,4233,43,2,\"[4232, 4233]\"\r\nNCP-30.zip,2,NCP,990,2543,59,1,[2543]\r\nCP-7.zip,1,CP,1305,3640,20,2,\"[3640, 3641]\"\r\nNCP-5.zip,2,NCP,183,1510,123,2,\"[1510, 1511]\"\r\nCP-15.zip,1,CP,1582,4270,20,1,[4270]\r\nCP-29.zip,1,CP,3817,5761,25,1,[5761]\r\nNCP-20.zip,2,NCP,56,1245,164,2,\"[1245, 1246]\"\r\nNCP-21.zip,2,NCP,58,1250,55,2,\"[1249, 1250]\"\r\nCP-8.zip,1,CP,1335,3710,62,3,\"[3709, 3710, 3711]\"\r\nNormal-3.zip,0,Normal,1766,1149,60,3,\"[1149, 1150, 1151]\"\r\nNCP-10.zip,2,NCP,2716,2709,49,1,[2709]\r\nCP-10.zip,1,CP,1402,3865,131,3,\"[3865, 
3866, 3867]\"\r\nCP-10.zip,1,CP,1391,3841,59,4,\"[3839, 3840, 3841, 3842]\"\r\nNormal-22.zip,0,Normal,2594,104,42,1,[104]\r\nCP-26.zip,1,CP,3733,5675,174,3,\"[5673, 5674, 5675]\"\r\nNormal-25.zip,0,Normal,3715,5345,30,1,[5345]\r\nNormal-3.zip,0,Normal,762,197,363,1,[197]\r\nNCP-15.zip,2,NCP,420,1994,71,2,\"[1993, 1994]\"\r\nCP-13.zip,1,CP,1489,4070,58,4,\"[4067, 4068, 4069, 4070]\"\r\nNormal-12.zip,0,Normal,1996,451,90,1,[451]\r\nNCP-13.zip,2,NCP,361,1874,60,2,\"[1873, 1874]\"\r\nNCP-22.zip,2,NCP,885,2423,195,2,\"[2422, 2423]\"\r\nNCP-29.zip,2,NCP,921,2463,36,1,[2463]\r\nNormal-25.zip,0,Normal,3848,5360,192,1,[5360]\r\nCP-28.zip,1,CP,3776,5720,30,1,[5720]\r\nNCP-15.zip,2,NCP,402,1953,148,2,\"[1953, 1954]\"\r\nNormal-19.zip,0,Normal,2232,687,99,1,[687]\r\nCP-11.zip,1,CP,1447,3968,63,2,\"[3967, 3968]\"\r\nNormal-17.zip,0,Normal,2176,631,91,1,[631]\r\nNCP-12.zip,2,NCP,315,1778,46,2,\"[1777, 1778]\"\r\nCP-2.zip,1,CP,1102,3320,182,1,[3320]\r\nNCP-14.zip,2,NCP,373,1897,122,2,\"[1897, 1898]\"\r\nNCP-17.zip,2,NCP,483,2121,137,2,\"[2121, 2122]\"\r\nCP-4.zip,1,CP,1175,3393,189,1,[3393]\r\nNCP-14.zip,2,NCP,392,1934,143,2,\"[1934, 1935]\"\r\nCP-8.zip,1,CP,1321,3679,58,2,\"[3678, 3679]\"\r\nNCP-16.zip,2,NCP,430,2013,152,2,\"[2013, 2014]\"\r\nNCP-26.zip,2,NCP,3988,5512,53,1,[5512]\r\nNormal-22.zip,0,Normal,2316,771,92,1,[771]\r\nCP-14.zip,1,CP,1531,4170,59,2,\"[4169, 4170]\"\r\nNormal-3.zip,0,Normal,748,183,261,1,[183]\r\nNCP-23.zip,2,NCP,943,2486,334,1,[2486]\r\nNormal-18.zip,0,Normal,2202,657,82,1,[657]\r\nCP-27.zip,1,CP,3735,5679,26,1,[5679]\r\nNCP-15.zip,2,NCP,409,1967,153,2,\"[1967, 1968]\"\r\nCP-4.zip,1,CP,1171,3389,180,1,[3389]\r\nCP-11.zip,1,CP,1452,3977,56,2,\"[3977, 3978]\"\r\nNormal-1.zip,0,Normal,1684,875,71,5,\"[870, 871, 873, 874, 875]\"\r\nCP-8.zip,1,CP,1333,3705,52,2,\"[3705, 3706]\"\r\nNCP-3.zip,2,NCP,135,1414,138,2,\"[1414, 1415]\"\r\nNCP-25.zip,2,NCP,3965,5506,53,1,[5506]\r\nNCP-8.zip,2,NCP,258,1662,135,2,\"[1662, 
1663]\"\r\nNormal-10.zip,0,Normal,1926,381,87,1,[381]\r\nCP-16.zip,1,CP,1596,4284,22,1,[4284]\r\nCP-14.zip,1,CP,1554,4226,41,2,\"[4226, 4227]\"\r\nCP-26.zip,1,CP,3645,5605,38,1,[5605]\r\nCP-2.zip,1,CP,1110,3328,143,1,[3328]\r\nNCP-22.zip,2,NCP,81,1296,53,2,\"[1295, 1296]\"\r\nNormal-1.zip,0,Normal,1685,877,65,4,\"[877, 878, 879, 880]\"\r\nNCP-29.zip,2,NCP,923,2465,19,1,[2465]\r\nNCP-14.zip,2,NCP,399,1948,149,2,\"[1948, 1949]\"\r\nNCP-18.zip,2,NCP,510,2176,102,2,\"[2176, 2177]\"\r\nNCP-20.zip,2,NCP,558,2274,51,2,\"[2273, 2274]\"\r\nNormal-1.zip,0,Normal,1678,832,62,6,\"[827, 828, 829, 830, 831, 832]\"\r\nNormal-2.zip,0,Normal,1762,1131,70,2,\"[1130, 1131]\"\r\nCP-19.zip,1,CP,2434,2898,102,3,\"[2898, 2899, 2900]\"\r\nNormal-19.zip,0,Normal,2219,674,106,1,[674]\r\nNormal-8.zip,0,Normal,1869,324,94,1,[324]\r\nNCP-21.zip,2,NCP,70,1272,120,2,\"[1272, 1273]\"\r\nNCP-10.zip,2,NCP,2710,2703,48,1,[2703]\r\nNormal-9.zip,0,Normal,1904,359,94,1,[359]\r\nNCP-20.zip,2,NCP,564,2287,60,2,\"[2286, 2287]\"\r\nNCP-15.zip,2,NCP,424,2001,161,2,\"[2001, 2002]\"\r\nCP-14.zip,1,CP,1529,4166,42,3,\"[4165, 4166, 4167]\"\r\nNormal-16.zip,0,Normal,2138,593,72,1,[593]\r\nCP-16.zip,1,CP,1613,4301,27,1,[4301]\r\nCP-24.zip,1,CP,697,3059,114,1,[3059]\r\nCP-10.zip,1,CP,1390,3836,215,3,\"[3836, 3837, 3838]\"\r\nNormal-6.zip,0,Normal,1805,260,79,1,[260]\r\nCP-10.zip,1,CP,1390,3837,56,3,\"[3836, 3837, 3838]\"\r\nCP-3.zip,1,CP,1150,3368,214,1,[3368]\r\nCP-2.zip,1,CP,1116,3334,183,1,[3334]\r\nNormal-14.zip,0,Normal,2057,512,78,1,[512]\r\nNCP-19.zip,2,NCP,532,2223,58,2,\"[2222, 2223]\"\r\nCP-29.zip,1,CP,3810,5754,24,1,[5754]\r\nCP-14.zip,1,CP,1539,4188,131,3,\"[4188, 4189, 4190]\"\r\nCP-10.zip,1,CP,1385,3826,64,2,\"[3825, 3826]\"\r\nNCP-29.zip,2,NCP,929,2471,21,1,[2471]\r\nNCP-28.zip,2,NCP,856,2377,229,2,\"[2376, 2377]\"\r\nNCP-15.zip,2,NCP,408,1966,55,2,\"[1965, 1966]\"\r\nNCP-16.zip,2,NCP,43,1219,156,2,\"[1219, 1220]\"\r\nCP-7.zip,1,CP,1319,3675,61,2,\"[3674, 
3675]\"\r\nNCP-1.zip,2,NCP,1022,2591,48,1,[2591]\r\nNormal-20.zip,0,Normal,2254,709,75,1,[709]\r\nNCP-22.zip,2,NCP,862,2385,33,1,[2385]\r\nCP-29.zip,1,CP,3812,5756,27,1,[5756]\r\nCP-11.zip,1,CP,1447,3967,63,2,\"[3967, 3968]\"\r\nCP-15.zip,1,CP,1556,4230,40,2,\"[4230, 4231]\"\r\nCP-1.zip,1,CP,1080,3125,64,1,[3125]\r\nNormal-4.zip,0,Normal,778,213,114,1,[213]\r\nCP-14.zip,1,CP,1529,4167,42,3,\"[4165, 4166, 4167]\"\r\nCP-2.zip,1,CP,11,3167,283,7,\"[3161, 3162, 3163, 3164, 3165, 3166, 3167]\"\r\nNCP-20.zip,2,NCP,549,2256,36,2,\"[2255, 2256]\"\r\nNCP-3.zip,2,NCP,1292,2733,66,1,[2733]\r\nNormal-13.zip,0,Normal,2047,502,93,1,[502]\r\nNCP-20.zip,2,NCP,549,2255,83,2,\"[2255, 2256]\"\r\nCP-15.zip,1,CP,1563,4246,122,3,\"[4245, 4246, 4247]\"\r\nNCP-25.zip,2,NCP,3956,5469,49,1,[5469]\r\nNCP-22.zip,2,NCP,833,2346,484,1,[2346]\r\nNCP-18.zip,2,NCP,499,2154,139,2,\"[2154, 2155]\"\r\nCP-12.zip,1,CP,1487,4061,163,3,\"[4061, 4062, 4063]\"\r\nCP-7.zip,1,CP,1306,3642,52,3,\"[3642, 3643, 3644]\"\r\nNCP-17.zip,2,NCP,47,1228,58,2,\"[1227, 1228]\"\r\nCP-8.zip,1,CP,1338,3716,67,2,\"[3716, 3717]\"\r\nNormal-25.zip,0,Normal,3711,5341,27,1,[5341]\r\nNCP-16.zip,2,NCP,452,2059,63,1,[2059]\r\nNormal-23.zip,0,Normal,2604,114,36,1,[114]\r\nNCP-28.zip,2,NCP,849,2368,224,1,[2368]\r\nNCP-29.zip,2,NCP,886,2424,52,1,[2424]\r\nNCP-28.zip,2,NCP,875,2408,218,1,[2408]\r\nNCP-20.zip,2,NCP,573,2304,151,2,\"[2304, 2305]\"\r\nNCP-22.zip,2,NCP,83,1300,70,2,\"[1299, 1300]\"\r\nNormal-14.zip,0,Normal,2056,511,84,1,[511]\r\nNormal-7.zip,0,Normal,1844,299,93,1,[299]\r\nCP-13.zip,1,CP,1494,4083,154,3,\"[4083, 4084, 4085]\"\r\nCP-5.zip,1,CP,1201,3419,171,1,[3419]\r\nNCP-23.zip,2,NCP,897,2438,40,1,[2438]\r\nNormal-27.zip,0,Normal,3914,5456,55,2,\"[5456, 5457]\"\r\nCP-9.zip,1,CP,1354,3751,181,3,\"[3751, 3752, 3753]\"\r\nNCP-29.zip,2,NCP,899,2440,34,2,\"[2440, 2441]\"\r\nCP-10.zip,1,CP,1414,3891,151,3,\"[3891, 3892, 3893]\"\r\nCP-14.zip,1,CP,1543,4202,57,3,\"[4200, 4201, 
4202]\"\r\nNormal-25.zip,0,Normal,3837,5349,208,1,[5349]\r\nNCP-10.zip,2,NCP,272,1691,64,2,\"[1690, 1691]\"\r\nNormal-9.zip,0,Normal,1905,360,93,1,[360]\r\nCP-8.zip,1,CP,1340,3721,64,2,\"[3720, 3721]\"\r\nNCP-5.zip,2,NCP,19,1170,146,2,\"[1170, 1171]\"\r\nNormal-2.zip,0,Normal,1738,1041,75,1,[1041]\r\nNCP-2.zip,2,NCP,108,1354,58,2,\"[1353, 1354]\"\r\nNormal-25.zip,0,Normal,3844,5356,201,1,[5356]\r\nCP-20.zip,1,CP,2459,2945,108,1,[2945]\r\nCP-10.zip,1,CP,1414,3892,63,3,\"[3891, 3892, 3893]\"\r\nNormal-18.zip,0,Normal,2201,656,66,1,[656]\r\nNCP-21.zip,2,NCP,78,1289,166,2,\"[1289, 1290]\"\r\nCP-18.zip,1,CP,1776,3539,76,6,\"[3534, 3535, 3536, 3537, 3538, 3539]\"\r\nNCP-1.zip,2,NCP,1010,2572,126,2,\"[2572, 2573]\"\r\nCP-10.zip,1,CP,1409,3882,66,2,\"[3881, 3882]\"\r\nCP-11.zip,1,CP,1441,3951,203,3,\"[3951, 3952, 3953]\"\r\nCP-13.zip,1,CP,1512,4125,50,2,\"[4125, 4126]\"\r\nCP-30.zip,1,CP,3934,5640,53,3,\"[5638, 5639, 5640]\"\r\nNCP-4.zip,2,NCP,143,1430,128,2,\"[1430, 1431]\"\r\nNormal-17.zip,0,Normal,2166,621,93,1,[621]\r\nNCP-22.zip,2,NCP,83,1299,167,2,\"[1299, 1300]\"\r\nCP-29.zip,1,CP,3804,5748,29,1,[5748]\r\nCP-22.zip,1,CP,624,2986,90,1,[2986]\r\nNCP-7.zip,2,NCP,231,1607,58,2,\"[1606, 1607]\"\r\nNCP-8.zip,2,NCP,258,1663,57,2,\"[1662, 1663]\"\r\nNormal-10.zip,0,Normal,1956,411,89,1,[411]\r\nNCP-4.zip,2,NCP,165,1475,55,2,\"[1474, 1475]\"\r\nNormal-2.zip,0,Normal,1753,1091,60,7,\"[1086, 1087, 1088, 1089, 1090, 1091, 1092]\"\r\nCP-6.zip,1,CP,1247,3465,218,1,[3465]\r\nCP-17.zip,1,CP,1644,4332,23,1,[4332]\r\nNCP-5.zip,2,NCP,188,1520,134,2,\"[1520, 1521]\"\r\nCP-13.zip,1,CP,1509,4118,233,3,\"[4118, 4119, 4120]\"\r\nCP-19.zip,1,CP,2434,2899,102,3,\"[2898, 2899, 2900]\"\r\nNormal-27.zip,0,Normal,3914,5457,55,2,\"[5456, 5457]\"\r\nNCP-3.zip,2,NCP,133,1410,100,2,\"[1410, 1411]\"\r\nCP-24.zip,1,CP,690,3052,134,1,[3052]\r\nNCP-6.zip,2,NCP,208,1560,134,2,\"[1560, 
1561]\"\r\nNormal-26.zip,0,Normal,3872,5384,29,1,[5384]\r\nCP-7.zip,1,CP,1258,3476,202,1,[3476]\r\nNCP-4.zip,2,NCP,154,1453,47,2,\"[1452, 1453]\"\r\nCP-8.zip,1,CP,1335,3709,207,3,\"[3709, 3710, 3711]\"\r\nCP-7.zip,1,CP,1305,3641,50,2,\"[3640, 3641]\"\r\nCP-25.zip,1,CP,716,3078,640,1,[3078]\r\nNormal-2.zip,0,Normal,1761,1125,45,5,\"[1125, 1126, 1127, 1128, 1129]\"\r\nNCP-14.zip,2,NCP,38,1209,57,2,\"[1208, 1209]\"\r\nNormal-1.zip,0,Normal,1685,878,65,4,\"[877, 878, 879, 880]\"\r\nNCP-17.zip,2,NCP,467,2090,58,2,\"[2089, 2090]\"\r\nCP-14.zip,1,CP,1539,4189,54,3,\"[4188, 4189, 4190]\"\r\nNCP-16.zip,2,NCP,454,2063,58,2,\"[2062, 2063]\"\r\nCP-13.zip,1,CP,1491,4076,48,3,\"[4074, 4075, 4076]\"\r\nNormal-4.zip,0,Normal,794,229,341,1,[229]\r\nNCP-19.zip,2,NCP,521,2199,58,2,\"[2198, 2199]\"\r\nCP-7.zip,1,CP,1311,3656,67,3,\"[3655, 3656, 3657]\"\r\nNormal-22.zip,0,Normal,2584,94,44,1,[94]\r\nCP-23.zip,1,CP,678,3040,46,1,[3040]\r\nCP-14.zip,1,CP,1539,4190,54,3,\"[4188, 4189, 4190]\"\r\nCP-30.zip,1,CP,3937,5644,55,2,\"[5643, 5644]\"\r\nNCP-15.zip,2,NCP,427,2007,132,2,\"[2007, 2008]\"\r\nNCP-28.zip,2,NCP,843,2358,279,1,[2358]\r\nNCP-14.zip,2,NCP,375,1903,49,3,\"[1901, 1902, 1903]\"\r\nNCP-11.zip,2,NCP,306,1759,153,2,\"[1759, 1760]\"\r\nNCP-16.zip,2,NCP,44,1221,124,2,\"[1221, 1222]\"\r\nNCP-8.zip,2,NCP,256,1659,58,2,\"[1658, 1659]\"\r\nCP-8.zip,1,CP,1338,3717,67,2,\"[3716, 3717]\"\r\nCP-18.zip,1,CP,1780,3553,67,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nCP-7.zip,1,CP,1267,3485,151,1,[3485]\r\nCP-13.zip,1,CP,1509,4119,118,3,\"[4118, 4119, 4120]\"\r\nNormal-3.zip,0,Normal,1766,1151,62,3,\"[1149, 1150, 1151]\"\r\nCP-10.zip,1,CP,1405,3873,60,2,\"[3872, 3873]\"\r\nCP-1.zip,1,CP,1079,3124,63,1,[3124]\r\nCP-18.zip,1,CP,1780,3559,69,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nNormal-7.zip,0,Normal,1852,307,94,1,[307]\r\nNCP-5.zip,2,NCP,194,1533,56,2,\"[1532, 
1533]\"\r\nCP-5.zip,1,CP,1195,3413,247,1,[3413]\r\nNCP-20.zip,2,NCP,556,2270,53,2,\"[2269, 2270]\"\r\nNCP-2.zip,2,NCP,108,1353,139,2,\"[1353, 1354]\"\r\nNCP-16.zip,2,NCP,445,2045,58,2,\"[2044, 2045]\"\r\nCP-13.zip,1,CP,1512,4126,50,2,\"[4125, 4126]\"\r\nNCP-21.zip,2,NCP,64,1262,55,2,\"[1261, 1262]\"\r\nCP-5.zip,1,CP,1211,3429,143,1,[3429]\r\nNCP-1.zip,2,NCP,1042,2614,143,2,\"[2613, 2614]\"\r\nNCP-21.zip,2,NCP,73,1280,55,3,\"[1278, 1279, 1280]\"\r\nCP-9.zip,1,CP,1364,3776,133,3,\"[3776, 3777, 3778]\"\r\nNCP-21.zip,2,NCP,58,1249,131,2,\"[1249, 1250]\"\r\nCP-20.zip,1,CP,2668,3250,44,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nNCP-19.zip,2,NCP,518,2193,57,2,\"[2192, 2193]\"\r\nNCP-21.zip,2,NCP,73,1279,57,3,\"[1278, 1279, 1280]\"\r\nCP-26.zip,1,CP,3733,5674,159,3,\"[5673, 5674, 5675]\"\r\nNormal-19.zip,0,Normal,2247,702,86,1,[702]\r\nNCP-28.zip,2,NCP,867,2394,161,1,[2394]\r\nCP-22.zip,1,CP,633,2995,114,1,[2995]\r\nCP-9.zip,1,CP,1371,3796,60,3,\"[3794, 3795, 3796]\"\r\nNCP-22.zip,2,NCP,86,1305,117,2,\"[1305, 1306]\"\r\nNCP-14.zip,2,NCP,40,1213,63,2,\"[1212, 1213]\"\r\nNormal-26.zip,0,Normal,3892,5415,72,1,[5415]\r\nCP-7.zip,1,CP,1306,3644,237,3,\"[3642, 3643, 3644]\"\r\nCP-24.zip,1,CP,702,3064,78,1,[3064]\r\nNCP-26.zip,2,NCP,3975,5483,44,1,[5483]\r\nCP-4.zip,1,CP,1164,3382,193,1,[3382]\r\nNormal-11.zip,0,Normal,1960,415,98,1,[415]\r\nCP-5.zip,1,CP,1203,3421,231,1,[3421]\r\nCP-19.zip,1,CP,2434,2900,104,3,\"[2898, 2899, 2900]\"\r\nNCP-29.zip,2,NCP,890,2429,203,1,[2429]\r\nNCP-16.zip,2,NCP,448,2050,139,2,\"[2050, 2051]\"\r\nCP-18.zip,1,CP,1780,3555,60,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nCP-12.zip,1,CP,1457,3991,69,1,[3991]\r\nNormal-3.zip,0,Normal,756,191,106,1,[191]\r\nNCP-29.zip,2,NCP,900,2442,506,1,[2442]\r\nCP-11.zip,1,CP,1432,3932,60,2,\"[3932, 3933]\"\r\nNCP-17.zip,2,NCP,476,2107,127,2,\"[2107, 
2108]\"\r\nCP-28.zip,1,CP,3794,5738,26,1,[5738]\r\nCP-23.zip,1,CP,669,3031,70,1,[3031]\r\nNormal-9.zip,0,Normal,1911,366,96,1,[366]\r\nNormal-9.zip,0,Normal,1919,374,99,1,[374]\r\nNCP-12.zip,2,NCP,335,1818,129,2,\"[1818, 1819]\"\r\nCP-18.zip,1,CP,1651,4339,31,1,[4339]\r\nNormal-4.zip,0,Normal,798,233,122,1,[233]\r\nNCP-18.zip,2,NCP,508,2173,61,2,\"[2172, 2173]\"\r\nNCP-21.zip,2,NCP,67,1266,168,2,\"[1266, 1267]\"\r\nNCP-6.zip,2,NCP,214,1573,60,2,\"[1572, 1573]\"\r\nCP-10.zip,1,CP,1405,3872,60,2,\"[3872, 3873]\"\r\nNCP-6.zip,2,NCP,208,1561,56,2,\"[1560, 1561]\"\r\nNCP-14.zip,2,NCP,373,1898,52,2,\"[1897, 1898]\"\r\nNCP-3.zip,2,NCP,1281,2722,65,1,[2722]\r\nCP-24.zip,1,CP,707,3069,72,1,[3069]\r\nNCP-28.zip,2,NCP,831,2344,278,1,[2344]\r\nNormal-17.zip,0,Normal,2179,634,101,1,[634]\r\nNCP-21.zip,2,NCP,60,1253,141,2,\"[1253, 1254]\"\r\nNCP-8.zip,2,NCP,259,1665,65,2,\"[1664, 1665]\"\r\nNCP-11.zip,2,NCP,311,1770,55,2,\"[1769, 1770]\"\r\nNormal-1.zip,0,Normal,1678,828,58,6,\"[827, 828, 829, 830, 831, 832]\"\r\nNCP-27.zip,2,NCP,1050,2623,46,2,\"[2623, 2624]\"\r\nNCP-18.zip,2,NCP,490,2137,62,2,\"[2136, 2137]\"\r\nNormal-27.zip,0,Normal,3900,5431,64,2,\"[5431, 5432]\"\r\nNormal-15.zip,0,Normal,2110,565,83,1,[565]\r\nNCP-13.zip,2,NCP,368,1887,129,2,\"[1887, 1888]\"\r\nNCP-27.zip,2,NCP,817,2326,120,1,[2326]\r\nNormal-1.zip,0,Normal,1678,831,62,6,\"[827, 828, 829, 830, 831, 832]\"\r\nCP-15.zip,1,CP,1567,4255,59,2,\"[4254, 4255]\"\r\nNCP-5.zip,2,NCP,178,1500,124,2,\"[1500, 1501]\"\r\nNCP-13.zip,2,NCP,345,1841,147,2,\"[1841, 1842]\"\r\nNormal-2.zip,0,Normal,1761,1128,60,5,\"[1125, 1126, 1127, 1128, 1129]\"\r\nCP-8.zip,1,CP,1343,3727,56,2,\"[3726, 3727]\"\r\nNCP-30.zip,2,NCP,936,2478,21,1,[2478]\r\nNCP-11.zip,2,NCP,306,1760,64,2,\"[1759, 1760]\"\r\nNCP-17.zip,2,NCP,487,2129,167,2,\"[2129, 2130]\"\r\nCP-1.zip,1,CP,0,3138,245,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nCP-30.zip,1,CP,3930,5629,62,2,\"[5628, 
5629]\"\r\nNCP-9.zip,2,NCP,2692,2700,48,1,[2700]\r\nNCP-20.zip,2,NCP,556,2269,125,2,\"[2269, 2270]\"\r\nCP-18.zip,1,CP,1775,3531,58,4,\"[3530, 3531, 3532, 3533]\"\r\nNCP-23.zip,2,NCP,896,2437,39,1,[2437]\r\nCP-21.zip,1,CP,5,3509,275,1,[3509]\r\nNormal-19.zip,0,Normal,2217,672,71,1,[672]\r\nNCP-1.zip,2,NCP,1010,2573,126,2,\"[2572, 2573]\"\r\nNormal-1.zip,0,Normal,1710,975,78,2,\"[975, 976]\"\r\nCP-14.zip,1,CP,1545,4206,65,2,\"[4206, 4207]\"\r\nNCP-1.zip,2,NCP,100,1337,139,2,\"[1337, 1338]\"\r\nNCP-26.zip,2,NCP,3998,5495,41,1,[5495]\r\nCP-25.zip,1,CP,711,3073,112,1,[3073]\r\nCP-24.zip,1,CP,699,3061,64,1,[3061]\r\nCP-4.zip,1,CP,1173,3391,201,1,[3391]\r\nCP-27.zip,1,CP,3740,5684,23,1,[5684]\r\nCP-16.zip,1,CP,1590,4278,20,1,[4278]\r\nNormal-2.zip,0,Normal,1762,1130,70,2,\"[1130, 1131]\"\r\nNormal-1.zip,0,Normal,1679,833,66,6,\"[833, 834, 835, 836, 837, 838]\"\r\nNCP-29.zip,2,NCP,928,2470,25,1,[2470]\r\nCP-18.zip,1,CP,1775,3533,57,4,\"[3530, 3531, 3532, 3533]\"\r\nNormal-3.zip,0,Normal,766,201,94,1,[201]\r\nNormal-11.zip,0,Normal,1964,419,100,1,[419]\r\nNCP-9.zip,2,NCP,2690,2657,48,1,[2657]\r\nNCP-21.zip,2,NCP,78,1290,69,2,\"[1289, 1290]\"\r\nNormal-16.zip,0,Normal,2147,602,95,1,[602]\r\nNCP-19.zip,2,NCP,544,2246,62,2,\"[2245, 2246]\"\r\nNormal-27.zip,0,Normal,3900,5432,64,2,\"[5431, 5432]\"\r\nNormal-8.zip,0,Normal,1860,315,92,1,[315]\r\nCP-21.zip,1,CP,601,2963,104,1,[2963]\r\nCP-2.zip,1,CP,11,3164,287,7,\"[3161, 3162, 3163, 3164, 3165, 3166, 3167]\"\r\nCP-15.zip,1,CP,1563,4245,241,3,\"[4245, 4246, 4247]\""
  },
  {
    "path": "Finetune/CC-CCII/csv/CC_CCII_fold2_train.csv",
    "content": "zip_file,target,label,patient_id,scan_id,n_slice,scan_count,all_scan_ids\r\nCP-6.zip,1,CP,1229,3447,144,1,[3447]\r\nCP-26.zip,1,CP,3718,5647,51,2,\"[5647, 5648]\"\r\nCP-3.zip,1,CP,1148,3366,158,1,[3366]\r\nCP-5.zip,1,CP,1200,3418,309,1,[3418]\r\nCP-1.zip,1,CP,1088,3221,54,4,\"[3220, 3221, 3222, 3223]\"\r\nCP-21.zip,1,CP,585,2947,94,1,[2947]\r\nCP-18.zip,1,CP,1772,3178,72,6,\"[3176, 3177, 3178, 3179, 3180, 3181]\"\r\nCP-1.zip,1,CP,1078,3123,68,1,[3123]\r\nCP-12.zip,1,CP,1473,4028,51,3,\"[4026, 4027, 4028]\"\r\nCP-15.zip,1,CP,1559,4237,53,2,\"[4237, 4238]\"\r\nCP-7.zip,1,CP,1259,3477,162,1,[3477]\r\nCP-14.zip,1,CP,1541,4194,142,3,\"[4194, 4195, 4196]\"\r\nCP-18.zip,1,CP,1658,4346,29,1,[4346]\r\nCP-9.zip,1,CP,1373,3801,55,2,\"[3800, 3801]\"\r\nCP-12.zip,1,CP,1456,3990,52,3,\"[3988, 3989, 3990]\"\r\nCP-9.zip,1,CP,1367,3787,58,3,\"[3785, 3786, 3787]\"\r\nCP-1.zip,1,CP,1097,3315,119,1,[3315]\r\nCP-11.zip,1,CP,1438,3944,46,2,\"[3944, 3945]\"\r\nNormal-2.zip,0,Normal,1759,1115,64,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nCP-14.zip,1,CP,1523,4150,65,2,\"[4150, 4151]\"\r\nCP-19.zip,1,CP,2447,2923,83,2,\"[2923, 2924]\"\r\nCP-19.zip,1,CP,1788,3203,57,8,\"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]\"\r\nCP-5.zip,1,CP,1220,3438,200,1,[3438]\r\nCP-16.zip,1,CP,1593,4281,22,1,[4281]\r\nNormal-18.zip,0,Normal,2200,655,94,1,[655]\r\nCP-28.zip,1,CP,3784,5728,29,1,[5728]\r\nCP-2.zip,1,CP,1109,3327,210,1,[3327]\r\nCP-19.zip,1,CP,2444,2918,124,2,\"[2918, 2919]\"\r\nNormal-2.zip,0,Normal,1760,1122,137,4,\"[1121, 1122, 1123, 1124]\"\r\nCP-12.zip,1,CP,1476,4033,106,2,\"[4033, 4034]\"\r\nCP-14.zip,1,CP,1538,4186,66,3,\"[4185, 4186, 4187]\"\r\nNormal-13.zip,0,Normal,2046,501,79,1,[501]\r\nCP-15.zip,1,CP,1565,4250,66,2,\"[4250, 4251]\"\r\nCP-10.zip,1,CP,1407,3876,58,2,\"[3876, 3877]\"\r\nNormal-27.zip,0,Normal,3905,5437,288,2,\"[5437, 5438]\"\r\nNCP-13.zip,2,NCP,36,1204,141,2,\"[1204, 
1205]\"\r\nNCP-30.zip,2,NCP,941,2484,169,1,[2484]\r\nNormal-2.zip,0,Normal,1758,1109,291,2,\"[1109, 1110]\"\r\nCP-8.zip,1,CP,1342,3723,139,3,\"[3723, 3724, 3725]\"\r\nCP-3.zip,1,CP,1132,3350,180,1,[3350]\r\nCP-18.zip,1,CP,1773,3184,67,4,\"[3182, 3183, 3184, 3185]\"\r\nNCP-17.zip,2,NCP,464,2083,60,2,\"[2082, 2083]\"\r\nNCP-16.zip,2,NCP,447,2048,139,2,\"[2048, 2049]\"\r\nNCP-3.zip,2,NCP,136,1416,126,2,\"[1416, 1417]\"\r\nNCP-18.zip,2,NCP,501,2158,146,2,\"[2158, 2159]\"\r\nCP-19.zip,1,CP,2439,2909,409,1,[2909]\r\nNCP-19.zip,2,NCP,538,2233,142,2,\"[2233, 2234]\"\r\nNormal-27.zip,0,Normal,3907,5440,63,2,\"[5440, 5441]\"\r\nCP-18.zip,1,CP,1773,3182,61,4,\"[3182, 3183, 3184, 3185]\"\r\nCP-8.zip,1,CP,1320,3677,62,2,\"[3676, 3677]\"\r\nCP-9.zip,1,CP,1366,3782,138,3,\"[3782, 3783, 3784]\"\r\nCP-7.zip,1,CP,1309,3651,49,2,\"[3651, 3652]\"\r\nNCP-18.zip,2,NCP,492,2140,139,2,\"[2140, 2141]\"\r\nNCP-21.zip,2,NCP,69,1271,48,2,\"[1270, 1271]\"\r\nCP-13.zip,1,CP,1515,4131,137,3,\"[4131, 4132, 4133]\"\r\nNormal-11.zip,0,Normal,1980,435,83,1,[435]\r\nNormal-14.zip,0,Normal,2073,528,87,1,[528]\r\nCP-3.zip,1,CP,1149,3367,157,1,[3367]\r\nNCP-14.zip,2,NCP,376,1905,60,2,\"[1904, 1905]\"\r\nNCP-8.zip,2,NCP,253,1653,58,2,\"[1652, 1653]\"\r\nNCP-27.zip,2,NCP,1061,2638,75,1,[2638]\r\nNormal-9.zip,0,Normal,1921,376,80,1,[376]\r\nNCP-16.zip,2,NCP,453,2061,51,2,\"[2060, 2061]\"\r\nNCP-10.zip,2,NCP,275,1697,64,2,\"[1696, 1697]\"\r\nCP-24.zip,1,CP,708,3070,80,1,[3070]\r\nNCP-20.zip,2,NCP,560,2277,124,2,\"[2277, 2279]\"\r\nNCP-6.zip,2,NCP,207,1558,109,2,\"[1558, 1559]\"\r\nNCP-2.zip,2,NCP,114,1370,53,2,\"[1369, 1370]\"\r\nCP-10.zip,1,CP,1407,3877,58,2,\"[3876, 3877]\"\r\nNormal-1.zip,0,Normal,1682,858,70,6,\"[847, 848, 852, 853, 857, 858]\"\r\nCP-14.zip,1,CP,1548,4214,51,2,\"[4213, 4214]\"\r\nNormal-2.zip,0,Normal,1760,1124,74,4,\"[1121, 1122, 1123, 1124]\"\r\nNCP-14.zip,2,NCP,374,1900,58,2,\"[1899, 1900]\"\r\nNCP-7.zip,2,NCP,2486,2645,50,1,[2645]\r\nNCP-19.zip,2,NCP,542,2242,55,2,\"[2241, 
2242]\"\r\nNormal-25.zip,0,Normal,3836,5348,202,1,[5348]\r\nNormal-11.zip,0,Normal,1961,416,91,1,[416]\r\nNCP-27.zip,2,NCP,819,2329,33,1,[2329]\r\nNCP-5.zip,2,NCP,184,1512,112,2,\"[1512, 1513]\"\r\nNCP-15.zip,2,NCP,416,1984,139,2,\"[1984, 1986]\"\r\nCP-14.zip,1,CP,1538,4187,65,3,\"[4185, 4186, 4187]\"\r\nCP-8.zip,1,CP,1351,3746,56,1,[3746]\r\nNCP-10.zip,2,NCP,281,1709,51,2,\"[1708, 1709]\"\r\nCP-10.zip,1,CP,1415,3895,65,3,\"[3894, 3895, 3896]\"\r\nNormal-1.zip,0,Normal,1682,848,67,6,\"[847, 848, 852, 853, 857, 858]\"\r\nNCP-17.zip,2,NCP,485,2126,64,2,\"[2125, 2126]\"\r\nNCP-18.zip,2,NCP,501,2159,61,2,\"[2158, 2159]\"\r\nNormal-8.zip,0,Normal,1863,318,82,1,[318]\r\nCP-18.zip,1,CP,1772,3176,81,6,\"[3176, 3177, 3178, 3179, 3180, 3181]\"\r\nCP-26.zip,1,CP,3652,5551,53,2,\"[5551, 5552]\"\r\nNormal-5.zip,0,Normal,808,243,134,1,[243]\r\nCP-28.zip,1,CP,3771,5715,23,1,[5715]\r\nCP-26.zip,1,CP,3637,5596,35,1,[5596]\r\nCP-12.zip,1,CP,1455,3987,58,3,\"[3985, 3986, 3987]\"\r\nCP-8.zip,1,CP,1336,3712,60,2,\"[3712, 3713]\"\r\nCP-30.zip,1,CP,4015,5564,226,1,[5564]\r\nNormal-8.zip,0,Normal,1883,338,91,1,[338]\r\nNormal-3.zip,0,Normal,1764,1145,62,4,\"[1143, 1144, 1145, 1146]\"\r\nNCP-15.zip,2,NCP,42,1218,61,2,\"[1216, 1218]\"\r\nNCP-7.zip,2,NCP,245,1636,149,2,\"[1636, 1637]\"\r\nNormal-14.zip,0,Normal,2066,521,74,1,[521]\r\nNormal-20.zip,0,Normal,2275,730,85,1,[730]\r\nNCP-8.zip,2,NCP,268,1682,126,2,\"[1682, 1683]\"\r\nCP-7.zip,1,CP,1307,3647,49,4,\"[3645, 3646, 3647, 3648]\"\r\nNormal-15.zip,0,Normal,2106,561,93,1,[561]\r\nCP-20.zip,1,CP,2772,3303,261,1,[3303]\r\nNCP-25.zip,2,NCP,3970,5479,48,1,[5479]\r\nCP-28.zip,1,CP,3772,5716,23,1,[5716]\r\nNCP-5.zip,2,NCP,175,1494,131,2,\"[1494, 1495]\"\r\nNCP-18.zip,2,NCP,507,2171,58,2,\"[2170, 2171]\"\r\nNCP-19.zip,2,NCP,537,2231,143,2,\"[2231, 2232]\"\r\nNormal-1.zip,0,Normal,1728,1014,66,4,\"[1013, 1014, 1015, 1016]\"\r\nNormal-23.zip,0,Normal,2608,118,25,1,[118]\r\nNCP-23.zip,2,NCP,90,1317,43,2,\"[1316, 
1317]\"\r\nNCP-2.zip,2,NCP,123,1388,62,2,\"[1387, 1388]\"\r\nNCP-18.zip,2,NCP,507,2170,138,2,\"[2170, 2171]\"\r\nNCP-14.zip,2,NCP,395,1940,171,2,\"[1940, 1941]\"\r\nNCP-23.zip,2,NCP,946,2489,26,1,[2489]\r\nCP-7.zip,1,CP,1308,3649,43,2,\"[3649, 3650]\"\r\nNCP-17.zip,2,NCP,462,2078,161,2,\"[2078, 2079]\"\r\nNormal-16.zip,0,Normal,2145,600,86,1,[600]\r\nNCP-20.zip,2,NCP,560,2279,51,2,\"[2277, 2279]\"\r\nCP-30.zip,1,CP,3931,5630,82,4,\"[5630, 5631, 5632, 5633]\"\r\nCP-13.zip,1,CP,1501,4101,55,2,\"[4100, 4101]\"\r\nCP-1.zip,1,CP,1,3144,248,5,\"[3143, 3144, 3145, 3146, 3147]\"\r\nCP-25.zip,1,CP,713,3075,120,1,[3075]\r\nCP-15.zip,1,CP,1562,4244,55,2,\"[4243, 4244]\"\r\nCP-26.zip,1,CP,3643,5602,298,2,\"[5602, 5603]\"\r\nCP-27.zip,1,CP,3748,5692,17,1,[5692]\r\nCP-14.zip,1,CP,1524,4152,229,3,\"[4152, 4153, 4154]\"\r\nNormal-6.zip,0,Normal,1800,255,92,1,[255]\r\nNormal-1.zip,0,Normal,1711,978,63,2,\"[977, 978]\"\r\nNormal-17.zip,0,Normal,2157,612,78,1,[612]\r\nCP-8.zip,1,CP,1334,3707,133,2,\"[3707, 3708]\"\r\nNCP-19.zip,2,NCP,545,2247,135,2,\"[2247, 2248]\"\r\nCP-28.zip,1,CP,3790,5734,23,1,[5734]\r\nNCP-30.zip,2,NCP,993,2546,203,1,[2546]\r\nNCP-9.zip,2,NCP,2689,2656,47,1,[2656]\r\nNormal-27.zip,0,Normal,3907,5441,66,2,\"[5440, 5441]\"\r\nCP-26.zip,1,CP,3652,5552,52,2,\"[5551, 5552]\"\r\nNCP-11.zip,2,NCP,287,1719,142,2,\"[1719, 1720]\"\r\nNCP-2.zip,2,NCP,114,1369,125,2,\"[1369, 1370]\"\r\nNCP-21.zip,2,NCP,581,2320,58,2,\"[2319, 2320]\"\r\nNormal-26.zip,0,Normal,3887,5404,78,3,\"[5400, 5401, 5404]\"\r\nNCP-12.zip,2,NCP,325,1799,50,2,\"[1798, 1799]\"\r\nNCP-27.zip,2,NCP,1060,2637,81,1,[2637]\r\nCP-13.zip,1,CP,1516,4135,62,2,\"[4134, 4135]\"\r\nCP-15.zip,1,CP,1580,4268,21,1,[4268]\r\nNCP-15.zip,2,NCP,428,2009,125,2,\"[2009, 2010]\"\r\nNCP-19.zip,2,NCP,52,1237,135,2,\"[1237, 1238]\"\r\nNCP-9.zip,2,NCP,2691,2658,44,1,[2658]\r\nNCP-12.zip,2,NCP,34,1200,156,2,\"[1200, 1201]\"\r\nNCP-19.zip,2,NCP,539,2235,131,2,\"[2235, 2236]\"\r\nNormal-1.zip,0,Normal,1728,1015,72,4,\"[1013, 1014, 
1015, 1016]\"\r\nNCP-6.zip,2,NCP,222,1588,122,2,\"[1588, 1589]\"\r\nNCP-10.zip,2,NCP,273,1693,54,2,\"[1692, 1693]\"\r\nCP-29.zip,1,CP,3822,5766,20,1,[5766]\r\nCP-10.zip,1,CP,1401,3864,51,3,\"[3862, 3863, 3864]\"\r\nNormal-13.zip,0,Normal,2030,485,66,1,[485]\r\nNCP-4.zip,2,NCP,164,1473,63,2,\"[1472, 1473]\"\r\nCP-21.zip,1,CP,3,3504,35,1,[3504]\r\nCP-9.zip,1,CP,1368,3788,69,2,\"[3788, 3789]\"\r\nNormal-1.zip,0,Normal,1704,963,69,4,\"[961, 962, 963, 964]\"\r\nCP-12.zip,1,CP,1466,4012,52,2,\"[4011, 4012]\"\r\nNormal-11.zip,0,Normal,1971,426,100,1,[426]\r\nNCP-16.zip,2,NCP,450,2055,34,2,\"[2054, 2055]\"\r\nNCP-30.zip,2,NCP,962,2505,38,1,[2505]\r\nNCP-8.zip,2,NCP,2675,2648,44,1,[2648]\r\nNCP-25.zip,2,NCP,3955,5468,46,1,[5468]\r\nNCP-18.zip,2,NCP,488,2131,139,2,\"[2131, 2133]\"\r\nCP-12.zip,1,CP,1484,4053,181,3,\"[4053, 4054, 4055]\"\r\nCP-9.zip,1,CP,1368,3789,69,2,\"[3788, 3789]\"\r\nNCP-10.zip,2,NCP,28,1188,145,2,\"[1188, 1189]\"\r\nCP-30.zip,1,CP,3931,5631,82,4,\"[5630, 5631, 5632, 5633]\"\r\nNCP-10.zip,2,NCP,277,1701,64,2,\"[1700, 1701]\"\r\nNCP-4.zip,2,NCP,148,1441,63,2,\"[1440, 1441]\"\r\nCP-12.zip,1,CP,1481,4044,139,3,\"[4044, 4045, 4046]\"\r\nNormal-21.zip,0,Normal,2288,743,96,1,[743]\r\nCP-30.zip,1,CP,4017,5566,41,1,[5566]\r\nCP-13.zip,1,CP,1499,4098,53,2,\"[4097, 4098]\"\r\nCP-13.zip,1,CP,1516,4134,62,2,\"[4134, 4135]\"\r\nNormal-13.zip,0,Normal,2049,504,88,1,[504]\r\nCP-18.zip,1,CP,1772,3179,72,6,\"[3176, 3177, 3178, 3179, 3180, 3181]\"\r\nNCP-20.zip,2,NCP,57,1248,56,2,\"[1247, 1248]\"\r\nNormal-1.zip,0,Normal,1704,961,71,4,\"[961, 962, 963, 964]\"\r\nCP-9.zip,1,CP,1366,3783,57,3,\"[3782, 3783, 3784]\"\r\nCP-32.zip,1,CP,2464,3228,66,1,[3228]\r\nCP-15.zip,1,CP,1555,4228,62,2,\"[4228, 4229]\"\r\nNormal-3.zip,0,Normal,758,193,122,1,[193]\r\nNCP-12.zip,2,NCP,329,1806,157,2,\"[1806, 1807]\"\r\nCP-7.zip,1,CP,1307,3646,259,4,\"[3645, 3646, 3647, 3648]\"\r\nCP-26.zip,1,CP,3722,5657,205,2,\"[5656, 5657]\"\r\nNCP-14.zip,2,NCP,382,1916,139,2,\"[1916, 
1917]\"\r\nCP-27.zip,1,CP,3752,5696,20,1,[5696]\r\nNormal-16.zip,0,Normal,2129,584,75,1,[584]\r\nNCP-13.zip,2,NCP,367,1885,158,2,\"[1885, 1886]\"\r\nNCP-6.zip,2,NCP,204,1553,58,2,\"[1552, 1553]\"\r\nCP-30.zip,1,CP,3918,5542,71,1,[5542]\r\nNormal-11.zip,0,Normal,1979,434,87,1,[434]\r\nNormal-2.zip,0,Normal,1741,1053,61,2,\"[1053, 1054]\"\r\nNormal-10.zip,0,Normal,1945,400,87,1,[400]\r\nNormal-26.zip,0,Normal,3882,5394,27,1,[5394]\r\nCP-20.zip,1,CP,2456,2940,126,1,[2940]\r\nNCP-5.zip,2,NCP,184,1513,48,2,\"[1512, 1513]\"\r\nNCP-9.zip,2,NCP,2693,2659,49,1,[2659]\r\nCP-8.zip,1,CP,1348,3739,197,3,\"[3739, 3740, 3741]\"\r\nNormal-18.zip,0,Normal,2214,669,102,1,[669]\r\nCP-10.zip,1,CP,1415,3896,65,3,\"[3894, 3895, 3896]\"\r\nNCP-3.zip,2,NCP,1290,2731,66,1,[2731]\r\nNormal-2.zip,0,Normal,1759,1111,62,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nNCP-15.zip,2,NCP,401,1951,139,2,\"[1951, 1952]\"\r\nCP-7.zip,1,CP,1309,3652,49,2,\"[3651, 3652]\"\r\nNormal-4.zip,0,Normal,787,222,320,1,[222]\r\nNCP-20.zip,2,NCP,550,2258,60,2,\"[2257, 2258]\"\r\nNCP-5.zip,2,NCP,195,1534,143,2,\"[1534, 1535]\"\r\nNCP-13.zip,2,NCP,367,1886,66,2,\"[1885, 1886]\"\r\nNCP-19.zip,2,NCP,530,2218,132,1,[2218]\r\nNormal-6.zip,0,Normal,1811,266,95,1,[266]\r\nNCP-30.zip,2,NCP,963,2506,21,1,[2506]\r\nNormal-2.zip,0,Normal,1759,1112,62,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nNCP-13.zip,2,NCP,369,1890,58,2,\"[1889, 1890]\"\r\nNCP-16.zip,2,NCP,457,2068,134,2,\"[2068, 2069]\"\r\nNCP-26.zip,2,NCP,3981,5488,45,1,[5488]\r\nNCP-22.zip,2,NCP,816,2325,50,1,[2325]\r\nNormal-1.zip,0,Normal,1730,1019,63,5,\"[1019, 1020, 1021, 1022, 1023]\"\r\nNCP-15.zip,2,NCP,419,1991,130,2,\"[1991, 1992]\"\r\nCP-30.zip,1,CP,4016,5565,37,1,[5565]\r\nCP-24.zip,1,CP,694,3056,135,1,[3056]\r\nNCP-17.zip,2,NCP,470,2095,154,2,\"[2095, 2096]\"\r\nNormal-4.zip,0,Normal,781,216,118,1,[216]\r\nCP-13.zip,1,CP,1497,4093,68,3,\"[4092, 4093, 
4094]\"\r\nNCP-26.zip,2,NCP,3991,5515,43,1,[5515]\r\nCP-8.zip,1,CP,1331,3701,62,2,\"[3701, 3702]\"\r\nNormal-9.zip,0,Normal,1910,365,91,1,[365]\r\nNCP-27.zip,2,NCP,820,2330,34,1,[2330]\r\nCP-7.zip,1,CP,13,3171,65,4,\"[3170, 3171, 3172, 3173]\"\r\nCP-20.zip,1,CP,2764,3295,39,1,[3295]\r\nNormal-1.zip,0,Normal,1714,984,71,3,\"[982, 983, 984]\"\r\nCP-13.zip,1,CP,1501,4100,55,2,\"[4100, 4101]\"\r\nNormal-15.zip,0,Normal,2117,572,87,1,[572]\r\nCP-30.zip,1,CP,3929,5627,70,2,\"[5626, 5627]\"\r\nNCP-4.zip,2,NCP,158,1461,52,2,\"[1460, 1461]\"\r\nCP-10.zip,1,CP,1389,3834,52,3,\"[3833, 3834, 3835]\"\r\nCP-13.zip,1,CP,1497,4094,68,3,\"[4092, 4093, 4094]\"\r\nCP-10.zip,1,CP,1415,3894,155,3,\"[3894, 3895, 3896]\"\r\nCP-30.zip,1,CP,4014,5563,35,1,[5563]\r\nNCP-17.zip,2,NCP,462,2079,67,2,\"[2078, 2079]\"\r\nCP-29.zip,1,CP,3803,5747,23,1,[5747]\r\nCP-1.zip,1,CP,1,3143,300,5,\"[3143, 3144, 3145, 3146, 3147]\"\r\nNCP-11.zip,2,NCP,305,1756,157,2,\"[1756, 1758]\"\r\nNormal-1.zip,0,Normal,1668,781,63,4,\"[778, 779, 780, 781]\"\r\nCP-4.zip,1,CP,1174,3392,175,1,[3392]\r\nNormal-14.zip,0,Normal,2060,515,77,1,[515]\r\nNormal-22.zip,0,Normal,2602,112,32,1,[112]\r\nCP-14.zip,1,CP,1541,4196,58,3,\"[4194, 4195, 4196]\"\r\nNormal-12.zip,0,Normal,2019,474,87,1,[474]\r\nCP-25.zip,1,CP,733,3095,84,1,[3095]\r\nCP-13.zip,1,CP,1499,4097,53,2,\"[4097, 4098]\"\r\nCP-19.zip,1,CP,1788,3201,55,8,\"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]\"\r\nNormal-1.zip,0,Normal,1704,964,69,4,\"[961, 962, 963, 964]\"\r\nNCP-3.zip,2,NCP,1289,2730,62,1,[2730]\r\nNCP-20.zip,2,NCP,567,2292,148,2,\"[2292, 2293]\"\r\nNormal-13.zip,0,Normal,2027,482,89,1,[482]\r\nNormal-2.zip,0,Normal,1759,1114,59,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nNormal-27.zip,0,Normal,3897,5424,75,4,\"[5423, 5424, 5426, 5427]\"\r\nNormal-22.zip,0,Normal,2317,772,77,1,[772]\r\nNormal-2.zip,0,Normal,1758,1110,59,2,\"[1109, 1110]\"\r\nNCP-2.zip,2,NCP,121,1384,43,2,\"[1383, 
1384]\"\r\nNCP-13.zip,2,NCP,356,1864,53,2,\"[1863, 1864]\"\r\nNormal-2.zip,0,Normal,1760,1121,85,4,\"[1121, 1122, 1123, 1124]\"\r\nCP-29.zip,1,CP,3825,5769,25,1,[5769]\r\nNCP-17.zip,2,NCP,46,1226,52,2,\"[1225, 1226]\"\r\nNCP-19.zip,2,NCP,53,1240,60,2,\"[1239, 1240]\"\r\nNCP-12.zip,2,NCP,314,1776,58,2,\"[1775, 1776]\"\r\nNormal-21.zip,0,Normal,2290,745,88,1,[745]\r\nNormal-2.zip,0,Normal,1760,1123,74,4,\"[1121, 1122, 1123, 1124]\"\r\nNormal-24.zip,0,Normal,2666,176,35,1,[176]\r\nCP-8.zip,1,CP,1346,3735,53,3,\"[3733, 3734, 3735]\"\r\nNormal-17.zip,0,Normal,2164,619,84,1,[619]\r\nNCP-8.zip,2,NCP,2672,2647,47,1,[2647]\r\nNCP-2.zip,2,NCP,1277,2718,57,1,[2718]\r\nCP-9.zip,1,CP,1370,3793,62,2,\"[3792, 3793]\"\r\nNormal-10.zip,0,Normal,1928,383,87,1,[383]\r\nCP-21.zip,1,CP,598,2960,646,1,[2960]\r\nCP-20.zip,1,CP,2755,3286,34,1,[3286]\r\nNormal-16.zip,0,Normal,2141,596,100,1,[596]\r\nCP-14.zip,1,CP,1544,4205,50,3,\"[4203, 4204, 4205]\"\r\nNCP-9.zip,2,NCP,270,1687,62,2,\"[1686, 1687]\"\r\nCP-5.zip,1,CP,1222,3440,157,1,[3440]\r\nCP-19.zip,1,CP,1791,3210,100,4,\"[3210, 3211, 3212, 3213]\"\r\nNCP-16.zip,2,NCP,450,2054,78,2,\"[2054, 2055]\"\r\nNCP-20.zip,2,NCP,557,2272,56,2,\"[2271, 2272]\"\r\nNCP-3.zip,2,NCP,1284,2725,50,1,[2725]\r\nCP-5.zip,1,CP,1205,3423,146,1,[3423]\r\nNCP-1.zip,2,NCP,1017,2583,452,1,[2583]\r\nNormal-2.zip,0,Normal,1736,1033,25,5,\"[1032, 1033, 1034, 1035, 1036]\"\r\nNCP-1.zip,2,NCP,1021,2589,183,4,\"[2587, 2588, 2589, 2590]\"\r\nNCP-7.zip,2,NCP,232,1608,146,2,\"[1608, 1609]\"\r\nNCP-23.zip,2,NCP,950,2493,34,1,[2493]\r\nCP-6.zip,1,CP,1246,3464,175,1,[3464]\r\nNormal-1.zip,0,Normal,1671,794,67,3,\"[793, 794, 795]\"\r\nCP-1.zip,1,CP,1095,3313,161,1,[3313]\r\nCP-25.zip,1,CP,714,3076,98,1,[3076]\r\nNCP-17.zip,2,NCP,479,2114,58,2,\"[2113, 2114]\"\r\nNormal-24.zip,0,Normal,2652,162,35,1,[162]\r\nNormal-2.zip,0,Normal,1752,1085,66,1,[1085]\r\nNormal-24.zip,0,Normal,2638,148,38,1,[148]\r\nNCP-8.zip,2,NCP,260,1666,163,2,\"[1666, 
1667]\"\r\nNCP-18.zip,2,NCP,509,2175,58,2,\"[2174, 2175]\"\r\nNormal-14.zip,0,Normal,2079,534,92,1,[534]\r\nNormal-3.zip,0,Normal,751,186,119,1,[186]\r\nNCP-8.zip,2,NCP,263,1673,74,2,\"[1672, 1673]\"\r\nCP-22.zip,1,CP,626,2988,174,1,[2988]\r\nNormal-23.zip,0,Normal,2619,129,43,1,[129]\r\nCP-1.zip,1,CP,1069,3109,77,4,\"[3108, 3109, 3110, 3111]\"\r\nNCP-13.zip,2,NCP,360,1872,51,2,\"[1871, 1872]\"\r\nNCP-23.zip,2,NCP,915,2457,31,1,[2457]\r\nNCP-3.zip,2,NCP,131,1407,117,2,\"[1407, 1408]\"\r\nNCP-21.zip,2,NCP,79,1292,55,2,\"[1291, 1292]\"\r\nNormal-4.zip,0,Normal,779,214,290,1,[214]\r\nCP-27.zip,1,CP,3734,5676,32,3,\"[5676, 5677, 5678]\"\r\nNormal-15.zip,0,Normal,2104,559,101,1,[559]\r\nCP-5.zip,1,CP,1218,3436,213,1,[3436]\r\nNCP-3.zip,2,NCP,1291,2732,55,1,[2732]\r\nNCP-19.zip,2,NCP,537,2232,60,2,\"[2231, 2232]\"\r\nNCP-21.zip,2,NCP,71,1274,126,2,\"[1274, 1275]\"\r\nNCP-5.zip,2,NCP,195,1535,60,2,\"[1534, 1535]\"\r\nCP-9.zip,1,CP,1359,3766,46,3,\"[3764, 3765, 3766]\"\r\nNCP-2.zip,2,NCP,119,1380,62,2,\"[1379, 1380]\"\r\nNormal-19.zip,0,Normal,2241,696,86,1,[696]\r\nNormal-15.zip,0,Normal,2112,567,84,1,[567]\r\nNCP-20.zip,2,NCP,569,2296,142,2,\"[2296, 2297]\"\r\nNCP-9.zip,2,NCP,2700,2666,43,1,[2666]\r\nNCP-9.zip,2,NCP,2697,2663,46,1,[2663]\r\nCP-29.zip,1,CP,3809,5753,19,1,[5753]\r\nNCP-10.zip,2,NCP,2718,2674,42,1,[2674]\r\nNormal-1.zip,0,Normal,1668,778,60,4,\"[778, 779, 780, 781]\"\r\nNCP-18.zip,2,NCP,509,2174,138,2,\"[2174, 2175]\"\r\nNCP-16.zip,2,NCP,456,2066,135,2,\"[2066, 2067]\"\r\nNCP-5.zip,2,NCP,187,1519,57,2,\"[1518, 1519]\"\r\nNCP-20.zip,2,NCP,57,1247,132,2,\"[1247, 1248]\"\r\nNormal-1.zip,0,Normal,1715,986,71,2,\"[985, 986]\"\r\nNormal-2.zip,0,Normal,1749,1069,61,4,\"[1069, 1070, 1071, 1072]\"\r\nNCP-24.zip,2,NCP,984,2530,241,2,\"[2529, 2530]\"\r\nNormal-1.zip,0,Normal,1682,847,67,6,\"[847, 848, 852, 853, 857, 858]\"\r\nCP-1.zip,1,CP,1069,3110,77,4,\"[3108, 3109, 3110, 
3111]\"\r\nNormal-12.zip,0,Normal,2017,472,99,1,[472]\r\nCP-10.zip,1,CP,1400,3861,54,2,\"[3860, 3861]\"\r\nNCP-22.zip,2,NCP,881,2416,225,1,[2416]\r\nCP-11.zip,1,CP,1420,3906,59,2,\"[3905, 3906]\"\r\nNCP-6.zip,2,NCP,20,1172,127,2,\"[1172, 1173]\"\r\nNCP-28.zip,2,NCP,846,2364,269,1,[2364]\r\nNormal-14.zip,0,Normal,2075,530,93,1,[530]\r\nCP-6.zip,1,CP,1238,3456,191,1,[3456]\r\nCP-7.zip,1,CP,1263,3481,120,1,[3481]\r\nCP-1.zip,1,CP,1088,3220,54,4,\"[3220, 3221, 3222, 3223]\"\r\nCP-8.zip,1,CP,1320,3676,62,2,\"[3676, 3677]\"\r\nNCP-15.zip,2,NCP,426,2005,139,2,\"[2005, 2006]\"\r\nNCP-28.zip,2,NCP,869,2397,58,1,[2397]\r\nNCP-11.zip,2,NCP,288,1721,114,2,\"[1721, 1722]\"\r\nNCP-21.zip,2,NCP,581,2319,139,2,\"[2319, 2320]\"\r\nNormal-26.zip,0,Normal,3878,5390,24,1,[5390]\r\nNormal-13.zip,0,Normal,2041,496,95,1,[496]\r\nNormal-25.zip,0,Normal,3845,5357,182,1,[5357]\r\nNormal-22.zip,0,Normal,2599,109,39,1,[109]\r\nNormal-4.zip,0,Normal,789,224,120,1,[224]\r\nNormal-1.zip,0,Normal,1714,982,40,3,\"[982, 983, 984]\"\r\nNCP-16.zip,2,NCP,434,2022,51,2,\"[2021, 2022]\"\r\nNCP-28.zip,2,NCP,830,2343,120,1,[2343]\r\nNormal-1.zip,0,Normal,1704,962,71,4,\"[961, 962, 963, 964]\"\r\nNCP-5.zip,2,NCP,196,1537,55,2,\"[1536, 1537]\"\r\nCP-8.zip,1,CP,1336,3713,60,2,\"[3712, 3713]\"\r\nNCP-29.zip,2,NCP,895,2436,140,2,\"[2435, 2436]\"\r\nNCP-29.zip,2,NCP,930,2472,23,1,[2472]\r\nCP-12.zip,1,CP,1482,4047,181,3,\"[4047, 4048, 4049]\"\r\nCP-10.zip,1,CP,1401,3862,201,3,\"[3862, 3863, 3864]\"\r\nNCP-5.zip,2,NCP,182,1509,55,2,\"[1508, 1509]\"\r\nCP-12.zip,1,CP,1483,4050,148,3,\"[4050, 4051, 4052]\"\r\nNormal-2.zip,0,Normal,1741,1054,61,2,\"[1053, 1054]\"\r\nCP-8.zip,1,CP,1324,3684,58,2,\"[3684, 3685]\"\r\nNCP-9.zip,2,NCP,2681,2696,58,1,[2696]\r\nCP-9.zip,1,CP,1367,3786,58,3,\"[3785, 3786, 3787]\"\r\nCP-19.zip,1,CP,1790,3209,69,2,\"[3208, 3209]\"\r\nCP-11.zip,1,CP,1430,3928,77,2,\"[3928, 
3929]\"\r\nNormal-18.zip,0,Normal,2207,662,99,1,[662]\r\nNormal-11.zip,0,Normal,1972,427,97,1,[427]\r\nCP-5.zip,1,CP,1221,3439,295,1,[3439]\r\nNCP-15.zip,2,NCP,42,1216,146,2,\"[1216, 1218]\"\r\nCP-22.zip,1,CP,640,3002,136,1,[3002]\r\nNCP-7.zip,2,NCP,245,1637,62,2,\"[1636, 1637]\"\r\nNCP-6.zip,2,NCP,215,1574,155,2,\"[1574, 1575]\"\r\nNCP-29.zip,2,NCP,903,2445,87,1,[2445]\r\nNCP-7.zip,2,NCP,232,1609,61,2,\"[1608, 1609]\"\r\nNCP-2.zip,2,NCP,119,1379,147,2,\"[1379, 1380]\"\r\nNormal-2.zip,0,Normal,1739,1042,278,3,\"[1042, 1043, 1044]\"\r\nCP-28.zip,1,CP,3791,5735,26,1,[5735]\r\nNCP-27.zip,2,NCP,828,2341,45,1,[2341]\r\nNCP-12.zip,2,NCP,314,1775,139,2,\"[1775, 1776]\"\r\nNCP-6.zip,2,NCP,20,1173,54,2,\"[1172, 1173]\"\r\nCP-13.zip,1,CP,1490,4073,69,3,\"[4071, 4072, 4073]\"\r\nNCP-20.zip,2,NCP,569,2297,60,2,\"[2296, 2297]\"\r\nNormal-2.zip,0,Normal,1759,1113,59,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nNCP-30.zip,2,NCP,987,2536,71,2,\"[2536, 2537]\"\r\nCP-15.zip,1,CP,1579,4267,20,1,[4267]\r\nCP-19.zip,1,CP,1790,3208,69,2,\"[3208, 3209]\"\r\nNCP-20.zip,2,NCP,568,2295,61,2,\"[2294, 2295]\"\r\nNormal-13.zip,0,Normal,2036,491,102,1,[491]\r\nNCP-26.zip,2,NCP,3973,5482,48,1,[5482]\r\nCP-27.zip,1,CP,3743,5687,22,1,[5687]\r\nNormal-11.zip,0,Normal,1981,436,91,1,[436]\r\nNCP-2.zip,2,NCP,125,1391,127,5,\"[1391, 1392, 1393, 1394, 1395]\"\r\nNCP-9.zip,2,NCP,270,1686,147,2,\"[1686, 1687]\"\r\nNormal-23.zip,0,Normal,2636,146,42,1,[146]\r\nNCP-3.zip,2,NCP,1286,2727,64,1,[2727]\r\nCP-10.zip,1,CP,1386,3828,66,2,\"[3827, 3828]\"\r\nNormal-19.zip,0,Normal,2230,685,91,1,[685]\r\nNormal-5.zip,0,Normal,805,240,327,1,[240]\r\nNormal-26.zip,0,Normal,3891,5412,62,2,\"[5411, 5412]\"\r\nNCP-27.zip,2,NCP,2671,2691,51,1,[2691]\r\nNCP-27.zip,2,NCP,1059,2636,52,1,[2636]\r\nCP-8.zip,1,CP,1344,3730,58,3,\"[3728, 3729, 
3730]\"\r\nNormal-24.zip,0,Normal,2662,172,41,1,[172]\r\nNormal-3.zip,0,Normal,744,179,278,1,[179]\r\nCP-5.zip,1,CP,1202,3420,207,1,[3420]\r\nNCP-27.zip,2,NCP,1006,2567,19,2,\"[2566, 2567]\"\r\nNormal-19.zip,0,Normal,2248,703,87,1,[703]\r\nNormal-2.zip,0,Normal,1736,1034,25,5,\"[1032, 1033, 1034, 1035, 1036]\"\r\nNormal-8.zip,0,Normal,1867,322,87,1,[322]\r\nNormal-23.zip,0,Normal,2609,119,40,1,[119]\r\nCP-11.zip,1,CP,1453,3980,56,3,\"[3979, 3980, 3981]\"\r\nNormal-26.zip,0,Normal,3875,5387,24,1,[5387]\r\nCP-26.zip,1,CP,3647,5607,32,1,[5607]\r\nNormal-12.zip,0,Normal,2006,461,77,1,[461]\r\nNormal-6.zip,0,Normal,1827,282,99,1,[282]\r\nNCP-19.zip,2,NCP,533,2224,156,1,[2224]\r\nNCP-11.zip,2,NCP,287,1720,60,2,\"[1719, 1720]\"\r\nNCP-7.zip,2,NCP,2487,2687,38,1,[2687]\r\nCP-3.zip,1,CP,1160,3378,318,1,[3378]\r\nNormal-7.zip,0,Normal,1858,313,95,1,[313]\r\nCP-13.zip,1,CP,1514,4129,61,2,\"[4129, 4130]\"\r\nNCP-20.zip,2,NCP,561,2280,139,2,\"[2280, 2281]\"\r\nCP-14.zip,1,CP,1527,4161,58,3,\"[4160, 4161, 4162]\"\r\nCP-25.zip,1,CP,721,3083,86,1,[3083]\r\nCP-13.zip,1,CP,1496,4091,55,2,\"[4090, 4091]\"\r\nNormal-1.zip,0,Normal,1728,1013,66,4,\"[1013, 1014, 1015, 1016]\"\r\nNCP-12.zip,2,NCP,317,1781,117,2,\"[1781, 1782]\"\r\nCP-19.zip,1,CP,2437,2906,132,3,\"[2905, 2906, 2907]\"\r\nNCP-5.zip,2,NCP,196,1536,131,2,\"[1536, 1537]\"\r\nCP-11.zip,1,CP,1437,3942,57,2,\"[3942, 3943]\"\r\nNCP-5.zip,2,NCP,182,1508,130,2,\"[1508, 1509]\"\r\nCP-9.zip,1,CP,1363,3774,64,2,\"[3774, 3775]\"\r\nCP-10.zip,1,CP,1401,3863,51,3,\"[3862, 3863, 3864]\"\r\nNCP-10.zip,2,NCP,275,1696,153,2,\"[1696, 1697]\"\r\nCP-30.zip,1,CP,3931,5633,68,4,\"[5630, 5631, 5632, 5633]\"\r\nNCP-7.zip,2,NCP,234,1614,58,2,\"[1613, 1614]\"\r\nNCP-3.zip,2,NCP,1296,2737,66,1,[2737]\r\nNCP-11.zip,2,NCP,283,1712,62,1,[1712]\r\nCP-9.zip,1,CP,1363,3775,64,2,\"[3774, 3775]\"\r\nNCP-17.zip,2,NCP,464,2082,144,2,\"[2082, 2083]\"\r\nCP-12.zip,1,CP,1473,4027,51,3,\"[4026, 4027, 
4028]\"\r\nCP-28.zip,1,CP,3781,5725,20,1,[5725]\r\nNCP-14.zip,2,NCP,391,1933,55,2,\"[1932, 1933]\"\r\nNormal-13.zip,0,Normal,2032,487,85,1,[487]\r\nNCP-28.zip,2,NCP,872,2403,183,2,\"[2403, 2404]\"\r\nNCP-17.zip,2,NCP,479,2113,139,2,\"[2113, 2114]\"\r\nNCP-11.zip,2,NCP,305,1758,65,2,\"[1756, 1758]\"\r\nNCP-1.zip,2,NCP,1021,2587,201,4,\"[2587, 2588, 2589, 2590]\"\r\nNCP-30.zip,2,NCP,957,2500,50,1,[2500]\r\nNormal-17.zip,0,Normal,2172,627,91,1,[627]\r\nCP-7.zip,1,CP,1316,3667,147,3,\"[3667, 3668, 3669]\"\r\nNCP-24.zip,2,NCP,971,2514,74,1,[2514]\r\nNCP-18.zip,2,NCP,494,2145,65,2,\"[2144, 2145]\"\r\nNCP-30.zip,2,NCP,987,2537,368,2,\"[2536, 2537]\"\r\nNormal-13.zip,0,Normal,2048,503,94,1,[503]\r\nCP-8.zip,1,CP,1347,3736,265,3,\"[3736, 3737, 3738]\"\r\nNCP-15.zip,2,NCP,41,1215,63,2,\"[1214, 1215]\"\r\nCP-12.zip,1,CP,1456,3989,52,3,\"[3988, 3989, 3990]\"\r\nNCP-21.zip,2,NCP,80,1294,54,2,\"[1293, 1294]\"\r\nCP-29.zip,1,CP,3808,5752,23,1,[5752]\r\nCP-26.zip,1,CP,3732,5671,53,2,\"[5671, 5672]\"\r\nNCP-8.zip,2,NCP,251,1648,131,2,\"[1648, 1649]\"\r\nNormal-2.zip,0,Normal,1755,1099,71,4,\"[1097, 1098, 1099, 1100]\"\r\nNormal-2.zip,0,Normal,1759,1120,66,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nNCP-28.zip,2,NCP,874,2407,341,1,[2407]\r\nNormal-1.zip,0,Normal,1730,1023,59,5,\"[1019, 1020, 1021, 1022, 1023]\"\r\nNormal-8.zip,0,Normal,1891,346,96,1,[346]\r\nCP-4.zip,1,CP,1170,3388,180,1,[3388]\r\nCP-10.zip,1,CP,1395,3849,63,2,\"[3849, 3850]\"\r\nNCP-2.zip,2,NCP,123,1387,148,2,\"[1387, 1388]\"\r\nNCP-16.zip,2,NCP,446,2047,61,2,\"[2046, 2047]\"\r\nNCP-2.zip,2,NCP,111,1364,56,2,\"[1363, 1364]\"\r\nCP-15.zip,1,CP,1568,4256,22,1,[4256]\r\nNCP-21.zip,2,NCP,79,1291,131,2,\"[1291, 1292]\"\r\nCP-10.zip,1,CP,1395,3850,63,2,\"[3849, 3850]\"\r\nCP-14.zip,1,CP,1542,4199,54,3,\"[4197, 4198, 4199]\"\r\nCP-15.zip,1,CP,1555,4229,62,2,\"[4228, 4229]\"\r\nCP-21.zip,1,CP,606,2968,255,1,[2968]\r\nCP-12.zip,1,CP,1480,4042,54,2,\"[4042, 
4043]\"\r\nNCP-27.zip,2,NCP,1063,2640,82,1,[2640]\r\nNormal-7.zip,0,Normal,1831,286,99,1,[286]\r\nCP-14.zip,1,CP,1552,4221,62,2,\"[4221, 4222]\"\r\nNCP-19.zip,2,NCP,541,2240,51,2,\"[2239, 2240]\"\r\nNCP-23.zip,2,NCP,91,1318,100,2,\"[1318, 1319]\"\r\nNormal-2.zip,0,Normal,1739,1043,56,3,\"[1042, 1043, 1044]\"\r\nCP-19.zip,1,CP,2437,2907,183,3,\"[2905, 2906, 2907]\"\r\nCP-10.zip,1,CP,1396,3851,139,3,\"[3851, 3852, 3853]\"\r\nCP-13.zip,1,CP,1490,4072,69,3,\"[4071, 4072, 4073]\"\r\nCP-6.zip,1,CP,1242,3460,229,1,[3460]\r\nNCP-17.zip,2,NCP,471,2098,59,2,\"[2097, 2098]\"\r\nNCP-16.zip,2,NCP,434,2021,119,2,\"[2021, 2022]\"\r\nNCP-16.zip,2,NCP,446,2046,146,2,\"[2046, 2047]\"\r\nNCP-21.zip,2,NCP,69,1270,113,2,\"[1270, 1271]\"\r\nNormal-9.zip,0,Normal,1896,351,98,1,[351]\r\nNCP-9.zip,2,NCP,2709,2702,44,1,[2702]\r\nNCP-29.zip,2,NCP,907,2449,287,1,[2449]\r\nNCP-2.zip,2,NCP,106,1349,150,2,\"[1349, 1350]\"\r\nNCP-17.zip,2,NCP,477,2109,139,2,\"[2109, 2110]\"\r\nCP-27.zip,1,CP,3734,5677,163,3,\"[5676, 5677, 5678]\"\r\nNormal-8.zip,0,Normal,1877,332,88,1,[332]\r\nNormal-7.zip,0,Normal,1853,308,94,1,[308]\r\nNCP-2.zip,2,NCP,1272,2713,62,1,[2713]\r\nCP-13.zip,1,CP,1515,4132,57,3,\"[4131, 4132, 4133]\"\r\nNCP-21.zip,2,NCP,68,1269,49,2,\"[1268, 1269]\"\r\nCP-25.zip,1,CP,719,3081,128,1,[3081]\r\nNCP-10.zip,2,NCP,276,1698,139,2,\"[1698, 1699]\"\r\nNCP-11.zip,2,NCP,294,1734,57,2,\"[1733, 1734]\"\r\nCP-8.zip,1,CP,1342,3724,58,3,\"[3723, 3724, 3725]\"\r\nNormal-4.zip,0,Normal,783,218,118,1,[218]\r\nNormal-11.zip,0,Normal,1977,432,96,1,[432]\r\nCP-12.zip,1,CP,1460,3998,60,2,\"[3998, 3999]\"\r\nNCP-12.zip,2,NCP,32,1197,61,2,\"[1196, 1197]\"\r\nNormal-2.zip,0,Normal,1736,1035,55,5,\"[1032, 1033, 1034, 1035, 1036]\"\r\nCP-7.zip,1,CP,1308,3650,219,2,\"[3649, 3650]\"\r\nNCP-17.zip,2,NCP,485,2125,153,2,\"[2125, 2126]\"\r\nNormal-24.zip,0,Normal,2649,159,26,1,[159]\r\nCP-1.zip,1,CP,1082,3127,74,1,[3127]\r\nCP-28.zip,1,CP,3788,5732,26,1,[5732]\r\nNormal-3.zip,0,Normal,1764,1143,66,4,\"[1143, 1144, 
1145, 1146]\"\r\nNCP-2.zip,2,NCP,125,1392,132,5,\"[1391, 1392, 1393, 1394, 1395]\"\r\nNCP-1.zip,2,NCP,1013,2577,524,1,[2577]\r\nCP-22.zip,1,CP,630,2992,118,1,[2992]\r\nNormal-27.zip,0,Normal,3897,5423,70,4,\"[5423, 5424, 5426, 5427]\"\r\nCP-1.zip,1,CP,1088,3222,50,4,\"[3220, 3221, 3222, 3223]\"\r\nNCP-11.zip,2,NCP,294,1733,136,2,\"[1733, 1734]\"\r\nCP-3.zip,1,CP,1135,3353,202,1,[3353]\r\nCP-10.zip,1,CP,1408,3879,59,3,\"[3878, 3879, 3880]\"\r\nCP-19.zip,1,CP,1791,3213,71,4,\"[3210, 3211, 3212, 3213]\"\r\nNormal-1.zip,0,Normal,1709,974,61,2,\"[973, 974]\"\r\nCP-11.zip,1,CP,1438,3945,46,2,\"[3944, 3945]\"\r\nCP-8.zip,1,CP,1325,3687,64,2,\"[3686, 3687]\"\r\nCP-20.zip,1,CP,2761,3292,38,1,[3292]\r\nNCP-17.zip,2,NCP,470,2096,64,2,\"[2095, 2096]\"\r\nNCP-4.zip,2,NCP,164,1472,150,2,\"[1472, 1473]\"\r\nNCP-14.zip,2,NCP,380,1912,148,2,\"[1912, 1913]\"\r\nCP-7.zip,1,CP,1266,3484,134,1,[3484]\r\nCP-10.zip,1,CP,1400,3860,54,2,\"[3860, 3861]\"\r\nNCP-10.zip,2,NCP,281,1708,121,2,\"[1708, 1709]\"\r\nNCP-14.zip,2,NCP,397,1944,158,2,\"[1944, 1945]\"\r\nCP-27.zip,1,CP,3734,5678,32,3,\"[5676, 5677, 5678]\"\r\nCP-15.zip,1,CP,1559,4238,53,2,\"[4237, 4238]\"\r\nNormal-26.zip,0,Normal,3888,5406,63,1,[5406]\r\nNCP-11.zip,2,NCP,308,1764,49,2,\"[1763, 1764]\"\r\nNCP-16.zip,2,NCP,435,2024,62,2,\"[2023, 2024]\"\r\nNCP-11.zip,2,NCP,285,1715,149,2,\"[1715, 1716]\"\r\nNCP-20.zip,2,NCP,568,2294,144,2,\"[2294, 2295]\"\r\nNCP-20.zip,2,NCP,550,2257,143,2,\"[2257, 2258]\"\r\nNCP-6.zip,2,NCP,218,1581,58,2,\"[1580, 1581]\"\r\nNormal-15.zip,0,Normal,2092,547,87,1,[547]\r\nCP-10.zip,1,CP,1396,3853,58,3,\"[3851, 3852, 3853]\"\r\nNormal-12.zip,0,Normal,2010,465,91,1,[465]\r\nNormal-18.zip,0,Normal,2194,649,89,1,[649]\r\nNCP-10.zip,2,NCP,276,1699,58,2,\"[1698, 1699]\"\r\nCP-27.zip,1,CP,3746,5690,17,1,[5690]\r\nNormal-24.zip,0,Normal,2656,166,34,1,[166]\r\nCP-29.zip,1,CP,3802,5746,26,1,[5746]\r\nCP-17.zip,1,CP,1641,4329,26,1,[4329]\r\nNormal-2.zip,0,Normal,1749,1072,66,4,\"[1069, 1070, 1071, 
1072]\"\r\nCP-9.zip,1,CP,1373,3800,55,2,\"[3800, 3801]\"\r\nNormal-22.zip,0,Normal,2596,106,44,1,[106]\r\nNormal-14.zip,0,Normal,2072,527,77,1,[527]\r\nNormal-20.zip,0,Normal,2251,706,89,1,[706]\r\nCP-12.zip,1,CP,1482,4049,75,3,\"[4047, 4048, 4049]\"\r\nCP-6.zip,1,CP,1231,3449,375,1,[3449]\r\nCP-28.zip,1,CP,3797,5741,28,1,[5741]\r\nCP-7.zip,1,CP,1307,3648,242,4,\"[3645, 3646, 3647, 3648]\"\r\nNCP-1.zip,2,NCP,1030,2600,279,1,[2600]\r\nCP-11.zip,1,CP,1448,3970,62,2,\"[3969, 3970]\"\r\nNormal-20.zip,0,Normal,2255,710,95,1,[710]\r\nCP-2.zip,1,CP,1124,3342,215,1,[3342]\r\nNCP-28.zip,2,NCP,872,2404,46,2,\"[2403, 2404]\"\r\nNormal-3.zip,0,Normal,1765,1147,60,2,\"[1147, 1148]\"\r\nNCP-11.zip,2,NCP,289,1724,47,2,\"[1723, 1724]\"\r\nCP-11.zip,1,CP,1442,3956,58,3,\"[3954, 3955, 3956]\"\r\nCP-1.zip,1,CP,1081,3126,68,1,[3126]\r\nNormal-20.zip,0,Normal,2263,718,108,1,[718]\r\nNCP-19.zip,2,NCP,524,2204,191,1,[2204]\r\nNormal-4.zip,0,Normal,784,219,105,1,[219]\r\nCP-8.zip,1,CP,1337,3715,60,2,\"[3714, 3715]\"\r\nNCP-28.zip,2,NCP,841,2356,282,1,[2356]\r\nNCP-26.zip,2,NCP,3983,5510,40,1,[5510]\r\nCP-20.zip,1,CP,2767,3298,35,1,[3298]\r\nNormal-19.zip,0,Normal,2229,684,87,1,[684]\r\nNCP-15.zip,2,NCP,429,2012,55,2,\"[2011, 2012]\"\r\nCP-19.zip,1,CP,1788,3197,52,8,\"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]\"\r\nCP-15.zip,1,CP,1558,4235,62,3,\"[4234, 4235, 4236]\"\r\nNCP-3.zip,2,NCP,1280,2721,50,1,[2721]\r\nNCP-4.zip,2,NCP,148,1440,150,2,\"[1440, 1441]\"\r\nNormal-22.zip,0,Normal,2582,92,39,1,[92]\r\nNormal-23.zip,0,Normal,2623,133,35,1,[133]\r\nCP-13.zip,1,CP,1496,4090,55,2,\"[4090, 4091]\"\r\nCP-30.zip,1,CP,3835,5779,23,1,[5779]\r\nCP-11.zip,1,CP,1442,3954,139,3,\"[3954, 3955, 3956]\"\r\nNCP-15.zip,2,NCP,429,2011,131,2,\"[2011, 2012]\"\r\nCP-17.zip,1,CP,1621,4309,29,1,[4309]\r\nCP-6.zip,1,CP,1244,3462,87,1,[3462]\r\nNCP-1.zip,2,NCP,1021,2590,181,4,\"[2587, 2588, 2589, 2590]\"\r\nNCP-9.zip,2,NCP,2706,2672,51,1,[2672]\r\nNCP-14.zip,2,NCP,391,1932,131,2,\"[1932, 
1933]\"\r\nCP-3.zip,1,CP,1134,3352,330,1,[3352]\r\nCP-8.zip,1,CP,1346,3734,53,3,\"[3733, 3734, 3735]\"\r\nNCP-12.zip,2,NCP,320,1789,58,2,\"[1788, 1789]\"\r\nNCP-21.zip,2,NCP,77,1287,126,2,\"[1287, 1288]\"\r\nCP-17.zip,1,CP,1647,4335,23,1,[4335]\r\nCP-11.zip,1,CP,1453,3979,221,3,\"[3979, 3980, 3981]\"\r\nNormal-2.zip,0,Normal,1759,1117,65,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nNCP-17.zip,2,NCP,481,2118,68,2,\"[2117, 2118]\"\r\nNCP-3.zip,2,NCP,1279,2720,66,1,[2720]\r\nCP-8.zip,1,CP,1346,3733,53,3,\"[3733, 3734, 3735]\"\r\nNormal-10.zip,0,Normal,1954,409,88,1,[409]\r\nCP-17.zip,1,CP,1648,4336,29,1,[4336]\r\nCP-14.zip,1,CP,1524,4154,58,3,\"[4152, 4153, 4154]\"\r\nNormal-18.zip,0,Normal,2216,671,97,1,[671]\r\nNCP-27.zip,2,NCP,179,1503,43,2,\"[1503, 1502]\"\r\nCP-19.zip,1,CP,1788,3202,55,8,\"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]\"\r\nCP-12.zip,1,CP,1482,4048,75,3,\"[4047, 4048, 4049]\"\r\nNCP-29.zip,2,NCP,913,2455,268,1,[2455]\r\nCP-19.zip,1,CP,2444,2919,112,2,\"[2918, 2919]\"\r\nCP-22.zip,1,CP,639,3001,136,1,[3001]\r\nNCP-2.zip,2,NCP,121,1383,100,2,\"[1383, 1384]\"\r\nCP-8.zip,1,CP,1324,3685,58,2,\"[3684, 3685]\"\r\nCP-11.zip,1,CP,1430,3929,77,2,\"[3928, 3929]\"\r\nNCP-15.zip,2,NCP,401,1952,58,2,\"[1951, 1952]\"\r\nNormal-4.zip,0,Normal,788,223,336,1,[223]\r\nNormal-27.zip,0,Normal,3898,5428,74,1,[5428]\r\nNormal-21.zip,0,Normal,2312,767,88,1,[767]\r\nNormal-17.zip,0,Normal,2170,625,62,1,[625]\r\nNCP-3.zip,2,NCP,130,1406,59,2,\"[1405, 1406]\"\r\nCP-3.zip,1,CP,1154,3372,169,1,[3372]\r\nNormal-3.zip,0,Normal,1765,1148,60,2,\"[1147, 1148]\"\r\nNormal-11.zip,0,Normal,1962,417,78,1,[417]\r\nCP-18.zip,1,CP,1667,4355,26,1,[4355]\r\nCP-1.zip,1,CP,1066,3105,59,1,[3105]\r\nNCP-1.zip,2,NCP,1047,2619,473,1,[2619]\r\nNCP-10.zip,2,NCP,2711,2704,44,1,[2704]\r\nNormal-19.zip,0,Normal,2237,692,85,1,[692]\r\nNCP-11.zip,2,NCP,289,1723,110,2,\"[1723, 1724]\"\r\nNCP-7.zip,2,NCP,240,1626,66,2,\"[1625, 
1626]\"\r\nNormal-11.zip,0,Normal,1974,429,96,1,[429]\r\nNormal-26.zip,0,Normal,3887,5401,67,3,\"[5400, 5401, 5404]\"\r\nNormal-26.zip,0,Normal,3891,5411,67,2,\"[5411, 5412]\"\r\nNormal-18.zip,0,Normal,2191,646,106,1,[646]\r\nNCP-28.zip,2,NCP,840,2355,55,1,[2355]\r\nNormal-6.zip,0,Normal,1814,269,88,1,[269]\r\nNCP-12.zip,2,NCP,329,1807,66,2,\"[1806, 1807]\"\r\nCP-24.zip,1,CP,686,3048,133,1,[3048]\r\nCP-19.zip,1,CP,2432,2894,124,1,[2894]\r\nNormal-10.zip,0,Normal,1952,407,107,1,[407]\r\nCP-13.zip,1,CP,1515,4133,57,3,\"[4131, 4132, 4133]\"\r\nCP-8.zip,1,CP,1347,3737,34,3,\"[3736, 3737, 3738]\"\r\nNormal-2.zip,0,Normal,1754,1095,69,4,\"[1093, 1094, 1095, 1096]\"\r\nCP-22.zip,1,CP,622,2984,459,1,[2984]\r\nCP-3.zip,1,CP,1141,3359,350,1,[3359]\r\nCP-14.zip,1,CP,1533,4173,100,3,\"[4173, 4174, 4175]\"\r\nNormal-10.zip,0,Normal,1935,390,91,1,[390]\r\nNormal-22.zip,0,Normal,2320,775,91,1,[775]\r\nNormal-25.zip,0,Normal,3859,5371,216,1,[5371]\r\nNormal-12.zip,0,Normal,2018,473,93,1,[473]\r\nCP-9.zip,1,CP,1359,3764,181,3,\"[3764, 3765, 3766]\"\r\nCP-20.zip,1,CP,2452,2931,298,1,[2931]\r\nNCP-23.zip,2,NCP,90,1316,100,2,\"[1316, 1317]\"\r\nNormal-2.zip,0,Normal,1744,1058,71,2,\"[1058, 1059]\"\r\nNCP-18.zip,2,NCP,492,2141,58,2,\"[2140, 2141]\"\r\nNormal-13.zip,0,Normal,2053,508,81,1,[508]\r\nNormal-17.zip,0,Normal,2156,611,82,1,[611]\r\nNCP-19.zip,2,NCP,541,2239,121,2,\"[2239, 2240]\"\r\nNCP-19.zip,2,NCP,531,2221,58,2,\"[2220, 2221]\"\r\nCP-19.zip,1,CP,2448,2925,104,2,\"[2925, 2926]\"\r\nCP-31.zip,1,CP,4044,5593,276,1,[5593]\r\nCP-8.zip,1,CP,1345,3732,55,2,\"[3731, 3732]\"\r\nNormal-3.zip,0,Normal,743,178,340,1,[178]\r\nNormal-23.zip,0,Normal,2613,123,40,1,[123]\r\nNormal-1.zip,0,Normal,1714,983,71,3,\"[982, 983, 984]\"\r\nNCP-8.zip,2,NCP,268,1683,53,2,\"[1682, 1683]\"\r\nCP-8.zip,1,CP,1347,3738,34,3,\"[3736, 3737, 3738]\"\r\nCP-25.zip,1,CP,718,3080,466,1,[3080]\r\nNormal-13.zip,0,Normal,2024,479,86,1,[479]\r\nNormal-1.zip,0,Normal,1668,780,63,4,\"[778, 779, 780, 
781]\"\r\nCP-17.zip,1,CP,1636,4324,26,1,[4324]\r\nNCP-20.zip,2,NCP,55,1244,63,2,\"[1243, 1244]\"\r\nCP-32.zip,1,CP,2463,3227,77,1,[3227]\r\nNCP-16.zip,2,NCP,435,2023,153,2,\"[2023, 2024]\"\r\nNCP-2.zip,2,NCP,106,1350,63,2,\"[1349, 1350]\"\r\nCP-27.zip,1,CP,3753,5697,20,1,[5697]\r\nNCP-15.zip,2,NCP,415,1983,63,2,\"[1982, 1983]\"\r\nNCP-5.zip,2,NCP,191,1527,54,2,\"[1526, 1527]\"\r\nCP-3.zip,1,CP,1142,3360,138,1,[3360]\r\nNCP-19.zip,2,NCP,531,2220,139,2,\"[2220, 2221]\"\r\nCP-14.zip,1,CP,1524,4153,58,3,\"[4152, 4153, 4154]\"\r\nCP-19.zip,1,CP,1788,3196,49,8,\"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]\"\r\nCP-14.zip,1,CP,1533,4175,42,3,\"[4173, 4174, 4175]\"\r\nNCP-7.zip,2,NCP,244,1635,69,2,\"[1634, 1635]\"\r\nNormal-7.zip,0,Normal,1848,303,95,1,[303]\r\nCP-24.zip,1,CP,703,3065,120,1,[3065]\r\nNormal-1.zip,0,Normal,1731,1024,36,1,[1024]\r\nCP-11.zip,1,CP,1448,3969,62,2,\"[3969, 3970]\"\r\nNormal-25.zip,0,Normal,3850,5362,180,1,[5362]\r\nCP-7.zip,1,CP,13,3172,255,4,\"[3170, 3171, 3172, 3173]\"\r\nCP-14.zip,1,CP,1533,4174,42,3,\"[4173, 4174, 4175]\"\r\nNCP-3.zip,2,NCP,130,1405,140,2,\"[1405, 1406]\"\r\nCP-23.zip,1,CP,647,3009,384,1,[3009]\r\nNormal-24.zip,0,Normal,2637,147,36,1,[147]\r\nNCP-28.zip,2,NCP,848,2367,283,2,\"[2366, 2367]\"\r\nNormal-9.zip,0,Normal,1903,358,86,1,[358]\r\nNormal-26.zip,0,Normal,3889,5408,65,2,\"[5407, 5408]\"\r\nNCP-20.zip,2,NCP,567,2293,60,2,\"[2292, 2293]\"\r\nCP-22.zip,1,CP,621,2983,174,1,[2983]\r\nCP-10.zip,1,CP,1389,3835,51,3,\"[3833, 3834, 3835]\"\r\nCP-9.zip,1,CP,1362,3773,61,2,\"[3772, 3773]\"\r\nNormal-27.zip,0,Normal,3897,5426,72,4,\"[5423, 5424, 5426, 5427]\"\r\nNCP-15.zip,2,NCP,428,2010,53,2,\"[2009, 2010]\"\r\nNormal-3.zip,0,Normal,759,194,297,1,[194]\r\nCP-13.zip,1,CP,1497,4092,68,3,\"[4092, 4093, 4094]\"\r\nNormal-19.zip,0,Normal,2246,701,87,1,[701]\r\nCP-3.zip,1,CP,1130,3348,166,1,[3348]\r\nCP-14.zip,1,CP,1552,4222,62,2,\"[4221, 4222]\"\r\nNCP-26.zip,2,NCP,3994,5518,52,1,[5518]\r\nNCP-27.zip,2,NCP,328,1805,43,2,\"[1804, 
1805]\"\r\nNCP-13.zip,2,NCP,369,1889,138,2,\"[1889, 1890]\"\r\nCP-20.zip,1,CP,2756,3287,56,1,[3287]\r\nCP-22.zip,1,CP,638,3000,116,1,[3000]\r\nCP-6.zip,1,CP,1250,3468,451,1,[3468]\r\nCP-19.zip,1,CP,2437,2905,316,3,\"[2905, 2906, 2907]\"\r\nNormal-16.zip,0,Normal,2130,585,88,1,[585]\r\nNCP-14.zip,2,NCP,376,1904,142,2,\"[1904, 1905]\"\r\nNormal-10.zip,0,Normal,1932,387,91,1,[387]\r\nNCP-16.zip,2,NCP,453,2060,121,2,\"[2060, 2061]\"\r\nNCP-5.zip,2,NCP,191,1526,128,2,\"[1526, 1527]\"\r\nCP-12.zip,1,CP,1476,4034,53,2,\"[4033, 4034]\"\r\nNCP-5.zip,2,NCP,175,1495,55,2,\"[1494, 1495]\"\r\nNCP-21.zip,2,NCP,71,1275,53,2,\"[1274, 1275]\"\r\nNormal-10.zip,0,Normal,1925,380,90,1,[380]\r\nNCP-30.zip,2,NCP,994,2548,226,2,\"[2547, 2548]\"\r\nCP-4.zip,1,CP,1192,3410,184,1,[3410]\r\nNormal-23.zip,0,Normal,2631,141,38,1,[141]\r\nNCP-9.zip,2,NCP,2684,2697,50,1,[2697]\r\nCP-27.zip,1,CP,3757,5701,22,1,[5701]\r\nNCP-3.zip,2,NCP,1288,2729,61,1,[2729]\r\nNCP-18.zip,2,NCP,505,2166,157,2,\"[2166, 2167]\"\r\nCP-8.zip,1,CP,1348,3741,59,3,\"[3739, 3740, 3741]\"\r\nNormal-24.zip,0,Normal,2651,161,34,1,[161]\r\nNormal-23.zip,0,Normal,2618,128,35,1,[128]\r\nCP-8.zip,1,CP,1331,3702,62,2,\"[3701, 3702]\"\r\nNCP-14.zip,2,NCP,398,1947,70,2,\"[1946, 1947]\"\r\nNCP-4.zip,2,NCP,158,1460,122,2,\"[1460, 1461]\"\r\nNCP-23.zip,2,NCP,89,1312,157,4,\"[1311, 1312, 1313, 1315]\"\r\nNormal-2.zip,0,Normal,1759,1116,64,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nCP-17.zip,1,CP,1645,4333,26,1,[4333]\r\nCP-10.zip,1,CP,1408,3880,59,3,\"[3878, 3879, 3880]\"\r\nCP-30.zip,1,CP,3917,5541,62,1,[5541]\r\nNCP-30.zip,2,NCP,933,2475,23,1,[2475]\r\nCP-8.zip,1,CP,1344,3728,142,3,\"[3728, 3729, 3730]\"\r\nNCP-17.zip,2,NCP,459,2072,133,2,\"[2072, 2073]\"\r\nNCP-4.zip,2,NCP,150,1445,75,2,\"[1444, 1445]\"\r\nCP-12.zip,1,CP,1455,3986,58,3,\"[3985, 3986, 3987]\"\r\nNormal-27.zip,0,Normal,3897,5427,72,4,\"[5423, 5424, 5426, 5427]\"\r\nCP-18.zip,1,CP,1772,3177,81,6,\"[3176, 3177, 3178, 3179, 3180, 
3181]\"\r\nCP-27.zip,1,CP,3745,5689,23,1,[5689]\r\nNCP-29.zip,2,NCP,920,2462,183,1,[2462]\r\nNCP-9.zip,2,NCP,2688,2655,56,1,[2655]\r\nNormal-8.zip,0,Normal,1887,342,94,1,[342]\r\nCP-1.zip,1,CP,1076,3120,70,1,[3120]\r\nNormal-15.zip,0,Normal,2100,555,94,1,[555]\r\nNCP-11.zip,2,NCP,285,1716,62,2,\"[1715, 1716]\"\r\nCP-8.zip,1,CP,1344,3729,59,3,\"[3728, 3729, 3730]\"\r\nNormal-12.zip,0,Normal,2021,476,85,1,[476]\r\nNormal-15.zip,0,Normal,2105,560,87,1,[560]\r\nCP-9.zip,1,CP,1366,3784,57,3,\"[3782, 3783, 3784]\"\r\nCP-18.zip,1,CP,1772,3181,75,6,\"[3176, 3177, 3178, 3179, 3180, 3181]\"\r\nNCP-15.zip,2,NCP,426,2006,58,2,\"[2005, 2006]\"\r\nNCP-1.zip,2,NCP,1020,2586,45,1,[2586]\r\nNCP-13.zip,2,NCP,356,1863,124,2,\"[1863, 1864]\"\r\nNormal-8.zip,0,Normal,1865,320,99,1,[320]\r\nNCP-30.zip,2,NCP,994,2547,226,2,\"[2547, 2548]\"\r\nNormal-12.zip,0,Normal,2011,466,93,1,[466]\r\nCP-18.zip,1,CP,1773,3185,67,4,\"[3182, 3183, 3184, 3185]\"\r\nNCP-18.zip,2,NCP,505,2167,66,2,\"[2166, 2167]\"\r\nCP-8.zip,1,CP,1328,3694,69,2,\"[3693, 3694]\"\r\nNCP-2.zip,2,NCP,1278,2719,61,1,[2719]\r\nCP-25.zip,1,CP,736,3098,494,1,[3098]\r\nCP-24.zip,1,CP,7,3512,299,2,\"[3511, 3512]\"\r\nNormal-27.zip,0,Normal,3913,5455,71,2,\"[5454, 5455]\"\r\nNCP-6.zip,2,NCP,218,1580,139,2,\"[1580, 1581]\"\r\nNormal-4.zip,0,Normal,795,230,120,1,[230]\r\nNCP-6.zip,2,NCP,207,1559,46,2,\"[1558, 1559]\"\r\nNCP-5.zip,2,NCP,189,1523,58,2,\"[1522, 1523]\"\r\nNormal-22.zip,0,Normal,2314,769,84,1,[769]\r\nCP-14.zip,1,CP,1541,4195,58,3,\"[4194, 4195, 4196]\"\r\nNormal-26.zip,0,Normal,3866,5378,27,1,[5378]\r\nNCP-30.zip,2,NCP,938,2481,78,2,\"[2480, 2481]\"\r\nNCP-1.zip,2,NCP,1041,2612,126,1,[2612]\r\nNormal-24.zip,0,Normal,2664,174,28,1,[174]\r\nCP-14.zip,1,CP,1542,4198,54,3,\"[4197, 4198, 4199]\"\r\nCP-8.zip,1,CP,1332,3704,41,2,\"[3703, 3704]\"\r\nCP-14.zip,1,CP,1527,4160,142,3,\"[4160, 4161, 4162]\"\r\nNormal-2.zip,0,Normal,1749,1071,66,4,\"[1069, 1070, 1071, 1072]\"\r\nCP-7.zip,1,CP,13,3170,271,4,\"[3170, 3171, 3172, 
3173]\"\r\nCP-20.zip,1,CP,2769,3300,36,1,[3300]\r\nNormal-11.zip,0,Normal,1973,428,90,1,[428]\r\nCP-28.zip,1,CP,3783,5727,26,1,[5727]\r\nNCP-12.zip,2,NCP,320,1788,139,2,\"[1788, 1789]\"\r\nNormal-10.zip,0,Normal,1929,384,91,1,[384]\r\nNormal-7.zip,0,Normal,1841,296,79,1,[296]\r\nNormal-8.zip,0,Normal,1881,336,91,1,[336]\r\nNCP-25.zip,2,NCP,3964,5475,41,1,[5475]\r\nCP-12.zip,1,CP,1480,4043,54,2,\"[4042, 4043]\"\r\nNCP-23.zip,2,NCP,91,1319,43,2,\"[1318, 1319]\"\r\nNCP-11.zip,2,NCP,30,1193,56,1,[1193]\r\nNCP-29.zip,2,NCP,924,2466,18,1,[2466]\r\nCP-16.zip,1,CP,1614,4302,23,1,[4302]\r\nNormal-14.zip,0,Normal,2061,516,88,1,[516]\r\nNCP-27.zip,2,NCP,826,2339,54,1,[2339]\r\nNormal-13.zip,0,Normal,2038,493,80,1,[493]\r\nNormal-1.zip,0,Normal,1715,985,71,2,\"[985, 986]\"\r\nCP-28.zip,1,CP,3782,5726,25,1,[5726]\r\nCP-21.zip,1,CP,2777,3308,22,1,[3308]\r\nCP-8.zip,1,CP,1328,3693,69,2,\"[3693, 3694]\"\r\nNCP-17.zip,2,NCP,468,2091,154,2,\"[2091, 2092]\"\r\nNCP-13.zip,2,NCP,36,1205,59,2,\"[1204, 1205]\"\r\nNormal-12.zip,0,Normal,2000,455,93,1,[455]\r\nCP-19.zip,1,CP,2448,2926,102,2,\"[2925, 2926]\"\r\nNCP-10.zip,2,NCP,2728,2711,54,1,[2711]\r\nNCP-8.zip,2,NCP,263,1672,177,2,\"[1672, 1673]\"\r\nCP-30.zip,1,CP,3831,5775,25,1,[5775]\r\nNormal-1.zip,0,Normal,1709,973,61,2,\"[973, 974]\"\r\nCP-27.zip,1,CP,3751,5695,22,1,[5695]\r\nCP-11.zip,1,CP,1453,3981,56,3,\"[3979, 3980, 3981]\"\r\nCP-16.zip,1,CP,1617,4305,23,1,[4305]\r\nCP-19.zip,1,CP,1788,3198,53,8,\"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]\"\r\nCP-19.zip,1,CP,2447,2924,86,2,\"[2923, 2924]\"\r\nNCP-20.zip,2,NCP,55,1243,150,2,\"[1243, 1244]\"\r\nCP-1.zip,1,CP,1069,3108,77,4,\"[3108, 3109, 3110, 3111]\"\r\nCP-29.zip,1,CP,3827,5771,26,1,[5771]\r\nCP-16.zip,1,CP,1599,4287,17,1,[4287]\r\nNCP-12.zip,2,NCP,34,1201,64,2,\"[1200, 1201]\"\r\nNCP-19.zip,2,NCP,523,2202,148,2,\"[2202, 2203]\"\r\nCP-19.zip,1,CP,2429,2890,100,1,[2890]\r\nNCP-9.zip,2,NCP,2695,2661,45,1,[2661]\r\nNormal-1.zip,0,Normal,1730,1022,59,5,\"[1019, 1020, 1021, 
1022, 1023]\"\r\nCP-24.zip,1,CP,7,3511,298,2,\"[3511, 3512]\"\r\nNCP-27.zip,2,NCP,1045,2617,30,1,[2617]\r\nNormal-15.zip,0,Normal,2088,543,75,1,[543]\r\nNormal-25.zip,0,Normal,3853,5365,205,1,[5365]\r\nNormal-14.zip,0,Normal,2076,531,77,1,[531]\r\nNCP-22.zip,2,NCP,84,1301,127,2,\"[1301, 1302]\"\r\nCP-18.zip,1,CP,1660,4348,23,1,[4348]\r\nNCP-26.zip,2,NCP,3980,5487,38,1,[5487]\r\nCP-20.zip,1,CP,2758,3289,35,1,[3289]\r\nNormal-6.zip,0,Normal,1808,263,95,1,[263]\r\nNormal-2.zip,0,Normal,1739,1044,56,3,\"[1042, 1043, 1044]\"\r\nCP-1.zip,1,CP,1068,3107,62,1,[3107]\r\nNormal-14.zip,0,Normal,2083,538,87,1,[538]\r\nCP-12.zip,1,CP,1484,4054,46,3,\"[4053, 4054, 4055]\"\r\nCP-29.zip,1,CP,3811,5755,23,1,[5755]\r\nCP-14.zip,1,CP,1548,4213,51,2,\"[4213, 4214]\"\r\nNCP-20.zip,2,NCP,561,2281,58,2,\"[2280, 2281]\"\r\nCP-14.zip,1,CP,1544,4204,51,3,\"[4203, 4204, 4205]\"\r\nNCP-27.zip,2,NCP,1062,2639,176,1,[2639]\r\nCP-25.zip,1,CP,735,3097,110,1,[3097]\r\nCP-2.zip,1,CP,1115,3333,180,1,[3333]\r\nCP-27.zip,1,CP,3756,5700,20,1,[5700]\r\nNormal-5.zip,0,Normal,813,248,136,1,[248]\r\nNormal-19.zip,0,Normal,2221,676,103,1,[676]\r\nNormal-27.zip,0,Normal,3902,5434,73,1,[5434]\r\nCP-11.zip,1,CP,1437,3943,57,2,\"[3942, 3943]\"\r\nNCP-2.zip,2,NCP,126,1398,64,2,\"[1396, 1398]\"\r\nNormal-20.zip,0,Normal,2265,720,87,1,[720]\r\nCP-16.zip,1,CP,1589,4277,23,1,[4277]\r\nNormal-16.zip,0,Normal,2149,604,85,1,[604]\r\nNCP-19.zip,2,NCP,523,2203,62,2,\"[2202, 2203]\"\r\nCP-12.zip,1,CP,1455,3985,138,3,\"[3985, 3986, 3987]\"\r\nCP-30.zip,1,CP,4040,5589,38,1,[5589]\r\nNCP-1.zip,2,NCP,1049,2622,205,1,[2622]\r\nNormal-1.zip,0,Normal,1674,811,74,2,\"[810, 811]\"\r\nNCP-19.zip,2,NCP,539,2236,55,2,\"[2235, 2236]\"\r\nNormal-1.zip,0,Normal,1668,779,60,4,\"[778, 779, 780, 781]\"\r\nNCP-19.zip,2,NCP,542,2241,130,2,\"[2241, 2242]\"\r\nCP-25.zip,1,CP,739,3101,112,1,[3101]\r\nCP-9.zip,1,CP,1367,3785,140,3,\"[3785, 3786, 3787]\"\r\nCP-14.zip,1,CP,1549,4215,61,2,\"[4215, 4216]\"\r\nNCP-19.zip,2,NCP,53,1239,144,2,\"[1239, 
1240]\"\r\nNormal-1.zip,0,Normal,1730,1021,294,5,\"[1019, 1020, 1021, 1022, 1023]\"\r\nNCP-29.zip,2,NCP,918,2460,213,1,[2460]\r\nNCP-23.zip,2,NCP,89,1311,138,4,\"[1311, 1312, 1313, 1315]\"\r\nNormal-2.zip,0,Normal,1759,1119,66,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nCP-9.zip,1,CP,1359,3765,46,3,\"[3764, 3765, 3766]\"\r\nNCP-25.zip,2,NCP,3706,5533,54,1,[5533]\r\nCP-8.zip,1,CP,1325,3686,65,2,\"[3686, 3687]\"\r\nNCP-19.zip,2,NCP,545,2248,57,2,\"[2247, 2248]\"\r\nNCP-15.zip,2,NCP,418,1989,143,2,\"[1989, 1990]\"\r\nCP-7.zip,1,CP,1261,3479,198,1,[3479]\r\nNCP-29.zip,2,NCP,895,2435,143,2,\"[2435, 2436]\"\r\nCP-12.zip,1,CP,1483,4051,62,3,\"[4050, 4051, 4052]\"\r\nCP-12.zip,1,CP,1460,3999,60,2,\"[3998, 3999]\"\r\nCP-12.zip,1,CP,1456,3988,122,3,\"[3988, 3989, 3990]\"\r\nNormal-12.zip,0,Normal,2014,469,98,1,[469]\r\nCP-14.zip,1,CP,1542,4197,180,3,\"[4197, 4198, 4199]\"\r\nNormal-2.zip,0,Normal,1755,1098,73,4,\"[1097, 1098, 1099, 1100]\"\r\nNCP-14.zip,2,NCP,382,1917,58,2,\"[1916, 1917]\"\r\nNCP-4.zip,2,NCP,153,1451,58,2,\"[1450, 1451]\"\r\nNormal-27.zip,0,Normal,3913,5454,68,2,\"[5454, 5455]\"\r\nNormal-1.zip,0,Normal,1674,810,74,2,\"[810, 811]\"\r\nNormal-2.zip,0,Normal,1736,1036,55,5,\"[1032, 1033, 1034, 1035, 1036]\"\r\nNormal-1.zip,0,Normal,1682,852,81,6,\"[847, 848, 852, 853, 857, 858]\"\r\nNormal-4.zip,0,Normal,796,231,287,1,[231]\r\nNCP-11.zip,2,NCP,292,1729,138,2,\"[1729, 1730]\"\r\nNCP-12.zip,2,NCP,327,1803,55,2,\"[1802, 1803]\"\r\nNormal-25.zip,0,Normal,3712,5342,28,1,[5342]\r\nCP-4.zip,1,CP,1182,3400,130,1,[3400]\r\nCP-2.zip,1,CP,1113,3331,197,1,[3331]\r\nNCP-22.zip,2,NCP,888,2426,55,1,[2426]\r\nNormal-25.zip,0,Normal,3846,5358,209,1,[5358]\r\nCP-9.zip,1,CP,1362,3772,61,2,\"[3772, 3773]\"\r\nCP-4.zip,1,CP,1193,3411,190,1,[3411]\r\nNormal-5.zip,0,Normal,802,237,298,1,[237]\r\nCP-23.zip,1,CP,655,3017,511,1,[3017]\r\nNCP-13.zip,2,NCP,360,1871,121,2,\"[1871, 
1872]\"\r\nNCP-30.zip,2,NCP,977,2521,257,1,[2521]\r\nNCP-26.zip,2,NCP,3990,5514,51,1,[5514]\r\nNormal-3.zip,0,Normal,768,203,130,1,[203]\r\nNormal-1.zip,0,Normal,1713,980,71,2,\"[980, 981]\"\r\nCP-26.zip,1,CP,3732,5672,53,2,\"[5671, 5672]\"\r\nCP-20.zip,1,CP,2762,3293,33,1,[3293]\r\nNormal-20.zip,0,Normal,2267,722,100,1,[722]\r\nNCP-5.zip,2,NCP,189,1522,139,2,\"[1522, 1523]\"\r\nNCP-28.zip,2,NCP,848,2366,57,2,\"[2366, 2367]\"\r\nNCP-6.zip,2,NCP,215,1575,65,2,\"[1574, 1575]\"\r\nNormal-27.zip,0,Normal,3905,5438,58,2,\"[5437, 5438]\"\r\nCP-4.zip,1,CP,1163,3381,239,1,[3381]\r\nCP-18.zip,1,CP,1665,4353,25,1,[4353]\r\nNormal-25.zip,0,Normal,3842,5354,189,1,[5354]\r\nNormal-22.zip,0,Normal,2583,93,46,1,[93]\r\nNCP-11.zip,2,NCP,308,1763,116,2,\"[1763, 1764]\"\r\nCP-4.zip,1,CP,1180,3398,150,1,[3398]\r\nCP-7.zip,1,CP,1316,3668,63,3,\"[3667, 3668, 3669]\"\r\nCP-5.zip,1,CP,1213,3431,159,1,[3431]\r\nNormal-10.zip,0,Normal,1947,402,89,1,[402]\r\nCP-24.zip,1,CP,698,3060,124,1,[3060]\r\nCP-15.zip,1,CP,1562,4243,55,2,\"[4243, 4244]\"\r\nNCP-25.zip,2,NCP,3962,5473,58,1,[5473]\r\nCP-18.zip,1,CP,1772,3180,75,6,\"[3176, 3177, 3178, 3179, 3180, 3181]\"\r\nNormal-13.zip,0,Normal,2029,484,94,1,[484]\r\nNCP-16.zip,2,NCP,443,2041,50,2,\"[2040, 2041]\"\r\nNCP-24.zip,2,NCP,984,2529,259,2,\"[2529, 2530]\"\r\nCP-18.zip,1,CP,1773,3183,61,4,\"[3182, 3183, 3184, 3185]\"\r\nCP-5.zip,1,CP,1194,3412,158,1,[3412]\r\nNCP-14.zip,2,NCP,39,1211,58,2,\"[1210, 1211]\"\r\nCP-13.zip,1,CP,15,3174,98,1,[3174]\r\nCP-28.zip,1,CP,3775,5719,29,1,[5719]\r\nNCP-17.zip,2,NCP,477,2110,58,2,\"[2109, 2110]\"\r\nNormal-16.zip,0,Normal,2133,588,73,1,[588]\r\nNCP-4.zip,2,NCP,150,1444,181,2,\"[1444, 1445]\"\r\nCP-4.zip,1,CP,1188,3406,308,1,[3406]\r\nNCP-8.zip,2,NCP,251,1649,55,2,\"[1648, 1649]\"\r\nCP-1.zip,1,CP,1094,3312,329,1,[3312]\r\nNCP-12.zip,2,NCP,327,1802,130,2,\"[1802, 1803]\"\r\nNormal-7.zip,0,Normal,1830,285,84,1,[285]\r\nCP-12.zip,1,CP,1481,4045,58,3,\"[4044, 4045, 
4046]\"\r\nNCP-19.zip,2,NCP,52,1238,57,2,\"[1237, 1238]\"\r\nNCP-20.zip,2,NCP,557,2271,132,2,\"[2271, 2272]\"\r\nNCP-14.zip,2,NCP,398,1946,167,2,\"[1946, 1947]\"\r\nNCP-8.zip,2,NCP,260,1667,68,2,\"[1666, 1667]\"\r\nNormal-2.zip,0,Normal,1754,1094,73,4,\"[1093, 1094, 1095, 1096]\"\r\nNormal-2.zip,0,Normal,1736,1032,124,5,\"[1032, 1033, 1034, 1035, 1036]\"\r\nNCP-11.zip,2,NCP,292,1730,58,2,\"[1729, 1730]\"\r\nCP-25.zip,1,CP,725,3087,80,1,[3087]\r\nCP-15.zip,1,CP,1558,4234,62,3,\"[4234, 4235, 4236]\"\r\nNormal-17.zip,0,Normal,2161,616,99,1,[616]\r\nNCP-23.zip,2,NCP,970,2513,62,1,[2513]\r\nNCP-10.zip,2,NCP,277,1700,152,2,\"[1700, 1701]\"\r\nNCP-14.zip,2,NCP,395,1941,71,2,\"[1940, 1941]\"\r\nNormal-2.zip,0,Normal,1755,1100,71,4,\"[1097, 1098, 1099, 1100]\"\r\nCP-26.zip,1,CP,3718,5648,254,2,\"[5647, 5648]\"\r\nNormal-25.zip,0,Normal,3841,5353,188,1,[5353]\r\nNormal-23.zip,0,Normal,2621,131,41,1,[131]\r\nNCP-20.zip,2,NCP,555,2267,133,2,\"[2267, 2268]\"\r\nNCP-7.zip,2,NCP,244,1634,165,2,\"[1634, 1635]\"\r\nNormal-6.zip,0,Normal,1821,276,102,1,[276]\r\nNCP-17.zip,2,NCP,459,2073,56,2,\"[2072, 2073]\"\r\nNCP-2.zip,2,NCP,124,1390,58,2,\"[1389, 1390]\"\r\nNormal-18.zip,0,Normal,2185,640,100,1,[640]\r\nNCP-5.zip,2,NCP,193,1530,124,2,\"[1530, 1531]\"\r\nNCP-8.zip,2,NCP,253,1652,139,2,\"[1652, 1653]\"\r\nNCP-23.zip,2,NCP,89,1313,58,4,\"[1311, 1312, 1313, 1315]\"\r\nCP-5.zip,1,CP,1216,3434,307,1,[3434]\r\nNCP-30.zip,2,NCP,979,2523,345,1,[2523]\r\nNCP-23.zip,2,NCP,97,1331,41,2,\"[1330, 1331]\"\r\nNCP-20.zip,2,NCP,555,2268,56,2,\"[2267, 2268]\"\r\nNormal-16.zip,0,Normal,2126,581,84,1,[581]\r\nNCP-18.zip,2,NCP,488,2133,58,2,\"[2131, 2133]\"\r\nNCP-10.zip,2,NCP,28,1189,61,2,\"[1188, 1189]\"\r\nNCP-15.zip,2,NCP,41,1214,151,2,\"[1214, 1215]\"\r\nNCP-12.zip,2,NCP,32,1196,145,2,\"[1196, 1197]\"\r\nCP-26.zip,1,CP,3722,5656,50,2,\"[5656, 
5657]\"\r\nCP-15.zip,1,CP,1573,4261,22,1,[4261]\r\nNCP-27.zip,2,NCP,1028,2598,147,1,[2598]\r\nNormal-18.zip,0,Normal,2197,652,105,1,[652]\r\nNormal-16.zip,0,Normal,2152,607,66,1,[607]\r\nNCP-14.zip,2,NCP,380,1913,62,2,\"[1912, 1913]\"\r\nNormal-15.zip,0,Normal,2093,548,72,1,[548]\r\nNCP-3.zip,2,NCP,1299,2740,63,1,[2740]\r\nCP-8.zip,1,CP,1348,3740,59,3,\"[3739, 3740, 3741]\"\r\nNormal-6.zip,0,Normal,1822,277,101,1,[277]\r\nNormal-4.zip,0,Normal,800,235,116,1,[235]\r\nCP-10.zip,1,CP,1386,3827,66,2,\"[3827, 3828]\"\r\nNormal-12.zip,0,Normal,2004,459,106,1,[459]\r\nNCP-25.zip,2,NCP,3957,5470,47,1,[5470]\r\nCP-5.zip,1,CP,1204,3422,294,1,[3422]\r\nCP-11.zip,1,CP,1420,3905,59,2,\"[3905, 3906]\"\r\nCP-17.zip,1,CP,1649,4337,23,1,[4337]\r\nCP-28.zip,1,CP,3769,5713,18,1,[5713]\r\nNormal-8.zip,0,Normal,1868,323,91,1,[323]\r\nCP-1.zip,1,CP,1087,3219,400,1,[3219]\r\nCP-26.zip,1,CP,3640,5599,295,1,[5599]\r\nNCP-7.zip,2,NCP,248,1642,139,2,\"[1642, 1643]\"\r\nNCP-2.zip,2,NCP,125,1395,55,5,\"[1391, 1392, 1393, 1394, 1395]\"\r\nNCP-25.zip,2,NCP,3941,5538,38,1,[5538]\r\nCP-15.zip,1,CP,1558,4236,62,3,\"[4234, 4235, 4236]\"\r\nNCP-16.zip,2,NCP,443,2040,117,2,\"[2040, 2041]\"\r\nNormal-15.zip,0,Normal,2102,557,100,1,[557]\r\nNormal-2.zip,0,Normal,1755,1097,73,4,\"[1097, 1098, 1099, 1100]\"\r\nNormal-9.zip,0,Normal,1924,379,98,1,[379]\r\nCP-13.zip,1,CP,1517,4136,64,2,\"[4136, 4137]\"\r\nCP-1.zip,1,CP,1,3146,70,5,\"[3143, 3144, 3145, 3146, 3147]\"\r\nNormal-19.zip,0,Normal,2226,681,99,1,[681]\r\nCP-13.zip,1,CP,1517,4137,64,2,\"[4136, 4137]\"\r\nNCP-23.zip,2,NCP,95,1326,165,2,\"[1326, 1327]\"\r\nNCP-19.zip,2,NCP,538,2234,60,2,\"[2233, 2234]\"\r\nCP-6.zip,1,CP,1253,3471,130,1,[3471]\r\nNCP-7.zip,2,NCP,242,1629,133,2,\"[1629, 1630]\"\r\nCP-8.zip,1,CP,1337,3714,60,2,\"[3714, 3715]\"\r\nNCP-23.zip,2,NCP,912,2454,373,1,[2454]\r\nNormal-23.zip,0,Normal,2622,132,38,1,[132]\r\nNormal-8.zip,0,Normal,1871,326,73,1,[326]\r\nNCP-5.zip,2,NCP,193,1531,52,2,\"[1530, 
1531]\"\r\nNormal-24.zip,0,Normal,2646,156,41,1,[156]\r\nCP-14.zip,1,CP,1538,4185,159,3,\"[4185, 4186, 4187]\"\r\nCP-23.zip,1,CP,667,3029,226,1,[3029]\r\nCP-1.zip,1,CP,1,3147,70,5,\"[3143, 3144, 3145, 3146, 3147]\"\r\nNCP-27.zip,2,NCP,1006,2566,42,2,\"[2566, 2567]\"\r\nNormal-1.zip,0,Normal,1711,977,63,2,\"[977, 978]\"\r\nNCP-14.zip,2,NCP,374,1899,139,2,\"[1899, 1900]\"\r\nNCP-16.zip,2,NCP,457,2069,57,2,\"[2068, 2069]\"\r\nCP-22.zip,1,CP,634,2996,680,1,[2996]\r\nNCP-23.zip,2,NCP,905,2447,26,1,[2447]\r\nNormal-2.zip,0,Normal,1759,1118,65,10,\"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]\"\r\nNCP-11.zip,2,NCP,290,1725,103,2,\"[1725, 1726]\"\r\nNCP-21.zip,2,NCP,77,1288,53,2,\"[1287, 1288]\"\r\nCP-30.zip,1,CP,4018,5567,33,1,[5567]\r\nCP-12.zip,1,CP,1483,4052,62,3,\"[4050, 4051, 4052]\"\r\nCP-24.zip,1,CP,692,3054,74,1,[3054]\r\nNCP-6.zip,2,NCP,204,1552,139,2,\"[1552, 1553]\"\r\nNCP-7.zip,2,NCP,24,1179,146,2,\"[1179, 1180]\"\r\nCP-6.zip,1,CP,1251,3469,133,1,[3469]\r\nNormal-1.zip,0,Normal,1682,857,70,6,\"[847, 848, 852, 853, 857, 858]\"\r\nNCP-10.zip,2,NCP,2712,2705,42,1,[2705]\r\nCP-2.zip,1,CP,1100,3318,201,1,[3318]\r\nNormal-1.zip,0,Normal,1671,795,67,3,\"[793, 794, 795]\"\r\nNCP-17.zip,2,NCP,461,2077,67,2,\"[2076, 2077]\"\r\nCP-15.zip,1,CP,1564,4249,51,2,\"[4248, 4249]\"\r\nNCP-4.zip,2,NCP,153,1450,137,2,\"[1450, 1451]\"\r\nCP-4.zip,1,CP,1166,3384,202,1,[3384]\r\nNCP-28.zip,2,NCP,851,2370,145,1,[2370]\r\nNCP-23.zip,2,NCP,95,1327,69,2,\"[1326, 1327]\"\r\nNormal-18.zip,0,Normal,2196,651,95,1,[651]\r\nCP-27.zip,1,CP,3749,5693,20,1,[5693]\r\nNormal-6.zip,0,Normal,1797,252,85,1,[252]\r\nCP-14.zip,1,CP,1544,4203,122,3,\"[4203, 4204, 4205]\"\r\nCP-8.zip,1,CP,1345,3731,55,2,\"[3731, 3732]\"\r\nNCP-8.zip,2,NCP,2678,2649,55,1,[2649]\r\nNCP-23.zip,2,NCP,89,1315,66,4,\"[1311, 1312, 1313, 1315]\"\r\nNormal-17.zip,0,Normal,2167,622,76,1,[622]\r\nCP-22.zip,1,CP,631,2993,130,1,[2993]\r\nCP-16.zip,1,CP,1618,4306,26,1,[4306]\r\nNCP-17.zip,2,NCP,471,2097,139,2,\"[2097, 
2098]\"\r\nNCP-15.zip,2,NCP,416,1986,58,2,\"[1984, 1986]\"\r\nCP-10.zip,1,CP,1389,3833,121,3,\"[3833, 3834, 3835]\"\r\nCP-24.zip,1,CP,696,3058,74,1,[3058]\r\nNCP-26.zip,2,NCP,3996,5494,37,1,[5494]\r\nCP-15.zip,1,CP,1565,4251,66,2,\"[4250, 4251]\"\r\nNCP-7.zip,2,NCP,248,1643,58,2,\"[1642, 1643]\"\r\nNCP-30.zip,2,NCP,932,2474,20,1,[2474]\r\nCP-8.zip,1,CP,1332,3703,41,2,\"[3703, 3704]\"\r\nNormal-2.zip,0,Normal,1754,1093,73,4,\"[1093, 1094, 1095, 1096]\"\r\nNCP-3.zip,2,NCP,131,1408,50,2,\"[1407, 1408]\"\r\nNCP-13.zip,2,NCP,37,1206,147,2,\"[1206, 1207]\"\r\nNCP-7.zip,2,NCP,242,1630,56,2,\"[1629, 1630]\"\r\nCP-26.zip,1,CP,3643,5603,257,2,\"[5602, 5603]\"\r\nNormal-24.zip,0,Normal,2639,149,28,1,[149]\r\nNormal-13.zip,0,Normal,2037,492,82,1,[492]\r\nCP-16.zip,1,CP,1610,4298,22,1,[4298]\r\nNCP-15.zip,2,NCP,415,1982,149,2,\"[1982, 1983]\"\r\nNCP-2.zip,2,NCP,125,1394,55,5,\"[1391, 1392, 1393, 1394, 1395]\"\r\nNormal-23.zip,0,Normal,2616,126,39,1,[126]\r\nCP-26.zip,1,CP,3635,5594,291,1,[5594]\r\nNormal-18.zip,0,Normal,2211,666,85,1,[666]\r\nNCP-17.zip,2,NCP,481,2117,163,2,\"[2117, 2118]\"\r\nNCP-13.zip,2,NCP,37,1207,62,2,\"[1206, 1207]\"\r\nNormal-2.zip,0,Normal,1749,1070,61,4,\"[1069, 1070, 1071, 1072]\"\r\nNCP-29.zip,2,NCP,927,2469,20,1,[2469]\r\nCP-6.zip,1,CP,1226,3444,190,1,[3444]\r\nNCP-14.zip,2,NCP,394,1938,147,2,\"[1938, 1939]\"\r\nCP-19.zip,1,CP,1791,3212,71,4,\"[3210, 3211, 3212, 3213]\"\r\nCP-8.zip,1,CP,1334,3708,56,2,\"[3707, 3708]\"\r\nNCP-12.zip,2,NCP,324,1796,120,2,\"[1796, 1797]\"\r\nCP-30.zip,1,CP,3929,5626,71,2,\"[5626, 5627]\"\r\nNormal-7.zip,0,Normal,1832,287,91,1,[287]\r\nNormal-1.zip,0,Normal,1713,981,71,2,\"[980, 981]\"\r\nNCP-2.zip,2,NCP,111,1363,133,2,\"[1363, 1364]\"\r\nNormal-3.zip,0,Normal,1764,1144,66,4,\"[1143, 1144, 1145, 1146]\"\r\nCP-15.zip,1,CP,1560,4239,63,2,\"[4239, 4240]\"\r\nNCP-22.zip,2,NCP,84,1302,54,2,\"[1301, 1302]\"\r\nNormal-2.zip,0,Normal,1744,1059,71,2,\"[1058, 
1059]\"\r\nCP-21.zip,1,CP,590,2952,86,1,[2952]\r\nNormal-9.zip,0,Normal,1901,356,83,1,[356]\r\nNCP-17.zip,2,NCP,461,2076,160,2,\"[2076, 2077]\"\r\nCP-24.zip,1,CP,683,3045,138,1,[3045]\r\nNormal-11.zip,0,Normal,1983,438,105,1,[438]\r\nNCP-14.zip,2,NCP,39,1210,139,2,\"[1210, 1211]\"\r\nNCP-18.zip,2,NCP,494,2144,156,2,\"[2144, 2145]\"\r\nNCP-14.zip,2,NCP,388,1927,68,2,\"[1926, 1927]\"\r\nNCP-28.zip,2,NCP,853,2373,664,1,[2373]\r\nNormal-22.zip,0,Normal,2588,98,33,1,[98]\r\nNCP-17.zip,2,NCP,46,1225,124,2,\"[1225, 1226]\"\r\nNCP-2.zip,2,NCP,126,1396,152,2,\"[1396, 1398]\"\r\nNCP-15.zip,2,NCP,418,1990,58,2,\"[1989, 1990]\"\r\nNormal-3.zip,0,Normal,765,200,136,1,[200]\r\nCP-9.zip,1,CP,1370,3792,62,2,\"[3792, 3793]\"\r\nCP-13.zip,1,CP,1490,4071,166,3,\"[4071, 4072, 4073]\"\r\nCP-5.zip,1,CP,1212,3430,187,1,[3430]\r\nNCP-29.zip,2,NCP,894,2434,16,1,[2434]\r\nCP-19.zip,1,CP,1788,3199,58,8,\"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]\"\r\nCP-12.zip,1,CP,1466,4011,52,2,\"[4011, 4012]\"\r\nCP-1.zip,1,CP,1088,3223,50,4,\"[3220, 3221, 3222, 3223]\"\r\nNCP-25.zip,2,NCP,3947,5503,41,1,[5503]\r\nCP-30.zip,1,CP,3931,5632,143,4,\"[5630, 5631, 5632, 5633]\"\r\nNCP-2.zip,2,NCP,124,1389,139,2,\"[1389, 1390]\"\r\nCP-7.zip,1,CP,1307,3645,53,4,\"[3645, 3646, 3647, 3648]\"\r\nNCP-27.zip,2,NCP,823,2334,183,1,[2334]\r\nNormal-1.zip,0,Normal,1728,1016,72,4,\"[1013, 1014, 1015, 1016]\"\r\nNormal-2.zip,0,Normal,1754,1096,69,4,\"[1093, 1094, 1095, 1096]\"\r\nCP-12.zip,1,CP,1473,4026,51,3,\"[4026, 4027, 4028]\"\r\nNormal-3.zip,0,Normal,1764,1146,62,4,\"[1143, 1144, 1145, 1146]\"\r\nCP-2.zip,1,CP,1103,3321,180,1,[3321]\r\nCP-4.zip,1,CP,1181,3399,238,1,[3399]\r\nCP-19.zip,1,CP,2436,2904,138,1,[2904]\r\nCP-28.zip,1,CP,3795,5739,23,1,[5739]\r\nCP-29.zip,1,CP,3805,5749,20,1,[5749]\r\nNCP-3.zip,2,NCP,1300,2741,60,1,[2741]\r\nNCP-23.zip,2,NCP,898,2439,48,1,[2439]\r\nNormal-23.zip,0,Normal,2612,122,31,1,[122]\r\nNCP-7.zip,2,NCP,24,1180,61,2,\"[1179, 
1180]\"\r\nNormal-6.zip,0,Normal,1807,262,95,1,[262]\r\nNCP-30.zip,2,NCP,996,2551,189,2,\"[2551, 2552]\"\r\nNormal-9.zip,0,Normal,1893,348,82,1,[348]\r\nNCP-11.zip,2,NCP,290,1726,44,2,\"[1725, 1726]\"\r\nNCP-21.zip,2,NCP,80,1293,129,2,\"[1293, 1294]\"\r\nNormal-24.zip,0,Normal,2655,165,37,1,[165]\r\nNCP-30.zip,2,NCP,996,2552,218,2,\"[2551, 2552]\"\r\nCP-18.zip,1,CP,1653,4341,29,1,[4341]\r\nNCP-5.zip,2,NCP,187,1518,136,2,\"[1518, 1519]\"\r\nNCP-26.zip,2,NCP,3993,5517,39,1,[5517]\r\nNCP-10.zip,2,NCP,273,1692,128,2,\"[1692, 1693]\"\r\nNCP-5.zip,2,NCP,179,1502,122,2,\"[1503, 1502]\"\r\nNormal-26.zip,0,Normal,3887,5400,67,3,\"[5400, 5401, 5404]\"\r\nNCP-7.zip,2,NCP,234,1613,139,2,\"[1613, 1614]\"\r\nNormal-1.zip,0,Normal,1725,1006,60,1,[1006]\r\nNCP-15.zip,2,NCP,419,1992,55,2,\"[1991, 1992]\"\r\nCP-14.zip,1,CP,1523,4151,65,2,\"[4150, 4151]\"\r\nNCP-23.zip,2,NCP,938,2480,195,2,\"[2480, 2481]\"\r\nNCP-13.zip,2,NCP,342,1835,149,2,\"[1835, 1836]\"\r\nCP-24.zip,1,CP,680,3042,86,1,[3042]\r\nNCP-14.zip,2,NCP,394,1939,62,2,\"[1938, 1939]\"\r\nNCP-11.zip,2,NCP,288,1722,49,2,\"[1721, 1722]\"\r\nCP-14.zip,1,CP,1527,4162,58,3,\"[4160, 4161, 4162]\"\r\nCP-6.zip,1,CP,1241,3459,132,1,[3459]\r\nCP-10.zip,1,CP,1408,3878,198,3,\"[3878, 3879, 3880]\"\r\nNCP-14.zip,2,NCP,397,1945,66,2,\"[1944, 1945]\"\r\nCP-1.zip,1,CP,1,3145,248,5,\"[3143, 3144, 3145, 3146, 3147]\"\r\nNormal-15.zip,0,Normal,2111,566,95,1,[566]\r\nNormal-12.zip,0,Normal,2007,462,85,1,[462]\r\nNCP-6.zip,2,NCP,222,1589,52,2,\"[1588, 1589]\"\r\nNormal-25.zip,0,Normal,3856,5368,220,1,[5368]\r\nCP-6.zip,1,CP,1245,3463,306,1,[3463]\r\nCP-9.zip,1,CP,1380,3814,56,1,[3814]\r\nCP-11.zip,1,CP,1442,3955,58,3,\"[3954, 3955, 3956]\"\r\nNormal-26.zip,0,Normal,3889,5407,68,2,\"[5407, 5408]\"\r\nNormal-4.zip,0,Normal,773,208,321,1,[208]\r\nCP-23.zip,1,CP,671,3033,448,1,[3033]\r\nCP-23.zip,1,CP,674,3036,126,1,[3036]\r\nCP-19.zip,1,CP,1788,3200,54,8,\"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 
3203]\"\r\nNCP-12.zip,2,NCP,328,1804,112,2,\"[1804, 1805]\"\r\nNormal-22.zip,0,Normal,2581,91,44,1,[91]\r\nCP-7.zip,1,CP,1316,3669,62,3,\"[3667, 3668, 3669]\"\r\nNCP-1.zip,2,NCP,1046,2618,70,1,[2618]\r\nNCP-16.zip,2,NCP,456,2067,57,2,\"[2066, 2067]\"\r\nNormal-1.zip,0,Normal,1730,1020,63,5,\"[1019, 1020, 1021, 1022, 1023]\"\r\nNCP-17.zip,2,NCP,468,2092,65,2,\"[2091, 2092]\"\r\nNCP-7.zip,2,NCP,2488,2688,40,1,[2688]\r\nCP-10.zip,1,CP,1396,3852,58,3,\"[3851, 3852, 3853]\"\r\nNCP-16.zip,2,NCP,447,2049,58,2,\"[2048, 2049]\"\r\nNormal-8.zip,0,Normal,1864,319,88,1,[319]\r\nCP-15.zip,1,CP,1560,4240,63,2,\"[4239, 4240]\"\r\nCP-12.zip,1,CP,1484,4055,46,3,\"[4053, 4054, 4055]\"\r\nNormal-1.zip,0,Normal,1682,853,81,6,\"[847, 848, 852, 853, 857, 858]\"\r\nNormal-22.zip,0,Normal,2580,90,37,1,[90]\r\nCP-2.zip,1,CP,1128,3346,196,1,[3346]\r\nNCP-7.zip,2,NCP,240,1625,158,2,\"[1625, 1626]\"\r\nNormal-15.zip,0,Normal,2086,541,91,1,[541]\r\nNormal-7.zip,0,Normal,1837,292,94,1,[292]\r\nCP-1.zip,1,CP,1069,3111,77,4,\"[3108, 3109, 3110, 3111]\"\r\nCP-14.zip,1,CP,1549,4216,61,2,\"[4215, 4216]\"\r\nNormal-11.zip,0,Normal,1970,425,88,1,[425]\r\nNCP-13.zip,2,NCP,342,1836,61,2,\"[1835, 1836]\"\r\nCP-25.zip,1,CP,728,3090,86,1,[3090]\r\nNCP-21.zip,2,NCP,68,1268,115,2,\"[1268, 1269]\"\r\nCP-8.zip,1,CP,1342,3725,58,3,\"[3723, 3724, 3725]\"\r\nCP-12.zip,1,CP,1481,4046,58,3,\"[4044, 4045, 4046]\"\r\nCP-5.zip,1,CP,1210,3428,156,1,[3428]\r\nNCP-3.zip,2,NCP,136,1417,53,2,\"[1416, 1417]\"\r\nNCP-2.zip,2,NCP,125,1393,54,5,\"[1391, 1392, 1393, 1394, 1395]\"\r\nNCP-23.zip,2,NCP,97,1330,97,2,\"[1330, 1331]\"\r\nNCP-1.zip,2,NCP,1021,2588,209,4,\"[2587, 2588, 2589, 2590]\"\r\nNCP-12.zip,2,NCP,317,1782,50,2,\"[1781, 1782]\"\r\nNCP-14.zip,2,NCP,388,1926,162,2,\"[1926, 1927]\"\r\nCP-26.zip,1,CP,3641,5600,300,1,[5600]\r\nNormal-3.zip,0,Normal,760,195,117,1,[195]\r\nNCP-12.zip,2,NCP,325,1798,117,2,\"[1798, 1799]\"\r\nNormal-1.zip,0,Normal,1671,793,72,3,\"[793, 794, 
795]\"\r\nNormal-5.zip,0,Normal,807,242,132,1,[242]\r\nCP-19.zip,1,CP,1791,3211,55,4,\"[3210, 3211, 3212, 3213]\"\r\nNormal-4.zip,0,Normal,792,227,108,1,[227]\r\nCP-15.zip,1,CP,1564,4248,51,2,\"[4248, 4249]\"\r\nNCP-12.zip,2,NCP,324,1797,51,2,\"[1796, 1797]\"\r\nCP-13.zip,1,CP,1514,4130,61,2,\"[4129, 4130]\"\r\nCP-30.zip,1,CP,4013,5562,29,1,[5562]\r\nCP-7.zip,1,CP,13,3173,255,4,\"[3170, 3171, 3172, 3173]\"\r\nCP-5.zip,1,CP,1214,3432,282,1,[3432]\r\nNormal-8.zip,0,Normal,1878,333,88,1,[333]\r\nNormal-21.zip,0,Normal,2297,752,83,1,[752]\r\nCP-20.zip,1,CP,2668,3259,52,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nCP-11.zip,1,CP,1436,3940,45,2,\"[3940, 3941]\"\r\nNCP-13.zip,2,NCP,364,1880,56,2,\"[1879, 1880]\"\r\nCP-9.zip,1,CP,1369,3790,67,2,\"[3790, 3791]\"\r\nNCP-21.zip,2,NCP,65,1263,128,2,\"[1263, 1264]\"\r\nCP-23.zip,1,CP,661,3023,116,1,[3023]\r\nCP-30.zip,1,CP,3937,5643,66,2,\"[5643, 5644]\"\r\nCP-25.zip,1,CP,8,3514,36,2,\"[3513, 3514]\"\r\nNCP-15.zip,2,NCP,421,1996,67,2,\"[1995, 1996]\"\r\nCP-25.zip,1,CP,738,3100,110,1,[3100]\r\nNCP-11.zip,2,NCP,304,1755,67,2,\"[1754, 1755]\"\r\nNCP-22.zip,2,NCP,834,2348,226,2,\"[2347, 2348]\"\r\nNormal-1.zip,0,Normal,1680,840,66,6,\"[839, 840, 841, 842, 843, 844]\"\r\nCP-13.zip,1,CP,1519,4141,68,2,\"[4141, 4142]\"\r\nNCP-12.zip,2,NCP,315,1777,107,2,\"[1777, 1778]\"\r\nNormal-2.zip,0,Normal,1753,1088,66,7,\"[1086, 1087, 1088, 1089, 1090, 1091, 1092]\"\r\nCP-8.zip,1,CP,1341,3722,57,1,[3722]\r\nCP-13.zip,1,CP,1491,4075,48,3,\"[4074, 4075, 4076]\"\r\nCP-28.zip,1,CP,3785,5729,28,1,[5729]\r\nNCP-6.zip,2,NCP,212,1568,165,2,\"[1568, 1569]\"\r\nCP-12.zip,1,CP,1477,4035,54,2,\"[4035, 4036]\"\r\nCP-16.zip,1,CP,1605,4293,23,1,[4293]\r\nNCP-29.zip,2,NCP,926,2468,24,1,[2468]\r\nCP-10.zip,1,CP,1394,3847,62,2,\"[3847, 3848]\"\r\nNCP-21.zip,2,NCP,580,2318,58,2,\"[2317, 2318]\"\r\nNCP-19.zip,2,NCP,526,2208,137,2,\"[2208, 2209]\"\r\nCP-13.zip,1,CP,1494,4085,65,3,\"[4083, 4084, 
4085]\"\r\nNormal-27.zip,0,Normal,3895,5421,71,4,\"[5418, 5419, 5420, 5421]\"\r\nNCP-8.zip,2,NCP,267,1680,129,2,\"[1680, 1681]\"\r\nNCP-18.zip,2,NCP,49,1232,61,2,\"[1231, 1232]\"\r\nCP-21.zip,1,CP,589,2951,300,1,[2951]\r\nCP-25.zip,1,CP,8,3513,42,2,\"[3513, 3514]\"\r\nCP-27.zip,1,CP,3765,5709,20,1,[5709]\r\nNCP-4.zip,2,NCP,147,1438,173,2,\"[1438, 1439]\"\r\nNormal-27.zip,0,Normal,3904,5436,82,1,[5436]\r\nNCP-14.zip,2,NCP,384,1921,54,2,\"[1920, 1921]\"\r\nCP-18.zip,1,CP,1780,3560,69,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nCP-14.zip,1,CP,1522,4148,61,2,\"[4148, 4149]\"\r\nNCP-8.zip,2,NCP,256,1658,139,2,\"[1658, 1659]\"\r\nCP-10.zip,1,CP,1406,3874,60,2,\"[3874, 3875]\"\r\nCP-4.zip,1,CP,1177,3395,210,1,[3395]\r\nNormal-1.zip,0,Normal,1673,804,291,6,\"[804, 805, 806, 807, 808, 809]\"\r\nNCP-2.zip,2,NCP,122,1385,149,2,\"[1385, 1386]\"\r\nCP-9.zip,1,CP,1354,3752,46,3,\"[3751, 3752, 3753]\"\r\nNCP-23.zip,2,NCP,922,2464,240,1,[2464]\r\nCP-20.zip,1,CP,2668,3251,58,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nNormal-6.zip,0,Normal,1796,251,96,1,[251]\r\nCP-9.zip,1,CP,1374,3803,50,2,\"[3802, 3803]\"\r\nNCP-7.zip,2,NCP,237,1620,61,2,\"[1619, 1620]\"\r\nNCP-13.zip,2,NCP,363,1878,58,2,\"[1877, 1878]\"\r\nCP-1.zip,1,CP,1084,3130,67,1,[3130]\r\nNormal-14.zip,0,Normal,2082,537,78,1,[537]\r\nCP-18.zip,1,CP,1656,4344,26,1,[4344]\r\nNCP-18.zip,2,NCP,491,2138,149,2,\"[2138, 2139]\"\r\nCP-22.zip,1,CP,609,2971,76,1,[2971]\r\nNormal-18.zip,0,Normal,2198,653,88,1,[653]\r\nNCP-6.zip,2,NCP,212,1569,69,2,\"[1568, 1569]\"\r\nCP-21.zip,1,CP,607,2969,178,1,[2969]\r\nNCP-9.zip,2,NCP,269,1685,64,2,\"[1684, 1685]\"\r\nCP-9.zip,1,CP,1364,3777,56,3,\"[3776, 3777, 3778]\"\r\nCP-17.zip,1,CP,1622,4310,27,1,[4310]\r\nCP-16.zip,1,CP,1601,4289,19,1,[4289]\r\nCP-10.zip,1,CP,1388,3832,51,2,\"[3831, 
3832]\"\r\nNormal-27.zip,0,Normal,3908,5442,56,1,[5442]\r\nCP-25.zip,1,CP,732,3094,159,1,[3094]\r\nNCP-14.zip,2,NCP,40,1212,149,2,\"[1212, 1213]\"\r\nNCP-21.zip,2,NCP,65,1264,54,2,\"[1263, 1264]\"\r\nCP-12.zip,1,CP,1477,4036,54,2,\"[4035, 4036]\"\r\nNormal-10.zip,0,Normal,1953,408,94,1,[408]\r\nCP-15.zip,1,CP,1577,4265,22,1,[4265]\r\nNormal-14.zip,0,Normal,2055,510,91,1,[510]\r\nNormal-17.zip,0,Normal,2154,609,94,1,[609]\r\nNormal-27.zip,0,Normal,3895,5418,61,4,\"[5418, 5419, 5420, 5421]\"\r\nNormal-19.zip,0,Normal,2227,682,73,1,[682]\r\nNormal-11.zip,0,Normal,1975,430,101,1,[430]\r\nCP-15.zip,1,CP,1584,4272,20,1,[4272]\r\nNormal-20.zip,0,Normal,2262,717,84,1,[717]\r\nCP-14.zip,1,CP,1543,4200,190,3,\"[4200, 4201, 4202]\"\r\nNormal-3.zip,0,Normal,753,188,300,1,[188]\r\nCP-12.zip,1,CP,1475,4032,50,2,\"[4031, 4032]\"\r\nNCP-16.zip,2,NCP,458,2071,55,2,\"[2070, 2071]\"\r\nNCP-5.zip,2,NCP,180,1504,136,2,\"[1504, 1505]\"\r\nCP-30.zip,1,CP,3938,5645,94,1,[5645]\r\nCP-9.zip,1,CP,1364,3778,56,3,\"[3776, 3777, 3778]\"\r\nNormal-23.zip,0,Normal,2632,142,39,1,[142]\r\nNormal-5.zip,0,Normal,810,245,324,1,[245]\r\nNCP-5.zip,2,NCP,174,1493,56,2,\"[1492, 1493]\"\r\nCP-17.zip,1,CP,1632,4320,23,1,[4320]\r\nNCP-2.zip,2,NCP,112,1366,56,2,\"[1365, 1366]\"\r\nCP-18.zip,1,CP,1780,3554,67,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nCP-20.zip,1,CP,2668,3252,51,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nNormal-6.zip,0,Normal,1820,275,83,1,[275]\r\nNormal-1.zip,0,Normal,1673,809,57,6,\"[804, 805, 806, 807, 808, 809]\"\r\nNormal-18.zip,0,Normal,2204,659,94,1,[659]\r\nCP-14.zip,1,CP,1531,4169,59,2,\"[4169, 4170]\"\r\nCP-12.zip,1,CP,1474,4030,62,2,\"[4029, 4030]\"\r\nNormal-18.zip,0,Normal,2215,670,80,1,[670]\r\nNCP-21.zip,2,NCP,579,2315,150,2,\"[2315, 
2316]\"\r\nNCP-28.zip,2,NCP,854,2374,265,1,[2374]\r\nNormal-25.zip,0,Normal,3838,5350,201,1,[5350]\r\nCP-9.zip,1,CP,1352,3747,61,1,[3747]\r\nNormal-1.zip,0,Normal,1719,994,76,2,\"[993, 994]\"\r\nNCP-28.zip,2,NCP,852,2372,47,2,\"[2371, 2372]\"\r\nNormal-19.zip,0,Normal,2225,680,94,1,[680]\r\nNormal-16.zip,0,Normal,2148,603,86,1,[603]\r\nNCP-19.zip,2,NCP,544,2245,147,2,\"[2245, 2246]\"\r\nCP-29.zip,1,CP,3826,5770,26,1,[5770]\r\nNCP-7.zip,2,NCP,229,1602,156,2,\"[1602, 1603]\"\r\nNormal-1.zip,0,Normal,1673,807,283,6,\"[804, 805, 806, 807, 808, 809]\"\r\nNormal-6.zip,0,Normal,1823,278,85,1,[278]\r\nNCP-27.zip,2,NCP,824,2335,259,1,[2335]\r\nCP-18.zip,1,CP,1776,3535,64,6,\"[3534, 3535, 3536, 3537, 3538, 3539]\"\r\nNCP-18.zip,2,NCP,513,2183,68,2,\"[2182, 2183]\"\r\nCP-30.zip,1,CP,3934,5639,77,3,\"[5638, 5639, 5640]\"\r\nCP-4.zip,1,CP,1168,3386,203,1,[3386]\r\nNCP-12.zip,2,NCP,323,1794,116,2,\"[1794, 1795]\"\r\nCP-8.zip,1,CP,1340,3720,64,2,\"[3720, 3721]\"\r\nCP-5.zip,1,CP,1223,3441,232,1,[3441]\r\nNCP-4.zip,2,NCP,166,1477,58,2,\"[1476, 1477]\"\r\nNCP-6.zip,2,NCP,219,1583,65,2,\"[1582, 1583]\"\r\nNCP-1.zip,2,NCP,101,1340,57,2,\"[1339, 1340]\"\r\nNCP-11.zip,2,NCP,298,1742,145,2,\"[1742, 1743]\"\r\nNormal-1.zip,0,Normal,1684,874,71,5,\"[870, 871, 873, 874, 875]\"\r\nCP-14.zip,1,CP,1554,4227,41,2,\"[4226, 4227]\"\r\nNCP-18.zip,2,NCP,489,2134,139,2,\"[2134, 2135]\"\r\nNormal-23.zip,0,Normal,2615,125,36,1,[125]\r\nNCP-8.zip,2,NCP,2674,2693,45,1,[2693]\r\nNCP-6.zip,2,NCP,226,1596,142,2,\"[1596, 1597]\"\r\nNCP-10.zip,2,NCP,274,1695,67,2,\"[1694, 1695]\"\r\nNormal-10.zip,0,Normal,1944,399,97,1,[399]\r\nCP-6.zip,1,CP,1236,3454,159,1,[3454]\r\nCP-20.zip,1,CP,2668,3257,53,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nCP-23.zip,1,CP,670,3032,78,1,[3032]\r\nNCP-20.zip,2,NCP,548,2253,144,2,\"[2253, 2254]\"\r\nCP-18.zip,1,CP,1769,3516,23,1,[3516]\r\nNormal-3.zip,0,Normal,754,189,308,1,[189]\r\nNCP-7.zip,2,NCP,239,1623,146,2,\"[1623, 
1624]\"\r\nNCP-14.zip,2,NCP,392,1935,58,2,\"[1934, 1935]\"\r\nNormal-6.zip,0,Normal,1824,279,86,1,[279]\r\nNormal-2.zip,0,Normal,1753,1087,77,7,\"[1086, 1087, 1088, 1089, 1090, 1091, 1092]\"\r\nNCP-30.zip,2,NCP,997,2554,49,2,\"[2553, 2554]\"\r\nCP-26.zip,1,CP,3727,5663,42,1,[5663]\r\nCP-11.zip,1,CP,1433,3934,62,2,\"[3934, 3935]\"\r\nNormal-18.zip,0,Normal,2187,642,92,1,[642]\r\nNCP-2.zip,2,NCP,112,1365,133,2,\"[1365, 1366]\"\r\nNCP-6.zip,2,NCP,219,1582,156,2,\"[1582, 1583]\"\r\nNormal-10.zip,0,Normal,1939,394,93,1,[394]\r\nCP-18.zip,1,CP,1775,3532,57,4,\"[3530, 3531, 3532, 3533]\"\r\nCP-2.zip,1,CP,11,3165,268,7,\"[3161, 3162, 3163, 3164, 3165, 3166, 3167]\"\r\nNormal-12.zip,0,Normal,2012,467,102,1,[467]\r\nCP-21.zip,1,CP,587,2949,151,1,[2949]\r\nNormal-15.zip,0,Normal,2116,571,92,1,[571]\r\nCP-1.zip,1,CP,10,3156,289,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]\"\r\nNormal-27.zip,0,Normal,3895,5419,61,4,\"[5418, 5419, 5420, 5421]\"\r\nNormal-25.zip,0,Normal,3854,5366,197,1,[5366]\r\nNormal-4.zip,0,Normal,771,206,306,1,[206]\r\nNCP-3.zip,2,NCP,129,1403,132,2,\"[1403, 1404]\"\r\nNormal-13.zip,0,Normal,2042,497,90,1,[497]\r\nNormal-2.zip,0,Normal,1753,1090,296,7,\"[1086, 1087, 1088, 1089, 1090, 1091, 1092]\"\r\nNCP-17.zip,2,NCP,478,2111,145,2,\"[2111, 2112]\"\r\nNormal-17.zip,0,Normal,2171,626,92,1,[626]\r\nCP-10.zip,1,CP,1410,3884,51,2,\"[3883, 3884]\"\r\nCP-3.zip,1,CP,1140,3358,370,1,[3358]\r\nNCP-22.zip,2,NCP,885,2422,52,2,\"[2422, 2423]\"\r\nNCP-27.zip,2,NCP,1050,2624,428,2,\"[2623, 2624]\"\r\nNCP-17.zip,2,NCP,478,2112,61,2,\"[2111, 2112]\"\r\nCP-20.zip,1,CP,2668,3254,47,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nNCP-16.zip,2,NCP,433,2019,120,2,\"[2019, 2020]\"\r\nNCP-19.zip,2,NCP,517,2191,58,2,\"[2190, 2191]\"\r\nNormal-24.zip,0,Normal,2657,167,27,1,[167]\r\nCP-8.zip,1,CP,1339,3718,59,2,\"[3718, 3719]\"\r\nNCP-17.zip,2,NCP,482,2119,139,2,\"[2119, 
2120]\"\r\nCP-17.zip,1,CP,1635,4323,27,1,[4323]\r\nNormal-10.zip,0,Normal,1930,385,98,1,[385]\r\nNormal-1.zip,0,Normal,1679,837,70,6,\"[833, 834, 835, 836, 837, 838]\"\r\nNCP-25.zip,2,NCP,3942,5539,37,1,[5539]\r\nNormal-17.zip,0,Normal,2180,635,95,1,[635]\r\nNormal-1.zip,0,Normal,1680,839,66,6,\"[839, 840, 841, 842, 843, 844]\"\r\nNormal-1.zip,0,Normal,1705,965,69,2,\"[965, 966]\"\r\nNCP-5.zip,2,NCP,174,1492,134,2,\"[1492, 1493]\"\r\nNCP-14.zip,2,NCP,386,1923,62,1,[1923]\r\nCP-22.zip,1,CP,625,2987,100,1,[2987]\r\nCP-20.zip,1,CP,2450,2929,90,2,\"[2928, 2929]\"\r\nNormal-10.zip,0,Normal,1949,404,92,1,[404]\r\nCP-14.zip,1,CP,1546,4208,58,2,\"[4208, 4209]\"\r\nNCP-21.zip,2,NCP,63,1260,58,2,\"[1259, 1260]\"\r\nNormal-23.zip,0,Normal,2624,134,38,1,[134]\r\nNCP-10.zip,2,NCP,272,1690,153,2,\"[1690, 1691]\"\r\nCP-5.zip,1,CP,1209,3427,313,1,[3427]\r\nNCP-11.zip,2,NCP,293,1731,122,2,\"[1731, 1732]\"\r\nCP-9.zip,1,CP,1383,3822,71,2,\"[3821, 3822]\"\r\nNormal-4.zip,0,Normal,793,228,94,1,[228]\r\nNCP-2.zip,2,NCP,1057,2633,570,1,[2633]\r\nNormal-1.zip,0,Normal,1679,835,67,6,\"[833, 834, 835, 836, 837, 838]\"\r\nCP-4.zip,1,CP,1185,3403,131,1,[3403]\r\nCP-11.zip,1,CP,1446,3965,63,2,\"[3965, 3966]\"\r\nCP-15.zip,1,CP,1576,4264,23,1,[4264]\r\nCP-12.zip,1,CP,1487,4062,68,3,\"[4061, 4062, 4063]\"\r\nCP-9.zip,1,CP,1381,3817,66,3,\"[3815, 3816, 3817]\"\r\nCP-28.zip,1,CP,3767,5711,17,1,[5711]\r\nNormal-23.zip,0,Normal,2610,120,41,1,[120]\r\nCP-10.zip,1,CP,1394,3848,62,2,\"[3847, 3848]\"\r\nNCP-4.zip,2,NCP,160,1465,61,2,\"[1464, 1465]\"\r\nCP-14.zip,1,CP,1543,4201,57,3,\"[4200, 4201, 4202]\"\r\nCP-23.zip,1,CP,652,3014,277,1,[3014]\r\nCP-16.zip,1,CP,1607,4295,17,1,[4295]\r\nNormal-18.zip,0,Normal,2213,668,84,1,[668]\r\nNormal-16.zip,0,Normal,2121,576,87,1,[576]\r\nNormal-23.zip,0,Normal,2627,137,41,1,[137]\r\nNCP-21.zip,2,NCP,582,2322,54,2,\"[2321, 2322]\"\r\nCP-19.zip,1,CP,2431,2893,361,1,[2893]\r\nNormal-1.zip,0,Normal,1717,989,67,2,\"[989, 990]\"\r\nCP-10.zip,1,CP,1385,3825,64,2,\"[3825, 
3826]\"\r\nCP-5.zip,1,CP,1198,3416,162,1,[3416]\r\nNCP-21.zip,2,NCP,578,2314,55,2,\"[2313, 2314]\"\r\nNCP-20.zip,2,NCP,56,1246,68,2,\"[1245, 1246]\"\r\nNCP-19.zip,2,NCP,532,2222,139,2,\"[2222, 2223]\"\r\nNormal-21.zip,0,Normal,2283,738,87,1,[738]\r\nNormal-19.zip,0,Normal,2222,677,78,1,[677]\r\nCP-9.zip,1,CP,1361,3770,50,2,\"[3770, 3771]\"\r\nNCP-15.zip,2,NCP,420,1993,177,2,\"[1993, 1994]\"\r\nCP-18.zip,1,CP,1776,3538,76,6,\"[3534, 3535, 3536, 3537, 3538, 3539]\"\r\nNormal-1.zip,0,Normal,1706,968,64,2,\"[967, 968]\"\r\nCP-20.zip,1,CP,2668,3253,51,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nNCP-5.zip,2,NCP,171,1486,143,2,\"[1486, 1487]\"\r\nNormal-3.zip,0,Normal,750,185,281,1,[185]\r\nCP-18.zip,1,CP,1780,3565,80,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nNCP-13.zip,2,NCP,362,1876,63,2,\"[1875, 1876]\"\r\nCP-6.zip,1,CP,1234,3452,191,1,[3452]\r\nNormal-1.zip,0,Normal,1684,873,133,5,\"[870, 871, 873, 874, 875]\"\r\nNormal-6.zip,0,Normal,1812,267,99,1,[267]\r\nNCP-17.zip,2,NCP,474,2103,114,2,\"[2103, 2104]\"\r\nNormal-7.zip,0,Normal,1857,312,80,1,[312]\r\nNormal-12.zip,0,Normal,1992,447,104,1,[447]\r\nCP-18.zip,1,CP,1664,4352,20,1,[4352]\r\nNormal-27.zip,0,Normal,3895,5420,71,4,\"[5418, 5419, 5420, 5421]\"\r\nNCP-19.zip,2,NCP,517,2190,139,2,\"[2190, 2191]\"\r\nNormal-23.zip,0,Normal,2625,135,39,1,[135]\r\nNormal-5.zip,0,Normal,811,246,124,1,[246]\r\nCP-4.zip,1,CP,1162,3380,212,1,[3380]\r\nCP-22.zip,1,CP,611,2973,76,1,[2973]\r\nCP-9.zip,1,CP,1381,3815,261,3,\"[3815, 3816, 3817]\"\r\nCP-9.zip,1,CP,1371,3794,200,3,\"[3794, 3795, 3796]\"\r\nNCP-16.zip,2,NCP,432,2017,128,2,\"[2017, 2018]\"\r\nNormal-20.zip,0,Normal,2278,733,90,1,[733]\r\nNormal-19.zip,0,Normal,2240,695,78,1,[695]\r\nCP-28.zip,1,CP,3786,5730,29,1,[5730]\r\nNormal-15.zip,0,Normal,2097,552,89,1,[552]\r\nNCP-18.zip,2,NCP,500,2156,162,2,\"[2156, 2157]\"\r\nCP-9.zip,1,CP,1374,3802,50,2,\"[3802, 
3803]\"\r\nNormal-23.zip,0,Normal,2606,116,33,1,[116]\r\nCP-26.zip,1,CP,3651,5550,395,1,[5550]\r\nNormal-9.zip,0,Normal,1912,367,92,1,[367]\r\nNCP-25.zip,2,NCP,3953,5466,44,1,[5466]\r\nCP-25.zip,1,CP,724,3086,100,1,[3086]\r\nNormal-21.zip,0,Normal,2292,747,82,1,[747]\r\nCP-7.zip,1,CP,1262,3480,384,1,[3480]\r\nNormal-10.zip,0,Normal,1931,386,80,1,[386]\r\nNCP-20.zip,2,NCP,563,2284,141,2,\"[2284, 2285]\"\r\nCP-2.zip,1,CP,1123,3341,213,1,[3341]\r\nNCP-17.zip,2,NCP,486,2127,153,2,\"[2127, 2128]\"\r\nCP-26.zip,1,CP,3733,5673,32,3,\"[5673, 5674, 5675]\"\r\nCP-3.zip,1,CP,1152,3370,69,1,[3370]\r\nNCP-28.zip,2,NCP,838,2353,89,1,[2353]\r\nNormal-1.zip,0,Normal,1717,990,67,2,\"[989, 990]\"\r\nNCP-30.zip,2,NCP,997,2553,54,2,\"[2553, 2554]\"\r\nNCP-17.zip,2,NCP,48,1230,61,2,\"[1229, 1230]\"\r\nNCP-17.zip,2,NCP,467,2089,138,2,\"[2089, 2090]\"\r\nNCP-20.zip,2,NCP,564,2286,143,2,\"[2286, 2287]\"\r\nNormal-7.zip,0,Normal,1854,309,82,1,[309]\r\nNormal-2.zip,0,Normal,1747,1065,60,1,[1065]\r\nNCP-19.zip,2,NCP,535,2228,47,2,\"[2227, 2228]\"\r\nNCP-26.zip,2,NCP,3974,5508,52,1,[5508]\r\nNormal-7.zip,0,Normal,1829,284,92,1,[284]\r\nNormal-1.zip,0,Normal,1673,808,57,6,\"[804, 805, 806, 807, 808, 809]\"\r\nNCP-2.zip,2,NCP,1271,2712,56,1,[2712]\r\nCP-30.zip,1,CP,3934,5638,59,3,\"[5638, 5639, 5640]\"\r\nNCP-26.zip,2,NCP,3979,5486,52,1,[5486]\r\nNCP-20.zip,2,NCP,554,2265,128,2,\"[2265, 2266]\"\r\nNCP-6.zip,2,NCP,221,1587,53,2,\"[1586, 1587]\"\r\nNCP-20.zip,2,NCP,558,2273,119,2,\"[2273, 2274]\"\r\nCP-8.zip,1,CP,1321,3678,58,2,\"[3678, 3679]\"\r\nNCP-6.zip,2,NCP,226,1597,60,2,\"[1596, 1597]\"\r\nNCP-21.zip,2,NCP,76,1286,51,2,\"[1285, 1286]\"\r\nNCP-1.zip,2,NCP,1042,2613,143,2,\"[2613, 2614]\"\r\nNCP-13.zip,2,NCP,366,1884,67,2,\"[1883, 1884]\"\r\nNCP-18.zip,2,NCP,490,2136,147,2,\"[2136, 2137]\"\r\nNCP-28.zip,2,NCP,856,2376,227,2,\"[2376, 2377]\"\r\nCP-19.zip,1,CP,2445,2920,283,2,\"[2920, 2921]\"\r\nNormal-1.zip,0,Normal,1673,806,59,6,\"[804, 805, 806, 807, 808, 
809]\"\r\nCP-25.zip,1,CP,9,3151,72,4,\"[3148, 3149, 3150, 3151]\"\r\nNormal-25.zip,0,Normal,3847,5359,219,1,[5359]\r\nNormal-12.zip,0,Normal,2005,460,77,1,[460]\r\nCP-30.zip,1,CP,3936,5642,59,1,[5642]\r\nNCP-12.zip,2,NCP,326,1800,117,2,\"[1800, 1801]\"\r\nNormal-13.zip,0,Normal,2045,500,85,1,[500]\r\nCP-15.zip,1,CP,1583,4271,18,1,[4271]\r\nNormal-20.zip,0,Normal,2261,716,83,1,[716]\r\nNormal-20.zip,0,Normal,2276,731,91,1,[731]\r\nCP-18.zip,1,CP,1776,3536,75,6,\"[3534, 3535, 3536, 3537, 3538, 3539]\"\r\nNCP-27.zip,2,NCP,1034,2605,19,1,[2605]\r\nNCP-16.zip,2,NCP,445,2044,139,2,\"[2044, 2045]\"\r\nCP-12.zip,1,CP,1461,4001,53,2,\"[4000, 4001]\"\r\nCP-12.zip,1,CP,1485,4056,114,3,\"[4056, 4057, 4058]\"\r\nNCP-7.zip,2,NCP,231,1606,139,2,\"[1606, 1607]\"\r\nNCP-13.zip,2,NCP,343,1838,55,2,\"[1837, 1838]\"\r\nNCP-6.zip,2,NCP,202,1548,161,2,\"[1548, 1549]\"\r\nNormal-17.zip,0,Normal,2160,615,96,1,[615]\r\nCP-28.zip,1,CP,3780,5724,27,1,[5724]\r\nCP-9.zip,1,CP,1354,3753,46,3,\"[3751, 3752, 3753]\"\r\nCP-16.zip,1,CP,1598,4286,23,1,[4286]\r\nCP-19.zip,1,CP,2445,2921,119,2,\"[2920, 2921]\"\r\nCP-9.zip,1,CP,1361,3771,50,2,\"[3770, 3771]\"\r\nNCP-15.zip,2,NCP,412,1974,54,2,\"[1973, 1974]\"\r\nNormal-8.zip,0,Normal,1861,316,76,1,[316]\r\nNormal-3.zip,0,Normal,1766,1150,57,3,\"[1149, 1150, 1151]\"\r\nNormal-17.zip,0,Normal,2182,637,96,1,[637]\r\nNormal-7.zip,0,Normal,1833,288,102,1,[288]\r\nNormal-9.zip,0,Normal,1894,349,99,1,[349]\r\nNormal-22.zip,0,Normal,2319,774,101,1,[774]\r\nNormal-1.zip,0,Normal,1680,844,64,6,\"[839, 840, 841, 842, 843, 844]\"\r\nCP-24.zip,1,CP,679,3041,94,1,[3041]\r\nCP-30.zip,1,CP,3832,5776,23,1,[5776]\r\nCP-25.zip,1,CP,720,3082,84,1,[3082]\r\nNormal-19.zip,0,Normal,2235,690,89,1,[690]\r\nCP-11.zip,1,CP,1429,3927,52,2,\"[3926, 3927]\"\r\nNormal-7.zip,0,Normal,1835,290,83,1,[290]\r\nNCP-7.zip,2,NCP,239,1624,61,2,\"[1623, 1624]\"\r\nNormal-27.zip,0,Normal,3899,5430,76,2,\"[5429, 
5430]\"\r\nCP-4.zip,1,CP,1165,3383,151,1,[3383]\r\nNCP-3.zip,2,NCP,1297,2738,56,1,[2738]\r\nNCP-22.zip,2,NCP,832,2345,25,1,[2345]\r\nNCP-25.zip,2,NCP,3952,5505,46,1,[5505]\r\nNCP-26.zip,2,NCP,3977,5509,56,1,[5509]\r\nCP-16.zip,1,CP,1609,4297,20,1,[4297]\r\nNormal-21.zip,0,Normal,2294,749,103,1,[749]\r\nNCP-25.zip,2,NCP,3967,5507,46,1,[5507]\r\nCP-13.zip,1,CP,1495,4089,48,4,\"[4086, 4087, 4088, 4089]\"\r\nCP-7.zip,1,CP,1317,3672,58,3,\"[3670, 3671, 3672]\"\r\nNormal-26.zip,0,Normal,3877,5389,25,1,[5389]\r\nCP-20.zip,1,CP,2766,3297,41,1,[3297]\r\nCP-18.zip,1,CP,1661,4349,32,1,[4349]\r\nNCP-19.zip,2,NCP,535,2227,112,2,\"[2227, 2228]\"\r\nCP-2.zip,1,CP,1120,3338,159,1,[3338]\r\nNCP-2.zip,2,NCP,118,1377,142,2,\"[1377, 1378]\"\r\nNormal-7.zip,0,Normal,1843,298,96,1,[298]\r\nNCP-15.zip,2,NCP,400,1950,155,1,[1950]\r\nNCP-25.zip,2,NCP,3704,5531,60,1,[5531]\r\nNormal-15.zip,0,Normal,2095,550,99,1,[550]\r\nNormal-1.zip,0,Normal,1684,870,68,5,\"[870, 871, 873, 874, 875]\"\r\nNCP-16.zip,2,NCP,44,1222,52,2,\"[1221, 1222]\"\r\nNCP-11.zip,2,NCP,31,1194,137,2,\"[1194, 1195]\"\r\nNCP-15.zip,2,NCP,409,1968,64,2,\"[1967, 1968]\"\r\nNCP-16.zip,2,NCP,451,2057,48,3,\"[2056, 2057, 2058]\"\r\nNormal-2.zip,0,Normal,1753,1086,77,7,\"[1086, 1087, 1088, 1089, 1090, 1091, 1092]\"\r\nNCP-8.zip,2,NCP,262,1670,139,2,\"[1670, 1671]\"\r\nNormal-10.zip,0,Normal,1955,410,93,1,[410]\r\nNormal-6.zip,0,Normal,1826,281,104,1,[281]\r\nNCP-28.zip,2,NCP,852,2371,47,2,\"[2371, 2372]\"\r\nNCP-27.zip,2,NCP,1000,2558,39,1,[2558]\r\nCP-1.zip,1,CP,1072,3115,52,1,[3115]\r\nNormal-13.zip,0,Normal,2052,507,71,1,[507]\r\nCP-7.zip,1,CP,1314,3663,30,2,\"[3663, 3664]\"\r\nNCP-21.zip,2,NCP,67,1267,70,2,\"[1266, 
1267]\"\r\nNCP-3.zip,2,NCP,132,1409,117,1,[1409]\r\nNormal-18.zip,0,Normal,2205,660,91,1,[660]\r\nNormal-14.zip,0,Normal,2054,509,88,1,[509]\r\nNormal-5.zip,0,Normal,809,244,114,1,[244]\r\nNCP-27.zip,2,NCP,1029,2599,39,1,[2599]\r\nNCP-26.zip,2,NCP,3972,5481,58,1,[5481]\r\nNormal-13.zip,0,Normal,2026,481,85,1,[481]\r\nNCP-17.zip,2,NCP,47,1227,139,2,\"[1227, 1228]\"\r\nCP-27.zip,1,CP,3763,5707,20,1,[5707]\r\nNormal-6.zip,0,Normal,1798,253,93,1,[253]\r\nNCP-9.zip,2,NCP,2703,2669,41,1,[2669]\r\nCP-1.zip,1,CP,1071,3113,57,2,\"[3113, 3114]\"\r\nNCP-16.zip,2,NCP,430,2014,64,2,\"[2013, 2014]\"\r\nNCP-4.zip,2,NCP,144,1432,139,2,\"[1432, 1433]\"\r\nNormal-4.zip,0,Normal,780,215,116,1,[215]\r\nNormal-12.zip,0,Normal,2020,475,88,1,[475]\r\nNCP-13.zip,2,NCP,366,1883,161,2,\"[1883, 1884]\"\r\nNormal-2.zip,0,Normal,1761,1127,18,5,\"[1125, 1126, 1127, 1128, 1129]\"\r\nNCP-29.zip,2,NCP,899,2441,42,2,\"[2440, 2441]\"\r\nCP-16.zip,1,CP,1612,4300,26,1,[4300]\r\nNCP-15.zip,2,NCP,412,1973,129,2,\"[1973, 1974]\"\r\nNCP-10.zip,2,NCP,2717,2710,42,1,[2710]\r\nCP-19.zip,1,CP,1792,3214,71,2,\"[3214, 3215]\"\r\nNormal-20.zip,0,Normal,2269,724,113,1,[724]\r\nCP-11.zip,1,CP,1451,3976,51,2,\"[3975, 3976]\"\r\nNormal-11.zip,0,Normal,1978,433,94,1,[433]\r\nNCP-3.zip,2,NCP,1282,2723,70,1,[2723]\r\nCP-23.zip,1,CP,654,3016,74,1,[3016]\r\nNCP-13.zip,2,NCP,345,1842,62,2,\"[1841, 1842]\"\r\nCP-22.zip,1,CP,610,2972,70,1,[2972]\r\nCP-29.zip,1,CP,3799,5743,23,1,[5743]\r\nNCP-18.zip,2,NCP,506,2168,124,2,\"[2168, 2169]\"\r\nNormal-19.zip,0,Normal,2218,673,84,1,[673]\r\nNCP-7.zip,2,NCP,243,1632,31,3,\"[1631, 1632, 1633]\"\r\nNCP-25.zip,2,NCP,3948,5504,50,1,[5504]\r\nCP-7.zip,1,CP,1312,3658,65,2,\"[3658, 3659]\"\r\nNCP-16.zip,2,NCP,451,2058,23,3,\"[2056, 2057, 2058]\"\r\nCP-12.zip,1,CP,1461,4000,53,2,\"[4000, 4001]\"\r\nCP-1.zip,1,CP,10,3154,297,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]\"\r\nCP-10.zip,1,CP,1388,3831,51,2,\"[3831, 3832]\"\r\nNormal-1.zip,0,Normal,1702,957,69,2,\"[957, 
958]\"\r\nNormal-17.zip,0,Normal,2181,636,100,1,[636]\r\nNCP-19.zip,2,NCP,521,2198,139,2,\"[2198, 2199]\"\r\nNormal-9.zip,0,Normal,1922,377,87,1,[377]\r\nNormal-8.zip,0,Normal,1872,327,86,1,[327]\r\nCP-9.zip,1,CP,1369,3791,67,2,\"[3790, 3791]\"\r\nCP-29.zip,1,CP,3815,5759,23,1,[5759]\r\nNCP-2.zip,2,NCP,118,1378,60,2,\"[1377, 1378]\"\r\nCP-19.zip,1,CP,1793,3216,69,1,[3216]\r\nNCP-5.zip,2,NCP,178,1501,52,2,\"[1500, 1501]\"\r\nCP-13.zip,1,CP,1495,4087,50,4,\"[4086, 4087, 4088, 4089]\"\r\nCP-18.zip,1,CP,1780,3566,41,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nCP-8.zip,1,CP,1323,3682,62,2,\"[3682, 3683]\"\r\nCP-20.zip,1,CP,2754,3285,30,1,[3285]\r\nNormal-26.zip,0,Normal,3865,5377,24,1,[5377]\r\nNormal-23.zip,0,Normal,2614,124,37,1,[124]\r\nCP-12.zip,1,CP,1465,4009,67,2,\"[4009, 4010]\"\r\nCP-14.zip,1,CP,1537,4183,53,3,\"[4182, 4183, 4184]\"\r\nNormal-1.zip,0,Normal,1719,993,76,2,\"[993, 994]\"\r\nNCP-3.zip,2,NCP,128,1401,122,2,\"[1401, 1402]\"\r\nCP-28.zip,1,CP,3778,5722,25,1,[5722]\r\nNCP-1.zip,2,NCP,1018,2584,252,1,[2584]\r\nNCP-9.zip,2,NCP,27,1187,33,2,\"[1186, 1187]\"\r\nCP-13.zip,1,CP,1494,4084,65,3,\"[4083, 4084, 4085]\"\r\nNCP-13.zip,2,NCP,344,1839,152,2,\"[1839, 1840]\"\r\nCP-21.zip,1,CP,604,2966,134,1,[2966]\r\nNCP-1.zip,2,NCP,1037,2608,32,1,[2608]\r\nCP-12.zip,1,CP,1485,4057,49,3,\"[4056, 4057, 4058]\"\r\nNCP-16.zip,2,NCP,45,1223,152,2,\"[1223, 1224]\"\r\nNormal-14.zip,0,Normal,2058,513,95,1,[513]\r\nNCP-12.zip,2,NCP,323,1795,49,2,\"[1794, 1795]\"\r\nNCP-26.zip,2,NCP,3999,5496,52,1,[5496]\r\nNormal-15.zip,0,Normal,2107,562,92,1,[562]\r\nCP-12.zip,1,CP,1478,4038,53,2,\"[4037, 4038]\"\r\nNormal-15.zip,0,Normal,2099,554,85,1,[554]\r\nNCP-21.zip,2,NCP,64,1261,132,2,\"[1261, 1262]\"\r\nCP-9.zip,1,CP,1384,3824,66,2,\"[3823, 3824]\"\r\nNCP-18.zip,2,NCP,511,2178,132,2,\"[2178, 
2179]\"\r\nCP-6.zip,1,CP,1227,3445,307,1,[3445]\r\nNormal-23.zip,0,Normal,2633,143,40,1,[143]\r\nNCP-10.zip,2,NCP,2722,2678,53,1,[2678]\r\nNCP-15.zip,2,NCP,427,2008,56,2,\"[2007, 2008]\"\r\nNCP-23.zip,2,NCP,94,1324,153,2,\"[1324, 1325]\"\r\nCP-19.zip,1,CP,2446,2922,690,1,[2922]\r\nCP-26.zip,1,CP,3728,5664,229,1,[5664]\r\nCP-20.zip,1,CP,2668,3249,45,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nNormal-27.zip,0,Normal,3899,5429,75,2,\"[5429, 5430]\"\r\nNormal-9.zip,0,Normal,1902,357,93,1,[357]\r\nNCP-9.zip,2,NCP,27,1186,75,2,\"[1186, 1187]\"\r\nNCP-18.zip,2,NCP,508,2172,145,2,\"[2172, 2173]\"\r\nNormal-8.zip,0,Normal,1862,317,91,1,[317]\r\nNCP-3.zip,2,NCP,128,1402,52,2,\"[1401, 1402]\"\r\nNCP-8.zip,2,NCP,257,1660,152,2,\"[1660, 1661]\"\r\nNCP-30.zip,2,NCP,973,2516,57,1,[2516]\r\nCP-9.zip,1,CP,1357,3759,61,3,\"[3758, 3759, 3760]\"\r\nNormal-26.zip,0,Normal,3864,5376,178,1,[5376]\r\nCP-25.zip,1,CP,727,3089,104,1,[3089]\r\nNCP-8.zip,2,NCP,259,1664,155,2,\"[1664, 1665]\"\r\nCP-10.zip,1,CP,1390,3838,56,3,\"[3836, 3837, 3838]\"\r\nNormal-21.zip,0,Normal,2295,750,79,1,[750]\r\nNCP-18.zip,2,NCP,49,1231,146,2,\"[1231, 1232]\"\r\nCP-10.zip,1,CP,1391,3840,59,4,\"[3839, 3840, 3841, 3842]\"\r\nNCP-17.zip,2,NCP,48,1229,145,2,\"[1229, 1230]\"\r\nNCP-21.zip,2,NCP,73,1278,130,3,\"[1278, 1279, 1280]\"\r\nNCP-11.zip,2,NCP,296,1738,58,2,\"[1737, 1738]\"\r\nNCP-3.zip,2,NCP,129,1404,56,2,\"[1403, 1404]\"\r\nNCP-12.zip,2,NCP,330,1808,153,2,\"[1808, 1809]\"\r\nCP-14.zip,1,CP,1529,4165,100,3,\"[4165, 4166, 4167]\"\r\nCP-4.zip,1,CP,1187,3405,325,1,[3405]\r\nNCP-11.zip,2,NCP,307,1761,136,2,\"[1761, 1762]\"\r\nCP-26.zip,1,CP,3725,5661,258,2,\"[5660, 5661]\"\r\nNormal-10.zip,0,Normal,1950,405,102,1,[405]\r\nCP-15.zip,1,CP,1563,4247,61,3,\"[4245, 4246, 4247]\"\r\nNCP-4.zip,2,NCP,144,1433,58,2,\"[1432, 1433]\"\r\nNCP-28.zip,2,NCP,855,2375,39,1,[2375]\r\nNormal-1.zip,0,Normal,1726,1008,69,2,\"[1007, 
1008]\"\r\nCP-22.zip,1,CP,629,2991,304,1,[2991]\r\nNCP-4.zip,2,NCP,142,1428,141,2,\"[1428, 1429]\"\r\nCP-21.zip,1,CP,592,2954,104,1,[2954]\r\nCP-1.zip,1,CP,10,3159,293,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]\"\r\nCP-9.zip,1,CP,1357,3760,61,3,\"[3758, 3759, 3760]\"\r\nNormal-24.zip,0,Normal,2648,158,32,1,[158]\r\nNCP-9.zip,2,NCP,269,1684,153,2,\"[1684, 1685]\"\r\nNormal-15.zip,0,Normal,2108,563,101,1,[563]\r\nCP-25.zip,1,CP,9,3148,290,4,\"[3148, 3149, 3150, 3151]\"\r\nNCP-13.zip,2,NCP,364,1879,132,2,\"[1879, 1880]\"\r\nNormal-23.zip,0,Normal,2605,115,35,1,[115]\r\nNCP-10.zip,2,NCP,282,1711,51,2,\"[1710, 1711]\"\r\nCP-14.zip,1,CP,1546,4209,58,2,\"[4208, 4209]\"\r\nNCP-29.zip,2,NCP,925,2467,22,1,[2467]\r\nNormal-21.zip,0,Normal,2296,751,102,1,[751]\r\nCP-2.zip,1,CP,1114,3332,361,1,[3332]\r\nNCP-5.zip,2,NCP,19,1171,61,2,\"[1170, 1171]\"\r\nNCP-13.zip,2,NCP,363,1877,139,2,\"[1877, 1878]\"\r\nCP-12.zip,1,CP,1475,4031,50,2,\"[4031, 4032]\"\r\nNCP-14.zip,2,NCP,399,1949,62,2,\"[1948, 1949]\"\r\nCP-17.zip,1,CP,1626,4314,26,1,[4314]\r\nCP-18.zip,1,CP,1780,3556,60,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nNormal-19.zip,0,Normal,2236,691,83,1,[691]\r\nCP-15.zip,1,CP,1572,4260,19,1,[4260]\r\nCP-6.zip,1,CP,1240,3458,137,1,[3458]\r\nNCP-21.zip,2,NCP,76,1285,121,2,\"[1285, 1286]\"\r\nCP-22.zip,1,CP,623,2985,463,1,[2985]\r\nCP-27.zip,1,CP,3760,5704,23,1,[5704]\r\nCP-23.zip,1,CP,672,3034,86,1,[3034]\r\nNCP-1.zip,2,NCP,1026,2596,21,1,[2596]\r\nCP-22.zip,1,CP,635,2997,106,1,[2997]\r\nNCP-14.zip,2,NCP,375,1901,115,3,\"[1901, 1902, 1903]\"\r\nNCP-11.zip,2,NCP,304,1754,161,2,\"[1754, 1755]\"\r\nNCP-15.zip,2,NCP,408,1965,131,2,\"[1965, 1966]\"\r\nNCP-9.zip,2,NCP,2702,2668,41,1,[2668]\r\nCP-11.zip,1,CP,1452,3978,56,2,\"[3977, 3978]\"\r\nNCP-29.zip,2,NCP,891,2430,22,1,[2430]\r\nNCP-16.zip,2,NCP,458,2070,131,2,\"[2070, 2071]\"\r\nNormal-2.zip,0,Normal,1753,1092,60,7,\"[1086, 1087, 1088, 1089, 1090, 1091, 
1092]\"\r\nNormal-1.zip,0,Normal,1702,958,69,2,\"[957, 958]\"\r\nNormal-2.zip,0,Normal,1761,1126,45,5,\"[1125, 1126, 1127, 1128, 1129]\"\r\nCP-12.zip,1,CP,1487,4063,68,3,\"[4061, 4062, 4063]\"\r\nNCP-25.zip,2,NCP,3958,5471,38,1,[5471]\r\nCP-15.zip,1,CP,1556,4231,40,2,\"[4230, 4231]\"\r\nNCP-16.zip,2,NCP,431,2015,160,2,\"[2015, 2016]\"\r\nNormal-2.zip,0,Normal,1745,1060,298,3,\"[1060, 1061, 1062]\"\r\nNCP-23.zip,2,NCP,906,2448,55,1,[2448]\r\nCP-2.zip,1,CP,11,3163,265,7,\"[3161, 3162, 3163, 3164, 3165, 3166, 3167]\"\r\nNCP-17.zip,2,NCP,487,2130,70,2,\"[2129, 2130]\"\r\nCP-16.zip,1,CP,1600,4288,19,1,[4288]\r\nNCP-21.zip,2,NCP,580,2317,139,2,\"[2317, 2318]\"\r\nNormal-1.zip,0,Normal,1673,805,59,6,\"[804, 805, 806, 807, 808, 809]\"\r\nCP-29.zip,1,CP,3801,5745,26,1,[5745]\r\nNormal-1.zip,0,Normal,1726,1007,69,2,\"[1007, 1008]\"\r\nNCP-29.zip,2,NCP,893,2432,25,2,\"[2432, 2433]\"\r\nCP-3.zip,1,CP,1143,3361,177,1,[3361]\r\nCP-8.zip,1,CP,1343,3726,56,2,\"[3726, 3727]\"\r\nNCP-2.zip,2,NCP,115,1371,118,2,\"[1371, 1372]\"\r\nNCP-11.zip,2,NCP,31,1195,57,2,\"[1194, 1195]\"\r\nCP-1.zip,1,CP,1071,3114,57,2,\"[3113, 3114]\"\r\nNCP-23.zip,2,NCP,951,2494,38,1,[2494]\r\nNormal-1.zip,0,Normal,1706,967,64,2,\"[967, 968]\"\r\nNCP-8.zip,2,NCP,262,1671,58,2,\"[1670, 1671]\"\r\nNormal-10.zip,0,Normal,1943,398,94,1,[398]\r\nNCP-8.zip,2,NCP,257,1661,64,2,\"[1660, 1661]\"\r\nNormal-24.zip,0,Normal,2644,154,39,1,[154]\r\nNCP-15.zip,2,NCP,407,1964,52,2,\"[1963, 1964]\"\r\nNormal-26.zip,0,Normal,3883,5395,61,1,[5395]\r\nNCP-9.zip,2,NCP,2685,2698,52,1,[2698]\r\nNCP-30.zip,2,NCP,992,2545,213,1,[2545]\r\nCP-21.zip,1,CP,596,2958,255,1,[2958]\r\nCP-7.zip,1,CP,1314,3664,30,2,\"[3663, 3664]\"\r\nNCP-16.zip,2,NCP,432,2018,54,2,\"[2017, 2018]\"\r\nNCP-14.zip,2,NCP,371,1894,59,2,\"[1893, 1894]\"\r\nNCP-7.zip,2,NCP,2482,2685,45,1,[2685]\r\nNormal-1.zip,0,Normal,1679,834,66,6,\"[833, 834, 835, 836, 837, 838]\"\r\nCP-29.zip,1,CP,3824,5768,23,1,[5768]\r\nNormal-2.zip,0,Normal,1753,1089,66,7,\"[1086, 1087, 1088, 
1089, 1090, 1091, 1092]\"\r\nNormal-7.zip,0,Normal,1859,314,85,1,[314]\r\nNCP-21.zip,2,NCP,578,2313,130,2,\"[2313, 2314]\"\r\nCP-10.zip,1,CP,1402,3866,55,3,\"[3865, 3866, 3867]\"\r\nNormal-4.zip,0,Normal,791,226,138,1,[226]\r\nNormal-13.zip,0,Normal,2039,494,101,1,[494]\r\nNormal-15.zip,0,Normal,2115,570,94,1,[570]\r\nCP-12.zip,1,CP,1470,4021,54,2,\"[4020, 4021]\"\r\nCP-24.zip,1,CP,695,3057,201,1,[3057]\r\nNormal-12.zip,0,Normal,1994,449,95,1,[449]\r\nNormal-5.zip,0,Normal,804,239,325,1,[239]\r\nCP-17.zip,1,CP,1623,4311,23,1,[4311]\r\nNormal-18.zip,0,Normal,2208,663,95,1,[663]\r\nNCP-19.zip,2,NCP,526,2209,58,2,\"[2208, 2209]\"\r\nNCP-16.zip,2,NCP,45,1224,64,2,\"[1223, 1224]\"\r\nNormal-1.zip,0,Normal,1679,838,70,6,\"[833, 834, 835, 836, 837, 838]\"\r\nCP-2.zip,1,CP,11,3161,244,7,\"[3161, 3162, 3163, 3164, 3165, 3166, 3167]\"\r\nNormal-19.zip,0,Normal,2239,694,89,1,[694]\r\nNCP-7.zip,2,NCP,243,1631,145,3,\"[1631, 1632, 1633]\"\r\nNCP-7.zip,2,NCP,243,1633,61,3,\"[1631, 1632, 1633]\"\r\nCP-18.zip,1,CP,1780,3561,63,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nCP-11.zip,1,CP,1429,3926,52,2,\"[3926, 3927]\"\r\nNCP-7.zip,2,NCP,237,1619,146,2,\"[1619, 1620]\"\r\nCP-7.zip,1,CP,1319,3674,61,2,\"[3674, 3675]\"\r\nNCP-28.zip,2,NCP,829,2342,36,1,[2342]\r\nNormal-18.zip,0,Normal,2186,641,84,1,[641]\r\nNormal-16.zip,0,Normal,2127,582,84,1,[582]\r\nCP-5.zip,1,CP,1197,3415,191,1,[3415]\r\nCP-10.zip,1,CP,1414,3893,63,3,\"[3891, 3892, 3893]\"\r\nNCP-14.zip,2,NCP,384,1920,127,2,\"[1920, 1921]\"\r\nCP-7.zip,1,CP,1317,3671,116,3,\"[3670, 3671, 3672]\"\r\nNCP-22.zip,2,NCP,81,1295,125,2,\"[1295, 1296]\"\r\nCP-3.zip,1,CP,1156,3374,173,1,[3374]\r\nNormal-2.zip,0,Normal,1761,1129,60,5,\"[1125, 1126, 1127, 1128, 1129]\"\r\nNCP-8.zip,2,NCP,252,1651,58,2,\"[1650, 
1651]\"\r\nNCP-25.zip,2,NCP,3959,5472,44,1,[5472]\r\nNormal-11.zip,0,Normal,1988,443,90,1,[443]\r\nCP-30.zip,1,CP,3833,5777,23,1,[5777]\r\nNCP-26.zip,2,NCP,3985,5491,50,1,[5491]\r\nCP-20.zip,1,CP,2668,3255,28,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nNormal-14.zip,0,Normal,2077,532,92,1,[532]\r\nNormal-14.zip,0,Normal,2059,514,95,1,[514]\r\nCP-29.zip,1,CP,3829,5773,26,1,[5773]\r\nNCP-15.zip,2,NCP,402,1954,62,2,\"[1953, 1954]\"\r\nCP-29.zip,1,CP,3800,5744,29,1,[5744]\r\nCP-9.zip,1,CP,1383,3821,71,2,\"[3821, 3822]\"\r\nNCP-6.zip,2,NCP,225,1594,135,2,\"[1594, 1595]\"\r\nCP-27.zip,1,CP,3759,5703,23,1,[5703]\r\nCP-4.zip,1,CP,1190,3408,173,1,[3408]\r\nNCP-29.zip,2,NCP,889,2427,38,2,\"[2427, 2428]\"\r\nNCP-14.zip,2,NCP,375,1902,40,3,\"[1901, 1902, 1903]\"\r\nNormal-19.zip,0,Normal,2238,693,91,1,[693]\r\nNCP-2.zip,2,NCP,1273,2714,56,1,[2714]\r\nNCP-18.zip,2,NCP,497,2151,53,2,\"[2150, 2151]\"\r\nCP-25.zip,1,CP,715,3077,609,1,[3077]\r\nCP-7.zip,1,CP,1264,3482,126,1,[3482]\r\nCP-1.zip,1,CP,10,3157,46,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]\"\r\nNormal-20.zip,0,Normal,2266,721,94,1,[721]\r\nCP-11.zip,1,CP,1433,3935,62,2,\"[3934, 3935]\"\r\nNCP-18.zip,2,NCP,511,2179,56,2,\"[2178, 2179]\"\r\nCP-3.zip,1,CP,1138,3356,158,1,[3356]\r\nNormal-20.zip,0,Normal,2249,704,66,1,[704]\r\nNormal-6.zip,0,Normal,1809,264,94,1,[264]\r\nCP-14.zip,1,CP,1547,4210,142,3,\"[4210, 4211, 4212]\"\r\nCP-21.zip,1,CP,586,2948,174,1,[2948]\r\nCP-23.zip,1,CP,650,3012,102,1,[3012]\r\nCP-14.zip,1,CP,1522,4149,61,2,\"[4148, 4149]\"\r\nNCP-8.zip,2,NCP,250,1646,144,2,\"[1646, 1647]\"\r\nNormal-26.zip,0,Normal,3884,5397,298,2,\"[5396, 5397]\"\r\nCP-28.zip,1,CP,3773,5717,20,1,[5717]\r\nNormal-21.zip,0,Normal,2309,764,88,1,[764]\r\nNCP-12.zip,2,NCP,326,1801,50,2,\"[1800, 1801]\"\r\nNormal-1.zip,0,Normal,1729,1017,74,2,\"[1017, 1018]\"\r\nNormal-1.zip,0,Normal,1684,871,68,5,\"[870, 871, 873, 874, 875]\"\r\nCP-15.zip,1,CP,1567,4254,118,2,\"[4254, 
4255]\"\r\nNCP-4.zip,2,NCP,163,1470,154,2,\"[1470, 1471]\"\r\nNormal-1.zip,0,Normal,1705,966,69,2,\"[965, 966]\"\r\nCP-11.zip,1,CP,1446,3966,63,2,\"[3965, 3966]\"\r\nNCP-6.zip,2,NCP,225,1595,57,2,\"[1594, 1595]\"\r\nNCP-11.zip,2,NCP,293,1732,52,2,\"[1731, 1732]\"\r\nNCP-28.zip,2,NCP,839,2354,209,1,[2354]\r\nNCP-18.zip,2,NCP,513,2182,163,2,\"[2182, 2183]\"\r\nNormal-8.zip,0,Normal,1889,344,87,1,[344]\r\nCP-2.zip,1,CP,1112,3330,154,1,[3330]\r\nNormal-26.zip,0,Normal,3874,5386,28,1,[5386]\r\nCP-29.zip,1,CP,3813,5757,21,1,[5757]\r\nCP-7.zip,1,CP,1317,3670,229,3,\"[3670, 3671, 3672]\"\r\nNCP-20.zip,2,NCP,553,2264,58,2,\"[2263, 2264]\"\r\nCP-29.zip,1,CP,3820,5764,31,1,[5764]\r\nNCP-17.zip,2,NCP,482,2120,58,2,\"[2119, 2120]\"\r\nNCP-7.zip,2,NCP,233,1610,86,2,\"[1610, 1612]\"\r\nNCP-18.zip,2,NCP,500,2157,68,2,\"[2156, 2157]\"\r\nNormal-4.zip,0,Normal,799,234,118,1,[234]\r\nNCP-23.zip,2,NCP,94,1325,64,2,\"[1324, 1325]\"\r\nCP-18.zip,1,CP,1780,3563,60,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nNCP-23.zip,2,NCP,902,2444,45,1,[2444]\r\nCP-2.zip,1,CP,11,3162,260,7,\"[3161, 3162, 3163, 3164, 3165, 3166, 3167]\"\r\nNCP-3.zip,2,NCP,135,1415,58,2,\"[1414, 1415]\"\r\nCP-8.zip,1,CP,1350,3745,55,1,[3745]\r\nNormal-14.zip,0,Normal,2065,520,81,1,[520]\r\nNCP-5.zip,2,NCP,188,1521,57,2,\"[1520, 1521]\"\r\nNormal-2.zip,0,Normal,1745,1061,60,3,\"[1060, 1061, 1062]\"\r\nNCP-15.zip,2,NCP,424,2002,64,2,\"[2001, 2002]\"\r\nNormal-4.zip,0,Normal,790,225,126,1,[225]\r\nNCP-4.zip,2,NCP,142,1429,59,2,\"[1428, 1429]\"\r\nCP-7.zip,1,CP,1310,3653,51,2,\"[3653, 3654]\"\r\nCP-14.zip,1,CP,1537,4182,53,3,\"[4182, 4183, 4184]\"\r\nCP-17.zip,1,CP,1625,4313,26,1,[4313]\r\nNormal-1.zip,0,Normal,1680,843,64,6,\"[839, 840, 841, 842, 843, 844]\"\r\nNCP-11.zip,2,NCP,311,1769,134,2,\"[1769, 1770]\"\r\nCP-1.zip,1,CP,1075,3118,553,2,\"[3118, 3119]\"\r\nNormal-4.zip,0,Normal,770,205,116,1,[205]\r\nCP-7.zip,1,CP,1311,3655,160,3,\"[3655, 3656, 
3657]\"\r\nNormal-1.zip,0,Normal,1724,1005,55,1,[1005]\r\nNCP-20.zip,2,NCP,563,2285,59,2,\"[2284, 2285]\"\r\nNCP-4.zip,2,NCP,163,1471,65,2,\"[1470, 1471]\"\r\nNormal-15.zip,0,Normal,2114,569,101,1,[569]\r\nNormal-12.zip,0,Normal,2016,471,89,1,[471]\r\nCP-23.zip,1,CP,657,3019,343,1,[3019]\r\nNormal-1.zip,0,Normal,1729,1018,74,2,\"[1017, 1018]\"\r\nCP-18.zip,1,CP,1780,3558,73,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nNCP-5.zip,2,NCP,183,1511,52,2,\"[1510, 1511]\"\r\nCP-1.zip,1,CP,1074,3117,61,1,[3117]\r\nNormal-8.zip,0,Normal,1870,325,88,1,[325]\r\nCP-6.zip,1,CP,1254,3472,125,1,[3472]\r\nCP-21.zip,1,CP,2775,3306,43,1,[3306]\r\nCP-16.zip,1,CP,1587,4275,20,1,[4275]\r\nNCP-26.zip,2,NCP,3984,5490,54,1,[5490]\r\nCP-27.zip,1,CP,3747,5691,20,1,[5691]\r\nCP-13.zip,1,CP,1495,4088,48,4,\"[4086, 4087, 4088, 4089]\"\r\nCP-9.zip,1,CP,1384,3823,66,2,\"[3823, 3824]\"\r\nNCP-1.zip,2,NCP,100,1338,58,2,\"[1337, 1338]\"\r\nNCP-27.zip,2,NCP,1025,2595,252,1,[2595]\r\nNCP-18.zip,2,NCP,510,2177,43,2,\"[2176, 2177]\"\r\nNCP-11.zip,2,NCP,298,1743,61,2,\"[1742, 1743]\"\r\nNormal-17.zip,0,Normal,2174,629,88,1,[629]\r\nCP-23.zip,1,CP,677,3039,309,1,[3039]\r\nNormal-21.zip,0,Normal,2284,739,80,1,[739]\r\nNormal-18.zip,0,Normal,2193,648,85,1,[648]\r\nNCP-27.zip,2,NCP,1015,2579,39,1,[2579]\r\nNCP-6.zip,2,NCP,214,1572,144,2,\"[1572, 1573]\"\r\nCP-6.zip,1,CP,1248,3466,141,1,[3466]\r\nNormal-27.zip,0,Normal,3901,5433,66,1,[5433]\r\nCP-13.zip,1,CP,1519,4142,68,2,\"[4141, 4142]\"\r\nNCP-14.zip,2,NCP,385,1922,64,1,[1922]\r\nCP-7.zip,1,CP,1311,3657,67,3,\"[3655, 3656, 3657]\"\r\nCP-14.zip,1,CP,1547,4212,58,3,\"[4210, 4211, 4212]\"\r\nCP-4.zip,1,CP,1186,3404,204,1,[3404]\r\nNCP-4.zip,2,NCP,165,1474,131,2,\"[1474, 1475]\"\r\nCP-1.zip,1,CP,10,3160,297,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]\"\r\nCP-3.zip,1,CP,1157,3375,204,1,[3375]\r\nNCP-11.zip,2,NCP,307,1762,57,2,\"[1761, 1762]\"\r\nCP-11.zip,1,CP,1441,3952,53,3,\"[3951, 3952, 
3953]\"\r\nNCP-21.zip,2,NCP,63,1259,139,2,\"[1259, 1260]\"\r\nNormal-6.zip,0,Normal,1806,261,100,1,[261]\r\nCP-17.zip,1,CP,1627,4315,26,1,[4315]\r\nNormal-14.zip,0,Normal,2064,519,91,1,[519]\r\nNCP-5.zip,2,NCP,180,1505,57,2,\"[1504, 1505]\"\r\nNormal-16.zip,0,Normal,2134,589,72,1,[589]\r\nNormal-14.zip,0,Normal,2063,518,99,1,[518]\r\nCP-11.zip,1,CP,1451,3975,51,2,\"[3975, 3976]\"\r\nNormal-24.zip,0,Normal,2647,157,34,1,[157]\r\nNCP-21.zip,2,NCP,66,1265,58,1,[1265]\r\nNormal-25.zip,0,Normal,3843,5355,180,1,[5355]\r\nCP-25.zip,1,CP,729,3091,106,1,[3091]\r\nCP-20.zip,1,CP,2668,3256,53,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nNCP-6.zip,2,NCP,200,1544,123,2,\"[1544, 1545]\"\r\nNormal-1.zip,0,Normal,1685,879,65,4,\"[877, 878, 879, 880]\"\r\nNCP-24.zip,2,NCP,972,2515,120,1,[2515]\r\nCP-14.zip,1,CP,1547,4211,58,3,\"[4210, 4211, 4212]\"\r\nCP-18.zip,1,CP,1775,3530,58,4,\"[3530, 3531, 3532, 3533]\"\r\nCP-11.zip,1,CP,1427,3921,43,2,\"[3921, 3922]\"\r\nCP-18.zip,1,CP,1776,3534,64,6,\"[3534, 3535, 3536, 3537, 3538, 3539]\"\r\nNCP-13.zip,2,NCP,368,1888,54,2,\"[1887, 1888]\"\r\nCP-23.zip,1,CP,644,3006,134,1,[3006]\r\nCP-7.zip,1,CP,1312,3659,65,2,\"[3658, 3659]\"\r\nNCP-4.zip,2,NCP,139,1422,132,2,\"[1422, 1423]\"\r\nNCP-15.zip,2,NCP,422,1998,63,2,\"[1997, 1998]\"\r\nCP-10.zip,1,CP,1391,3842,59,4,\"[3839, 3840, 3841, 3842]\"\r\nCP-11.zip,1,CP,1441,3953,53,3,\"[3951, 3952, 3953]\"\r\nNCP-4.zip,2,NCP,154,1452,110,2,\"[1452, 1453]\"\r\nNCP-6.zip,2,NCP,202,1549,67,2,\"[1548, 1549]\"\r\nCP-11.zip,1,CP,1436,3941,45,2,\"[3940, 3941]\"\r\nNCP-16.zip,2,NCP,431,2016,67,2,\"[2015, 2016]\"\r\nNormal-26.zip,0,Normal,3870,5382,30,1,[5382]\r\nNormal-17.zip,0,Normal,2159,614,89,1,[614]\r\nCP-11.zip,1,CP,1427,3922,43,2,\"[3921, 3922]\"\r\nCP-6.zip,1,CP,1228,3446,307,1,[3446]\r\nNCP-15.zip,2,NCP,422,1997,156,2,\"[1997, 1998]\"\r\nNormal-1.zip,0,Normal,1679,836,67,6,\"[833, 834, 835, 836, 837, 
838]\"\r\nCP-16.zip,1,CP,1604,4292,22,1,[4292]\r\nCP-4.zip,1,CP,1179,3397,153,1,[3397]\r\nNCP-6.zip,2,NCP,221,1586,125,2,\"[1586, 1587]\"\r\nCP-18.zip,1,CP,1780,3564,41,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nNCP-4.zip,2,NCP,139,1423,56,2,\"[1422, 1423]\"\r\nNormal-1.zip,0,Normal,1685,880,65,4,\"[877, 878, 879, 880]\"\r\nCP-18.zip,1,CP,1780,3557,73,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nCP-7.zip,1,CP,1269,3487,172,1,[3487]\r\nNormal-1.zip,0,Normal,1680,841,69,6,\"[839, 840, 841, 842, 843, 844]\"\r\nCP-13.zip,1,CP,1491,4074,113,3,\"[4074, 4075, 4076]\"\r\nNCP-13.zip,2,NCP,344,1840,63,2,\"[1839, 1840]\"\r\nNCP-17.zip,2,NCP,476,2108,53,2,\"[2107, 2108]\"\r\nNormal-12.zip,0,Normal,1997,452,104,1,[452]\r\nNormal-2.zip,0,Normal,1745,1062,60,3,\"[1060, 1061, 1062]\"\r\nNormal-19.zip,0,Normal,2224,679,82,1,[679]\r\nCP-2.zip,1,CP,1101,3319,187,1,[3319]\r\nNormal-26.zip,0,Normal,3873,5385,25,1,[5385]\r\nCP-15.zip,1,CP,1578,4266,22,1,[4266]\r\nNormal-22.zip,0,Normal,2591,101,37,1,[101]\r\nNormal-11.zip,0,Normal,1966,421,90,1,[421]\r\nNCP-17.zip,2,NCP,480,2115,139,2,\"[2115, 2116]\"\r\nCP-19.zip,1,CP,2,3503,34,1,[3503]\r\nCP-16.zip,1,CP,1616,4304,29,1,[4304]\r\nCP-10.zip,1,CP,1410,3883,51,2,\"[3883, 3884]\"\r\nCP-24.zip,1,CP,701,3063,66,1,[3063]\r\nNCP-6.zip,2,NCP,200,1545,52,2,\"[1544, 1545]\"\r\nCP-1.zip,1,CP,10,3155,297,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]\"\r\nNCP-4.zip,2,NCP,160,1464,146,2,\"[1464, 1465]\"\r\nNormal-8.zip,0,Normal,1890,345,99,1,[345]\r\nNCP-9.zip,2,NCP,2694,2660,39,1,[2660]\r\nCP-30.zip,1,CP,3930,5628,62,2,\"[5628, 5629]\"\r\nCP-25.zip,1,CP,9,3149,290,4,\"[3148, 3149, 3150, 3151]\"\r\nNormal-13.zip,0,Normal,2022,477,92,1,[477]\r\nNormal-1.zip,0,Normal,1680,842,69,6,\"[839, 840, 841, 842, 843, 844]\"\r\nNCP-7.zip,2,NCP,229,1603,65,2,\"[1602, 
1603]\"\r\nNormal-1.zip,0,Normal,1712,979,70,1,[979]\r\nNormal-12.zip,0,Normal,2002,457,96,1,[457]\r\nCP-6.zip,1,CP,1233,3451,150,1,[3451]\r\nNCP-18.zip,2,NCP,489,2135,58,2,\"[2134, 2135]\"\r\nCP-7.zip,1,CP,1310,3654,51,2,\"[3653, 3654]\"\r\nCP-22.zip,1,CP,636,2998,102,1,[2998]\r\nNCP-21.zip,2,NCP,70,1273,51,2,\"[1272, 1273]\"\r\nNormal-23.zip,0,Normal,2603,113,41,1,[113]\r\nCP-8.zip,1,CP,1323,3683,62,2,\"[3682, 3683]\"\r\nNormal-20.zip,0,Normal,2274,729,85,1,[729]\r\nNCP-29.zip,2,NCP,889,2428,121,2,\"[2427, 2428]\"\r\nNCP-1.zip,2,NCP,1040,2611,113,1,[2611]\r\nNormal-21.zip,0,Normal,2298,753,80,1,[753]\r\nCP-19.zip,1,CP,1792,3215,71,2,\"[3214, 3215]\"\r\nNormal-27.zip,0,Normal,3916,5459,77,1,[5459]\r\nNormal-21.zip,0,Normal,2311,766,91,1,[766]\r\nNCP-13.zip,2,NCP,343,1837,130,2,\"[1837, 1838]\"\r\nNCP-26.zip,2,NCP,3989,5513,45,1,[5513]\r\nCP-13.zip,1,CP,1495,4086,112,4,\"[4086, 4087, 4088, 4089]\"\r\nNormal-5.zip,0,Normal,812,247,126,1,[247]\r\nNormal-15.zip,0,Normal,2098,553,84,1,[553]\r\nNormal-16.zip,0,Normal,2119,574,93,1,[574]\r\nCP-25.zip,1,CP,731,3093,82,1,[3093]\r\nCP-16.zip,1,CP,1597,4285,23,1,[4285]\r\nCP-26.zip,1,CP,3726,5662,232,1,[5662]\r\nCP-4.zip,1,CP,1183,3401,294,1,[3401]\r\nCP-10.zip,1,CP,1391,3839,59,4,\"[3839, 3840, 3841, 3842]\"\r\nNCP-23.zip,2,NCP,901,2443,320,1,[2443]\r\nNormal-11.zip,0,Normal,1957,412,78,1,[412]\r\nNCP-17.zip,2,NCP,474,2104,48,2,\"[2103, 2104]\"\r\nNCP-9.zip,2,NCP,2698,2664,57,1,[2664]\r\nNCP-7.zip,2,NCP,233,1612,45,2,\"[1610, 1612]\"\r\nNCP-9.zip,2,NCP,2686,2699,48,1,[2699]\r\nCP-18.zip,1,CP,1776,3537,75,6,\"[3534, 3535, 3536, 3537, 3538, 3539]\"\r\nCP-3.zip,1,CP,1158,3376,193,1,[3376]\r\nCP-27.zip,1,CP,3755,5699,23,1,[5699]\r\nCP-13.zip,1,CP,1509,4120,59,3,\"[4118, 4119, 4120]\"\r\nNCP-29.zip,2,NCP,910,2452,76,1,[2452]\r\nCP-2.zip,1,CP,11,3166,274,7,\"[3161, 3162, 3163, 3164, 3165, 3166, 3167]\"\r\nNCP-16.zip,2,NCP,433,2020,51,2,\"[2019, 
2020]\"\r\nNormal-26.zip,0,Normal,3863,5375,231,1,[5375]\r\nNormal-7.zip,0,Normal,1851,306,102,1,[306]\r\nNCP-23.zip,2,NCP,917,2459,272,1,[2459]\r\nNCP-26.zip,2,NCP,3986,5492,42,1,[5492]\r\nCP-12.zip,1,CP,1478,4037,53,2,\"[4037, 4038]\"\r\nNCP-2.zip,2,NCP,115,1372,50,2,\"[1371, 1372]\"\r\nNCP-13.zip,2,NCP,362,1875,151,2,\"[1875, 1876]\"\r\nNormal-22.zip,0,Normal,2592,102,39,1,[102]\r\nCP-9.zip,1,CP,1357,3758,61,3,\"[3758, 3759, 3760]\"\r\nNormal-6.zip,0,Normal,1825,280,81,1,[280]\r\nNormal-4.zip,0,Normal,775,210,134,1,[210]\r\nNCP-13.zip,2,NCP,365,1881,117,2,\"[1881, 1882]\"\r\nCP-24.zip,1,CP,709,3071,302,1,[3071]\r\nCP-17.zip,1,CP,1630,4318,23,1,[4318]\r\nCP-15.zip,1,CP,1557,4232,43,2,\"[4232, 4233]\"\r\nNCP-23.zip,2,NCP,956,2499,156,1,[2499]\r\nCP-2.zip,1,CP,1106,3324,164,1,[3324]\r\nNormal-9.zip,0,Normal,1895,350,92,1,[350]\r\nCP-21.zip,1,CP,599,2961,68,1,[2961]\r\nNCP-16.zip,2,NCP,448,2051,58,2,\"[2050, 2051]\"\r\nCP-5.zip,1,CP,1206,3424,176,1,[3424]\r\nCP-26.zip,1,CP,3648,5540,170,1,[5540]\r\nCP-1.zip,1,CP,1091,3309,354,1,[3309]\r\nNCP-10.zip,2,NCP,2713,2706,39,1,[2706]\r\nNCP-30.zip,2,NCP,949,2492,42,1,[2492]\r\nNCP-17.zip,2,NCP,480,2116,58,2,\"[2115, 2116]\"\r\nCP-7.zip,1,CP,1306,3643,48,3,\"[3642, 3643, 3644]\"\r\nNormal-7.zip,0,Normal,1840,295,108,1,[295]\r\nCP-18.zip,1,CP,1780,3562,63,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nNCP-1.zip,2,NCP,1011,2575,111,2,\"[2574, 2575]\"\r\nNormal-16.zip,0,Normal,2132,587,97,1,[587]\r\nCP-29.zip,1,CP,3814,5758,29,1,[5758]\r\nCP-18.zip,1,CP,1768,3175,175,1,[3175]\r\nNormal-13.zip,0,Normal,2028,483,89,1,[483]\r\nNCP-16.zip,2,NCP,454,2062,139,2,\"[2062, 2063]\"\r\nCP-8.zip,1,CP,1333,3706,52,2,\"[3705, 3706]\"\r\nCP-25.zip,1,CP,737,3099,84,1,[3099]\r\nNCP-9.zip,2,NCP,2683,2653,46,1,[2653]\r\nNormal-11.zip,0,Normal,1958,413,90,1,[413]\r\nNormal-7.zip,0,Normal,1855,310,86,1,[310]\r\nNCP-10.zip,2,NCP,282,1710,120,2,\"[1710, 1711]\"\r\nNCP-8.zip,2,NCP,252,1650,139,2,\"[1650, 
1651]\"\r\nNCP-3.zip,2,NCP,133,1411,41,2,\"[1410, 1411]\"\r\nCP-21.zip,1,CP,588,2950,116,1,[2950]\r\nNormal-15.zip,0,Normal,2094,549,78,1,[549]\r\nNCP-20.zip,2,NCP,562,2282,113,2,\"[2282, 2283]\"\r\nNormal-5.zip,0,Normal,806,241,104,1,[241]\r\nCP-3.zip,1,CP,1145,3363,169,1,[3363]\r\nNCP-28.zip,2,NCP,847,2365,53,1,[2365]\r\nNCP-4.zip,2,NCP,143,1431,54,2,\"[1430, 1431]\"\r\nNCP-15.zip,2,NCP,407,1963,124,2,\"[1963, 1964]\"\r\nNormal-6.zip,0,Normal,1817,272,85,1,[272]\r\nCP-32.zip,1,CP,1089,3224,90,1,[3224]\r\nNCP-22.zip,2,NCP,834,2347,194,2,\"[2347, 2348]\"\r\nCP-9.zip,1,CP,1381,3816,66,3,\"[3815, 3816, 3817]\"\r\nNormal-8.zip,0,Normal,1866,321,75,1,[321]\r\nNCP-22.zip,2,NCP,86,1306,50,2,\"[1305, 1306]\"\r\nCP-26.zip,1,CP,3725,5660,251,2,\"[5660, 5661]\"\r\nNCP-18.zip,2,NCP,497,2150,126,2,\"[2150, 2151]\"\r\nNCP-27.zip,2,NCP,1043,2615,45,1,[2615]\r\nCP-4.zip,1,CP,1167,3385,149,1,[3385]\r\nNormal-4.zip,0,Normal,782,217,340,1,[217]\r\nNCP-15.zip,2,NCP,421,1995,161,2,\"[1995, 1996]\"\r\nNormal-9.zip,0,Normal,1897,352,88,1,[352]\r\nNCP-13.zip,2,NCP,365,1882,50,2,\"[1881, 1882]\"\r\nCP-1.zip,1,CP,1067,3106,62,1,[3106]\r\nCP-22.zip,1,CP,642,3004,128,1,[3004]\r\nCP-20.zip,1,CP,2668,3258,52,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nCP-10.zip,1,CP,1406,3875,60,2,\"[3874, 3875]\"\r\nCP-1.zip,1,CP,10,3158,285,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]\"\r\nNCP-21.zip,2,NCP,60,1254,59,2,\"[1253, 1254]\"\r\nNormal-26.zip,0,Normal,3884,5396,62,2,\"[5396, 5397]\"\r\nNCP-25.zip,2,NCP,3710,5537,66,1,[5537]\r\nCP-9.zip,1,CP,1371,3795,60,3,\"[3794, 3795, 3796]\"\r\nCP-20.zip,1,CP,2450,2928,92,2,\"[2928, 2929]\"\r\nNCP-4.zip,2,NCP,166,1476,139,2,\"[1476, 1477]\"\r\nNCP-20.zip,2,NCP,554,2266,54,2,\"[2265, 2266]\"\r\nNCP-18.zip,2,NCP,491,2139,62,2,\"[2138, 2139]\"\r\nCP-2.zip,1,CP,1098,3316,171,1,[3316]\r\nCP-12.zip,1,CP,1465,4010,67,2,\"[4009, 4010]\"\r\nNCP-20.zip,2,NCP,548,2254,61,2,\"[2253, 
2254]\"\r\nNormal-16.zip,0,Normal,2150,605,88,1,[605]\r\nNCP-16.zip,2,NCP,451,2056,51,3,\"[2056, 2057, 2058]\"\r\nNormal-11.zip,0,Normal,1965,420,88,1,[420]\r\nNCP-1.zip,2,NCP,101,1339,136,2,\"[1339, 1340]\"\r\nNormal-12.zip,0,Normal,2008,463,92,1,[463]\r\nCP-10.zip,1,CP,1402,3867,55,3,\"[3865, 3866, 3867]\"\r\nNCP-2.zip,2,NCP,122,1386,62,2,\"[1385, 1386]\"\r\nCP-20.zip,1,CP,2457,2941,108,1,[2941]\r\nNCP-14.zip,2,NCP,38,1208,137,2,\"[1208, 1209]\"\r\nNormal-10.zip,0,Normal,1933,388,103,1,[388]\r\nCP-1.zip,1,CP,10,3152,297,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]\"\r\nNCP-20.zip,2,NCP,562,2283,48,2,\"[2282, 2283]\"\r\nNCP-12.zip,2,NCP,335,1819,55,2,\"[1818, 1819]\"\r\nNCP-21.zip,2,NCP,579,2316,63,2,\"[2315, 2316]\"\r\nNormal-7.zip,0,Normal,1856,311,80,1,[311]\r\nNCP-18.zip,2,NCP,506,2169,51,2,\"[2168, 2169]\"\r\nCP-8.zip,1,CP,1339,3719,59,2,\"[3718, 3719]\"\r\nCP-18.zip,1,CP,1652,4340,25,1,[4340]\r\nNCP-11.zip,2,NCP,296,1737,139,2,\"[1737, 1738]\"\r\nNormal-8.zip,0,Normal,1886,341,84,1,[341]\r\nNCP-8.zip,2,NCP,250,1647,60,2,\"[1646, 1647]\"\r\nCP-14.zip,1,CP,1537,4184,53,3,\"[4182, 4183, 4184]\"\r\nNCP-17.zip,2,NCP,486,2128,64,2,\"[2127, 2128]\"\r\nCP-8.zip,1,CP,1335,3711,62,3,\"[3709, 3710, 3711]\"\r\nCP-27.zip,1,CP,3739,5683,19,1,[5683]\r\nNCP-25.zip,2,NCP,3950,5464,41,1,[5464]\r\nCP-12.zip,1,CP,1474,4029,62,2,\"[4029, 4030]\"\r\nNormal-10.zip,0,Normal,1946,401,93,1,[401]\r\nNCP-30.zip,2,NCP,947,2490,41,1,[2490]\r\nNCP-14.zip,2,NCP,371,1893,141,2,\"[1893, 1894]\"\r\nNCP-8.zip,2,NCP,2676,2694,54,1,[2694]\r\nNCP-1.zip,2,NCP,1011,2574,117,2,\"[2574, 2575]\"\r\nNormal-9.zip,0,Normal,1906,361,93,1,[361]\r\nNCP-4.zip,2,NCP,147,1439,72,2,\"[1438, 1439]\"\r\nCP-12.zip,1,CP,1485,4058,49,3,\"[4056, 4057, 4058]\"\r\nNormal-7.zip,0,Normal,1838,293,86,1,[293]\r\nCP-25.zip,1,CP,9,3150,72,4,\"[3148, 3149, 3150, 3151]\"\r\nNCP-12.zip,2,NCP,330,1809,64,2,\"[1808, 1809]\"\r\nNCP-8.zip,2,NCP,267,1681,54,2,\"[1680, 1681]\"\r\nNCP-20.zip,2,NCP,553,2263,137,2,\"[2263, 
2264]\"\r\nNCP-29.zip,2,NCP,893,2433,24,2,\"[2432, 2433]\"\r\nNCP-21.zip,2,NCP,582,2321,128,2,\"[2321, 2322]\"\r\nNormal-24.zip,0,Normal,2642,152,38,1,[152]\r\nCP-25.zip,1,CP,726,3088,183,1,[3088]\r\nNCP-5.zip,2,NCP,171,1487,60,2,\"[1486, 1487]\"\r\nCP-22.zip,1,CP,632,2994,132,1,[2994]\r\nNormal-7.zip,0,Normal,1850,305,99,1,[305]\r\nNCP-30.zip,2,NCP,945,2488,45,1,[2488]\r\nNormal-19.zip,0,Normal,2244,699,98,1,[699]\r\nCP-1.zip,1,CP,1073,3116,52,1,[3116]\r\nNormal-21.zip,0,Normal,2310,765,91,1,[765]\r\nCP-1.zip,1,CP,10,3153,297,9,\"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]\"\r\nCP-1.zip,1,CP,1075,3119,70,2,\"[3118, 3119]\"\r\nCP-12.zip,1,CP,1470,4020,54,2,\"[4020, 4021]\"\r\nNCP-26.zip,2,NCP,3997,5519,56,1,[5519]\r\nNCP-10.zip,2,NCP,274,1694,160,2,\"[1694, 1695]\"\r\nNormal-15.zip,0,Normal,2089,544,98,1,[544]\r\nCP-24.zip,1,CP,681,3043,102,1,[3043]\r\nNCP-20.zip,2,NCP,573,2305,63,2,\"[2304, 2305]\"\r\nCP-15.zip,1,CP,1557,4233,43,2,\"[4232, 4233]\"\r\nNCP-30.zip,2,NCP,990,2543,59,1,[2543]\r\nCP-7.zip,1,CP,1305,3640,20,2,\"[3640, 3641]\"\r\nNCP-5.zip,2,NCP,183,1510,123,2,\"[1510, 1511]\"\r\nCP-15.zip,1,CP,1582,4270,20,1,[4270]\r\nCP-29.zip,1,CP,3817,5761,25,1,[5761]\r\nNCP-20.zip,2,NCP,56,1245,164,2,\"[1245, 1246]\"\r\nNCP-21.zip,2,NCP,58,1250,55,2,\"[1249, 1250]\"\r\nCP-8.zip,1,CP,1335,3710,62,3,\"[3709, 3710, 3711]\"\r\nNormal-3.zip,0,Normal,1766,1149,60,3,\"[1149, 1150, 1151]\"\r\nNCP-10.zip,2,NCP,2716,2709,49,1,[2709]\r\nCP-10.zip,1,CP,1402,3865,131,3,\"[3865, 3866, 3867]\"\r\nCP-10.zip,1,CP,1391,3841,59,4,\"[3839, 3840, 3841, 3842]\"\r\nNormal-22.zip,0,Normal,2594,104,42,1,[104]\r\nCP-26.zip,1,CP,3733,5675,174,3,\"[5673, 5674, 5675]\"\r\nNormal-25.zip,0,Normal,3715,5345,30,1,[5345]\r\nNormal-3.zip,0,Normal,762,197,363,1,[197]\r\nNCP-15.zip,2,NCP,420,1994,71,2,\"[1993, 1994]\"\r\nNormal-12.zip,0,Normal,1996,451,90,1,[451]\r\nNCP-22.zip,2,NCP,885,2423,195,2,\"[2422, 
2423]\"\r\nNCP-29.zip,2,NCP,921,2463,36,1,[2463]\r\nNormal-25.zip,0,Normal,3848,5360,192,1,[5360]\r\nCP-28.zip,1,CP,3776,5720,30,1,[5720]\r\nNCP-15.zip,2,NCP,402,1953,148,2,\"[1953, 1954]\"\r\nNormal-19.zip,0,Normal,2232,687,99,1,[687]\r\nCP-11.zip,1,CP,1447,3968,63,2,\"[3967, 3968]\"\r\nNormal-17.zip,0,Normal,2176,631,91,1,[631]\r\nNCP-12.zip,2,NCP,315,1778,46,2,\"[1777, 1778]\"\r\nCP-2.zip,1,CP,1102,3320,182,1,[3320]\r\nNCP-14.zip,2,NCP,373,1897,122,2,\"[1897, 1898]\"\r\nCP-4.zip,1,CP,1175,3393,189,1,[3393]\r\nNCP-14.zip,2,NCP,392,1934,143,2,\"[1934, 1935]\"\r\nCP-8.zip,1,CP,1321,3679,58,2,\"[3678, 3679]\"\r\nNCP-16.zip,2,NCP,430,2013,152,2,\"[2013, 2014]\"\r\nNCP-26.zip,2,NCP,3988,5512,53,1,[5512]\r\nNormal-22.zip,0,Normal,2316,771,92,1,[771]\r\nCP-14.zip,1,CP,1531,4170,59,2,\"[4169, 4170]\"\r\nNormal-3.zip,0,Normal,748,183,261,1,[183]\r\nNCP-23.zip,2,NCP,943,2486,334,1,[2486]\r\nNormal-18.zip,0,Normal,2202,657,82,1,[657]\r\nCP-27.zip,1,CP,3735,5679,26,1,[5679]\r\nNCP-15.zip,2,NCP,409,1967,153,2,\"[1967, 1968]\"\r\nCP-4.zip,1,CP,1171,3389,180,1,[3389]\r\nCP-11.zip,1,CP,1452,3977,56,2,\"[3977, 3978]\"\r\nNormal-1.zip,0,Normal,1684,875,71,5,\"[870, 871, 873, 874, 875]\"\r\nCP-8.zip,1,CP,1333,3705,52,2,\"[3705, 3706]\"\r\nNCP-3.zip,2,NCP,135,1414,138,2,\"[1414, 1415]\"\r\nNCP-25.zip,2,NCP,3965,5506,53,1,[5506]\r\nNCP-8.zip,2,NCP,258,1662,135,2,\"[1662, 1663]\"\r\nNormal-10.zip,0,Normal,1926,381,87,1,[381]\r\nCP-16.zip,1,CP,1596,4284,22,1,[4284]\r\nCP-14.zip,1,CP,1554,4226,41,2,\"[4226, 4227]\"\r\nCP-26.zip,1,CP,3645,5605,38,1,[5605]\r\nCP-2.zip,1,CP,1110,3328,143,1,[3328]\r\nNCP-22.zip,2,NCP,81,1296,53,2,\"[1295, 1296]\"\r\nNormal-1.zip,0,Normal,1685,877,65,4,\"[877, 878, 879, 880]\"\r\nNCP-29.zip,2,NCP,923,2465,19,1,[2465]\r\nNCP-14.zip,2,NCP,399,1948,149,2,\"[1948, 1949]\"\r\nNCP-18.zip,2,NCP,510,2176,102,2,\"[2176, 2177]\"\r\nNCP-20.zip,2,NCP,558,2274,51,2,\"[2273, 2274]\"\r\nNormal-2.zip,0,Normal,1762,1131,70,2,\"[1130, 
1131]\"\r\nCP-19.zip,1,CP,2434,2898,102,3,\"[2898, 2899, 2900]\"\r\nNormal-19.zip,0,Normal,2219,674,106,1,[674]\r\nNormal-8.zip,0,Normal,1869,324,94,1,[324]\r\nNCP-21.zip,2,NCP,70,1272,120,2,\"[1272, 1273]\"\r\nNCP-10.zip,2,NCP,2710,2703,48,1,[2703]\r\nNormal-9.zip,0,Normal,1904,359,94,1,[359]\r\nNCP-20.zip,2,NCP,564,2287,60,2,\"[2286, 2287]\"\r\nNCP-15.zip,2,NCP,424,2001,161,2,\"[2001, 2002]\"\r\nCP-14.zip,1,CP,1529,4166,42,3,\"[4165, 4166, 4167]\"\r\nNormal-16.zip,0,Normal,2138,593,72,1,[593]\r\nCP-16.zip,1,CP,1613,4301,27,1,[4301]\r\nCP-24.zip,1,CP,697,3059,114,1,[3059]\r\nCP-10.zip,1,CP,1390,3836,215,3,\"[3836, 3837, 3838]\"\r\nNormal-6.zip,0,Normal,1805,260,79,1,[260]\r\nCP-10.zip,1,CP,1390,3837,56,3,\"[3836, 3837, 3838]\"\r\nCP-3.zip,1,CP,1150,3368,214,1,[3368]\r\nCP-2.zip,1,CP,1116,3334,183,1,[3334]\r\nNormal-14.zip,0,Normal,2057,512,78,1,[512]\r\nNCP-19.zip,2,NCP,532,2223,58,2,\"[2222, 2223]\"\r\nCP-29.zip,1,CP,3810,5754,24,1,[5754]\r\nCP-14.zip,1,CP,1539,4188,131,3,\"[4188, 4189, 4190]\"\r\nCP-10.zip,1,CP,1385,3826,64,2,\"[3825, 3826]\"\r\nNCP-29.zip,2,NCP,929,2471,21,1,[2471]\r\nNCP-28.zip,2,NCP,856,2377,229,2,\"[2376, 2377]\"\r\nNCP-15.zip,2,NCP,408,1966,55,2,\"[1965, 1966]\"\r\nCP-7.zip,1,CP,1319,3675,61,2,\"[3674, 3675]\"\r\nNCP-1.zip,2,NCP,1022,2591,48,1,[2591]\r\nNormal-20.zip,0,Normal,2254,709,75,1,[709]\r\nNCP-22.zip,2,NCP,862,2385,33,1,[2385]\r\nCP-29.zip,1,CP,3812,5756,27,1,[5756]\r\nCP-11.zip,1,CP,1447,3967,63,2,\"[3967, 3968]\"\r\nCP-15.zip,1,CP,1556,4230,40,2,\"[4230, 4231]\"\r\nCP-1.zip,1,CP,1080,3125,64,1,[3125]\r\nNormal-4.zip,0,Normal,778,213,114,1,[213]\r\nCP-14.zip,1,CP,1529,4167,42,3,\"[4165, 4166, 4167]\"\r\nCP-2.zip,1,CP,11,3167,283,7,\"[3161, 3162, 3163, 3164, 3165, 3166, 3167]\"\r\nNCP-20.zip,2,NCP,549,2256,36,2,\"[2255, 2256]\"\r\nNCP-3.zip,2,NCP,1292,2733,66,1,[2733]\r\nNormal-13.zip,0,Normal,2047,502,93,1,[502]\r\nNCP-20.zip,2,NCP,549,2255,83,2,\"[2255, 2256]\"\r\nCP-15.zip,1,CP,1563,4246,122,3,\"[4245, 4246, 
4247]\"\r\nNCP-25.zip,2,NCP,3956,5469,49,1,[5469]\r\nNCP-22.zip,2,NCP,833,2346,484,1,[2346]\r\nCP-12.zip,1,CP,1487,4061,163,3,\"[4061, 4062, 4063]\"\r\nCP-7.zip,1,CP,1306,3642,52,3,\"[3642, 3643, 3644]\"\r\nNCP-17.zip,2,NCP,47,1228,58,2,\"[1227, 1228]\"\r\nCP-8.zip,1,CP,1338,3716,67,2,\"[3716, 3717]\"\r\nNormal-25.zip,0,Normal,3711,5341,27,1,[5341]\r\nNCP-16.zip,2,NCP,452,2059,63,1,[2059]\r\nNormal-23.zip,0,Normal,2604,114,36,1,[114]\r\nNCP-28.zip,2,NCP,849,2368,224,1,[2368]\r\nNCP-29.zip,2,NCP,886,2424,52,1,[2424]\r\nNCP-28.zip,2,NCP,875,2408,218,1,[2408]\r\nNCP-20.zip,2,NCP,573,2304,151,2,\"[2304, 2305]\"\r\nNCP-22.zip,2,NCP,83,1300,70,2,\"[1299, 1300]\"\r\nNormal-14.zip,0,Normal,2056,511,84,1,[511]\r\nNormal-7.zip,0,Normal,1844,299,93,1,[299]\r\nCP-13.zip,1,CP,1494,4083,154,3,\"[4083, 4084, 4085]\"\r\nCP-5.zip,1,CP,1201,3419,171,1,[3419]\r\nNCP-23.zip,2,NCP,897,2438,40,1,[2438]\r\nNormal-27.zip,0,Normal,3914,5456,55,2,\"[5456, 5457]\"\r\nCP-9.zip,1,CP,1354,3751,181,3,\"[3751, 3752, 3753]\"\r\nNCP-29.zip,2,NCP,899,2440,34,2,\"[2440, 2441]\"\r\nCP-10.zip,1,CP,1414,3891,151,3,\"[3891, 3892, 3893]\"\r\nCP-14.zip,1,CP,1543,4202,57,3,\"[4200, 4201, 4202]\"\r\nNormal-25.zip,0,Normal,3837,5349,208,1,[5349]\r\nNCP-10.zip,2,NCP,272,1691,64,2,\"[1690, 1691]\"\r\nNormal-9.zip,0,Normal,1905,360,93,1,[360]\r\nCP-8.zip,1,CP,1340,3721,64,2,\"[3720, 3721]\"\r\nNCP-5.zip,2,NCP,19,1170,146,2,\"[1170, 1171]\"\r\nNormal-2.zip,0,Normal,1738,1041,75,1,[1041]\r\nNCP-2.zip,2,NCP,108,1354,58,2,\"[1353, 1354]\"\r\nNormal-25.zip,0,Normal,3844,5356,201,1,[5356]\r\nCP-20.zip,1,CP,2459,2945,108,1,[2945]\r\nCP-10.zip,1,CP,1414,3892,63,3,\"[3891, 3892, 3893]\"\r\nNormal-18.zip,0,Normal,2201,656,66,1,[656]\r\nNCP-21.zip,2,NCP,78,1289,166,2,\"[1289, 1290]\"\r\nCP-18.zip,1,CP,1776,3539,76,6,\"[3534, 3535, 3536, 3537, 3538, 3539]\"\r\nNCP-1.zip,2,NCP,1010,2572,126,2,\"[2572, 2573]\"\r\nCP-11.zip,1,CP,1441,3951,203,3,\"[3951, 3952, 3953]\"\r\nCP-13.zip,1,CP,1512,4125,50,2,\"[4125, 
4126]\"\r\nCP-30.zip,1,CP,3934,5640,53,3,\"[5638, 5639, 5640]\"\r\nNCP-4.zip,2,NCP,143,1430,128,2,\"[1430, 1431]\"\r\nNormal-17.zip,0,Normal,2166,621,93,1,[621]\r\nNCP-22.zip,2,NCP,83,1299,167,2,\"[1299, 1300]\"\r\nCP-29.zip,1,CP,3804,5748,29,1,[5748]\r\nCP-22.zip,1,CP,624,2986,90,1,[2986]\r\nNCP-7.zip,2,NCP,231,1607,58,2,\"[1606, 1607]\"\r\nNCP-8.zip,2,NCP,258,1663,57,2,\"[1662, 1663]\"\r\nNormal-10.zip,0,Normal,1956,411,89,1,[411]\r\nNCP-4.zip,2,NCP,165,1475,55,2,\"[1474, 1475]\"\r\nNormal-2.zip,0,Normal,1753,1091,60,7,\"[1086, 1087, 1088, 1089, 1090, 1091, 1092]\"\r\nCP-6.zip,1,CP,1247,3465,218,1,[3465]\r\nCP-17.zip,1,CP,1644,4332,23,1,[4332]\r\nNCP-5.zip,2,NCP,188,1520,134,2,\"[1520, 1521]\"\r\nCP-13.zip,1,CP,1509,4118,233,3,\"[4118, 4119, 4120]\"\r\nCP-19.zip,1,CP,2434,2899,102,3,\"[2898, 2899, 2900]\"\r\nNormal-27.zip,0,Normal,3914,5457,55,2,\"[5456, 5457]\"\r\nNCP-3.zip,2,NCP,133,1410,100,2,\"[1410, 1411]\"\r\nCP-24.zip,1,CP,690,3052,134,1,[3052]\r\nNCP-6.zip,2,NCP,208,1560,134,2,\"[1560, 1561]\"\r\nNormal-26.zip,0,Normal,3872,5384,29,1,[5384]\r\nCP-7.zip,1,CP,1258,3476,202,1,[3476]\r\nNCP-4.zip,2,NCP,154,1453,47,2,\"[1452, 1453]\"\r\nCP-8.zip,1,CP,1335,3709,207,3,\"[3709, 3710, 3711]\"\r\nCP-7.zip,1,CP,1305,3641,50,2,\"[3640, 3641]\"\r\nCP-25.zip,1,CP,716,3078,640,1,[3078]\r\nNormal-2.zip,0,Normal,1761,1125,45,5,\"[1125, 1126, 1127, 1128, 1129]\"\r\nNCP-14.zip,2,NCP,38,1209,57,2,\"[1208, 1209]\"\r\nNormal-1.zip,0,Normal,1685,878,65,4,\"[877, 878, 879, 880]\"\r\nNCP-17.zip,2,NCP,467,2090,58,2,\"[2089, 2090]\"\r\nCP-14.zip,1,CP,1539,4189,54,3,\"[4188, 4189, 4190]\"\r\nNCP-16.zip,2,NCP,454,2063,58,2,\"[2062, 2063]\"\r\nCP-13.zip,1,CP,1491,4076,48,3,\"[4074, 4075, 4076]\"\r\nNormal-4.zip,0,Normal,794,229,341,1,[229]\r\nNCP-19.zip,2,NCP,521,2199,58,2,\"[2198, 2199]\"\r\nCP-7.zip,1,CP,1311,3656,67,3,\"[3655, 3656, 3657]\"\r\nNormal-22.zip,0,Normal,2584,94,44,1,[94]\r\nCP-23.zip,1,CP,678,3040,46,1,[3040]\r\nCP-14.zip,1,CP,1539,4190,54,3,\"[4188, 4189, 
4190]\"\r\nCP-30.zip,1,CP,3937,5644,55,2,\"[5643, 5644]\"\r\nNCP-15.zip,2,NCP,427,2007,132,2,\"[2007, 2008]\"\r\nNCP-28.zip,2,NCP,843,2358,279,1,[2358]\r\nNCP-14.zip,2,NCP,375,1903,49,3,\"[1901, 1902, 1903]\"\r\nNCP-11.zip,2,NCP,306,1759,153,2,\"[1759, 1760]\"\r\nNCP-16.zip,2,NCP,44,1221,124,2,\"[1221, 1222]\"\r\nNCP-8.zip,2,NCP,256,1659,58,2,\"[1658, 1659]\"\r\nCP-8.zip,1,CP,1338,3717,67,2,\"[3716, 3717]\"\r\nCP-18.zip,1,CP,1780,3553,67,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nCP-7.zip,1,CP,1267,3485,151,1,[3485]\r\nCP-13.zip,1,CP,1509,4119,118,3,\"[4118, 4119, 4120]\"\r\nNormal-3.zip,0,Normal,1766,1151,62,3,\"[1149, 1150, 1151]\"\r\nCP-10.zip,1,CP,1405,3873,60,2,\"[3872, 3873]\"\r\nCP-1.zip,1,CP,1079,3124,63,1,[3124]\r\nCP-18.zip,1,CP,1780,3559,69,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nNormal-7.zip,0,Normal,1852,307,94,1,[307]\r\nCP-5.zip,1,CP,1195,3413,247,1,[3413]\r\nNCP-20.zip,2,NCP,556,2270,53,2,\"[2269, 2270]\"\r\nNCP-2.zip,2,NCP,108,1353,139,2,\"[1353, 1354]\"\r\nNCP-16.zip,2,NCP,445,2045,58,2,\"[2044, 2045]\"\r\nCP-13.zip,1,CP,1512,4126,50,2,\"[4125, 4126]\"\r\nNCP-21.zip,2,NCP,64,1262,55,2,\"[1261, 1262]\"\r\nCP-5.zip,1,CP,1211,3429,143,1,[3429]\r\nNCP-1.zip,2,NCP,1042,2614,143,2,\"[2613, 2614]\"\r\nNCP-21.zip,2,NCP,73,1280,55,3,\"[1278, 1279, 1280]\"\r\nCP-9.zip,1,CP,1364,3776,133,3,\"[3776, 3777, 3778]\"\r\nNCP-21.zip,2,NCP,58,1249,131,2,\"[1249, 1250]\"\r\nCP-20.zip,1,CP,2668,3250,44,11,\"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]\"\r\nNCP-21.zip,2,NCP,73,1279,57,3,\"[1278, 1279, 1280]\"\r\nCP-26.zip,1,CP,3733,5674,159,3,\"[5673, 5674, 5675]\"\r\nNormal-19.zip,0,Normal,2247,702,86,1,[702]\r\nNCP-28.zip,2,NCP,867,2394,161,1,[2394]\r\nCP-22.zip,1,CP,633,2995,114,1,[2995]\r\nCP-9.zip,1,CP,1371,3796,60,3,\"[3794, 3795, 3796]\"\r\nNCP-22.zip,2,NCP,86,1305,117,2,\"[1305, 1306]\"\r\nNCP-14.zip,2,NCP,40,1213,63,2,\"[1212, 
1213]\"\r\nNormal-26.zip,0,Normal,3892,5415,72,1,[5415]\r\nCP-7.zip,1,CP,1306,3644,237,3,\"[3642, 3643, 3644]\"\r\nCP-24.zip,1,CP,702,3064,78,1,[3064]\r\nNCP-26.zip,2,NCP,3975,5483,44,1,[5483]\r\nCP-4.zip,1,CP,1164,3382,193,1,[3382]\r\nNormal-11.zip,0,Normal,1960,415,98,1,[415]\r\nCP-5.zip,1,CP,1203,3421,231,1,[3421]\r\nCP-19.zip,1,CP,2434,2900,104,3,\"[2898, 2899, 2900]\"\r\nNCP-29.zip,2,NCP,890,2429,203,1,[2429]\r\nNCP-16.zip,2,NCP,448,2050,139,2,\"[2050, 2051]\"\r\nCP-18.zip,1,CP,1780,3555,60,14,\"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]\"\r\nCP-12.zip,1,CP,1457,3991,69,1,[3991]\r\nNormal-3.zip,0,Normal,756,191,106,1,[191]\r\nNCP-29.zip,2,NCP,900,2442,506,1,[2442]\r\nNCP-17.zip,2,NCP,476,2107,127,2,\"[2107, 2108]\"\r\nCP-28.zip,1,CP,3794,5738,26,1,[5738]\r\nCP-23.zip,1,CP,669,3031,70,1,[3031]\r\nNormal-9.zip,0,Normal,1911,366,96,1,[366]\r\nNormal-9.zip,0,Normal,1919,374,99,1,[374]\r\nNCP-12.zip,2,NCP,335,1818,129,2,\"[1818, 1819]\"\r\nCP-18.zip,1,CP,1651,4339,31,1,[4339]\r\nNormal-4.zip,0,Normal,798,233,122,1,[233]\r\nNCP-18.zip,2,NCP,508,2173,61,2,\"[2172, 2173]\"\r\nNCP-21.zip,2,NCP,67,1266,168,2,\"[1266, 1267]\"\r\nNCP-6.zip,2,NCP,214,1573,60,2,\"[1572, 1573]\"\r\nCP-10.zip,1,CP,1405,3872,60,2,\"[3872, 3873]\"\r\nNCP-6.zip,2,NCP,208,1561,56,2,\"[1560, 1561]\"\r\nNCP-14.zip,2,NCP,373,1898,52,2,\"[1897, 1898]\"\r\nNCP-3.zip,2,NCP,1281,2722,65,1,[2722]\r\nCP-24.zip,1,CP,707,3069,72,1,[3069]\r\nNCP-28.zip,2,NCP,831,2344,278,1,[2344]\r\nNormal-17.zip,0,Normal,2179,634,101,1,[634]\r\nNCP-21.zip,2,NCP,60,1253,141,2,\"[1253, 1254]\"\r\nNCP-8.zip,2,NCP,259,1665,65,2,\"[1664, 1665]\"\r\nNCP-11.zip,2,NCP,311,1770,55,2,\"[1769, 1770]\"\r\nNCP-27.zip,2,NCP,1050,2623,46,2,\"[2623, 2624]\"\r\nNCP-18.zip,2,NCP,490,2137,62,2,\"[2136, 2137]\"\r\nNormal-27.zip,0,Normal,3900,5431,64,2,\"[5431, 5432]\"\r\nNormal-15.zip,0,Normal,2110,565,83,1,[565]\r\nNCP-13.zip,2,NCP,368,1887,129,2,\"[1887, 
1888]\"\r\nNCP-27.zip,2,NCP,817,2326,120,1,[2326]\r\nCP-15.zip,1,CP,1567,4255,59,2,\"[4254, 4255]\"\r\nNCP-5.zip,2,NCP,178,1500,124,2,\"[1500, 1501]\"\r\nNCP-13.zip,2,NCP,345,1841,147,2,\"[1841, 1842]\"\r\nNormal-2.zip,0,Normal,1761,1128,60,5,\"[1125, 1126, 1127, 1128, 1129]\"\r\nCP-8.zip,1,CP,1343,3727,56,2,\"[3726, 3727]\"\r\nNCP-30.zip,2,NCP,936,2478,21,1,[2478]\r\nNCP-11.zip,2,NCP,306,1760,64,2,\"[1759, 1760]\"\r\nNCP-17.zip,2,NCP,487,2129,167,2,\"[2129, 2130]\"\r\nCP-30.zip,1,CP,3930,5629,62,2,\"[5628, 5629]\"\r\nNCP-9.zip,2,NCP,2692,2700,48,1,[2700]\r\nNCP-20.zip,2,NCP,556,2269,125,2,\"[2269, 2270]\"\r\nCP-18.zip,1,CP,1775,3531,58,4,\"[3530, 3531, 3532, 3533]\"\r\nNCP-23.zip,2,NCP,896,2437,39,1,[2437]\r\nCP-21.zip,1,CP,5,3509,275,1,[3509]\r\nNormal-19.zip,0,Normal,2217,672,71,1,[672]\r\nNCP-1.zip,2,NCP,1010,2573,126,2,\"[2572, 2573]\"\r\nNCP-1.zip,2,NCP,100,1337,139,2,\"[1337, 1338]\"\r\nNCP-26.zip,2,NCP,3998,5495,41,1,[5495]\r\nCP-25.zip,1,CP,711,3073,112,1,[3073]\r\nCP-24.zip,1,CP,699,3061,64,1,[3061]\r\nCP-4.zip,1,CP,1173,3391,201,1,[3391]\r\nCP-27.zip,1,CP,3740,5684,23,1,[5684]\r\nCP-16.zip,1,CP,1590,4278,20,1,[4278]\r\nNormal-2.zip,0,Normal,1762,1130,70,2,\"[1130, 1131]\"\r\nNormal-1.zip,0,Normal,1679,833,66,6,\"[833, 834, 835, 836, 837, 838]\"\r\nNCP-29.zip,2,NCP,928,2470,25,1,[2470]\r\nCP-18.zip,1,CP,1775,3533,57,4,\"[3530, 3531, 3532, 3533]\"\r\nNormal-3.zip,0,Normal,766,201,94,1,[201]\r\nNormal-11.zip,0,Normal,1964,419,100,1,[419]\r\nNCP-9.zip,2,NCP,2690,2657,48,1,[2657]\r\nNCP-21.zip,2,NCP,78,1290,69,2,\"[1289, 1290]\"\r\nNormal-16.zip,0,Normal,2147,602,95,1,[602]\r\nNCP-19.zip,2,NCP,544,2246,62,2,\"[2245, 2246]\"\r\nNormal-27.zip,0,Normal,3900,5432,64,2,\"[5431, 5432]\"\r\nNormal-8.zip,0,Normal,1860,315,92,1,[315]\r\nCP-21.zip,1,CP,601,2963,104,1,[2963]\r\nCP-2.zip,1,CP,11,3164,287,7,\"[3161, 3162, 3163, 3164, 3165, 3166, 3167]\"\r\nCP-15.zip,1,CP,1563,4245,241,3,\"[4245, 4246, 4247]\"\r\n"
  },
  {
    "path": "Finetune/CC-CCII/csv/CC_CCII_fold2_valid.csv",
    "content": "zip_file,target,label,patient_id,scan_id,n_slice,scan_count,all_scan_ids\r\nNormal-2.zip,0,Normal,1740,1050,21,8,\"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]\"\r\nCP-10.zip,1,CP,1387,3830,51,2,\"[3829, 3830]\"\r\nNCP-10.zip,2,NCP,2719,2675,44,1,[2675]\r\nCP-1.zip,1,CP,1065,3104,58,1,[3104]\r\nCP-10.zip,1,CP,1392,3843,62,2,\"[3843, 3844]\"\r\nCP-13.zip,1,CP,1508,4117,57,3,\"[4115, 4116, 4117]\"\r\nNCP-22.zip,2,NCP,863,2387,282,2,\"[2386, 2387]\"\r\nNormal-3.zip,0,Normal,763,198,102,1,[198]\r\nNormal-23.zip,0,Normal,2635,145,27,1,[145]\r\nNCP-20.zip,2,NCP,572,2303,58,2,\"[2302, 2303]\"\r\nNormal-1.zip,0,Normal,1683,862,65,6,\"[861, 862, 864, 865, 868, 869]\"\r\nCP-10.zip,1,CP,1398,3856,44,2,\"[3856, 3857]\"\r\nCP-15.zip,1,CP,1566,4252,54,2,\"[4252, 4253]\"\r\nNCP-10.zip,2,NCP,280,1707,51,2,\"[1706, 1707]\"\r\nCP-19.zip,1,CP,1785,3187,67,6,\"[3186, 3187, 3188, 3189, 3190, 3191]\"\r\nCP-15.zip,1,CP,1570,4258,22,1,[4258]\r\nCP-10.zip,1,CP,1413,3890,66,2,\"[3889, 3890]\"\r\nCP-7.zip,1,CP,1303,3618,42,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-19.zip,1,CP,2435,2903,295,3,\"[2901, 2902, 2903]\"\r\nNCP-22.zip,2,NCP,860,2382,212,2,\"[2382, 2383]\"\r\nNCP-22.zip,2,NCP,883,2419,52,2,\"[2419, 2420]\"\r\nNormal-2.zip,0,Normal,1751,1079,61,6,\"[1079, 1080, 1081, 1082, 1083, 1084]\"\r\nNormal-25.zip,0,Normal,3852,5364,195,1,[5364]\r\nNCP-20.zip,2,NCP,559,2275,127,2,\"[2275, 2276]\"\r\nNCP-18.zip,2,NCP,498,2153,58,2,\"[2152, 2153]\"\r\nNormal-27.zip,0,Normal,3911,5448,64,6,\"[5447, 5448, 5449, 5450, 5451, 5452]\"\r\nNormal-17.zip,0,Normal,2158,613,100,1,[613]\r\nNCP-7.zip,2,NCP,246,1639,58,2,\"[1638, 1639]\"\r\nNCP-17.zip,2,NCP,473,2102,61,2,\"[2101, 2102]\"\r\nNormal-2.zip,0,Normal,1732,1025,73,1,[1025]\r\nNCP-10.zip,2,NCP,271,1688,146,2,\"[1688, 1689]\"\r\nCP-7.zip,1,CP,1303,3627,252,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 
3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNCP-11.zip,2,NCP,286,1717,121,2,\"[1717, 1718]\"\r\nNormal-19.zip,0,Normal,2223,678,95,1,[678]\r\nNCP-22.zip,2,NCP,822,2333,31,2,\"[2332, 2333]\"\r\nNCP-28.zip,2,NCP,870,2400,47,2,\"[2399, 2400]\"\r\nNCP-21.zip,2,NCP,75,1284,54,2,\"[1283, 1284]\"\r\nNCP-17.zip,2,NCP,469,2094,66,2,\"[2093, 2094]\"\r\nNCP-8.zip,2,NCP,255,1656,139,2,\"[1656, 1657]\"\r\nNCP-6.zip,2,NCP,211,1566,137,2,\"[1566, 1567]\"\r\nNCP-25.zip,2,NCP,3966,5476,43,1,[5476]\r\nNCP-21.zip,2,NCP,575,2309,61,2,\"[2308, 2309]\"\r\nNormal-2.zip,0,Normal,1740,1045,102,8,\"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]\"\r\nNormal-1.zip,0,Normal,1681,845,69,1,[845]\r\nNCP-11.zip,2,NCP,310,1768,70,2,\"[1767, 1768]\"\r\nNCP-22.zip,2,NCP,87,1307,145,2,\"[1307, 1308]\"\r\nNormal-4.zip,0,Normal,786,221,124,1,[221]\r\nNormal-20.zip,0,Normal,2270,725,86,1,[725]\r\nNCP-18.zip,2,NCP,515,2187,58,2,\"[2186, 2187]\"\r\nNCP-5.zip,2,NCP,172,1488,139,2,\"[1488, 1489]\"\r\nNCP-20.zip,2,NCP,551,2260,65,2,\"[2259, 2260]\"\r\nNCP-21.zip,2,NCP,61,1256,60,2,\"[1255, 1256]\"\r\nCP-13.zip,1,CP,1508,4116,57,3,\"[4115, 4116, 4117]\"\r\nNCP-22.zip,2,NCP,863,2386,228,2,\"[2386, 2387]\"\r\nCP-10.zip,1,CP,1413,3889,67,2,\"[3889, 3890]\"\r\nNormal-2.zip,0,Normal,1740,1047,60,8,\"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]\"\r\nCP-19.zip,1,CP,1785,3188,67,6,\"[3186, 3187, 3188, 3189, 3190, 3191]\"\r\nCP-7.zip,1,CP,1303,3624,224,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNormal-1.zip,0,Normal,1683,868,64,6,\"[861, 862, 864, 865, 868, 869]\"\r\nCP-7.zip,1,CP,1303,3611,257,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-7.zip,1,CP,1303,3610,51,26,\"[3606, 3607, 3608, 3609, 
3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNCP-21.zip,2,NCP,61,1255,142,2,\"[1255, 1256]\"\r\nNormal-1.zip,0,Normal,1683,865,72,6,\"[861, 862, 864, 865, 868, 869]\"\r\nCP-7.zip,1,CP,1303,3630,49,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-10.zip,1,CP,1392,3844,62,2,\"[3843, 3844]\"\r\nCP-15.zip,1,CP,1566,4253,54,2,\"[4252, 4253]\"\r\nNormal-27.zip,0,Normal,3911,5447,65,6,\"[5447, 5448, 5449, 5450, 5451, 5452]\"\r\nNormal-27.zip,0,Normal,3911,5449,64,6,\"[5447, 5448, 5449, 5450, 5451, 5452]\"\r\nNCP-22.zip,2,NCP,87,1308,61,2,\"[1307, 1308]\"\r\nNormal-1.zip,0,Normal,1683,861,65,6,\"[861, 862, 864, 865, 868, 869]\"\r\nCP-7.zip,1,CP,1303,3613,232,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNCP-20.zip,2,NCP,551,2259,154,2,\"[2259, 2260]\"\r\nCP-19.zip,1,CP,1785,3191,79,6,\"[3186, 3187, 3188, 3189, 3190, 3191]\"\r\nNCP-18.zip,2,NCP,515,2186,139,2,\"[2186, 2187]\"\r\nNormal-2.zip,0,Normal,1740,1048,60,8,\"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]\"\r\nCP-7.zip,1,CP,1303,3626,51,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-7.zip,1,CP,1303,3606,49,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-19.zip,1,CP,2435,2902,100,3,\"[2901, 2902, 2903]\"\r\nCP-7.zip,1,CP,1303,3612,49,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-7.zip,1,CP,1303,3619,213,26,\"[3606, 3607, 
3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNCP-5.zip,2,NCP,172,1489,59,2,\"[1488, 1489]\"\r\nCP-7.zip,1,CP,1303,3617,27,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-10.zip,1,CP,1398,3857,44,2,\"[3856, 3857]\"\r\nCP-7.zip,1,CP,1303,3608,55,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNCP-28.zip,2,NCP,870,2399,247,2,\"[2399, 2400]\"\r\nNCP-22.zip,2,NCP,883,2420,200,2,\"[2419, 2420]\"\r\nCP-7.zip,1,CP,1303,3609,271,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-10.zip,1,CP,1387,3829,51,2,\"[3829, 3830]\"\r\nNCP-8.zip,2,NCP,255,1657,58,2,\"[1656, 1657]\"\r\nNormal-2.zip,0,Normal,1740,1051,59,8,\"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]\"\r\nNormal-2.zip,0,Normal,1751,1081,62,6,\"[1079, 1080, 1081, 1082, 1083, 1084]\"\r\nNCP-17.zip,2,NCP,469,2093,159,2,\"[2093, 2094]\"\r\nCP-7.zip,1,CP,1303,3621,230,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-7.zip,1,CP,1303,3607,247,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNormal-27.zip,0,Normal,3911,5452,65,6,\"[5447, 5448, 5449, 5450, 5451, 5452]\"\r\nNCP-17.zip,2,NCP,473,2101,145,2,\"[2101, 2102]\"\r\nCP-19.zip,1,CP,2435,2901,104,3,\"[2901, 2902, 2903]\"\r\nNormal-2.zip,0,Normal,1740,1049,21,8,\"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]\"\r\nCP-19.zip,1,CP,1785,3189,67,6,\"[3186, 3187, 3188, 3189, 3190, 
3191]\"\r\nNCP-10.zip,2,NCP,271,1689,61,2,\"[1688, 1689]\"\r\nCP-7.zip,1,CP,1303,3629,244,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-7.zip,1,CP,1303,3631,242,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNormal-27.zip,0,Normal,3911,5451,65,6,\"[5447, 5448, 5449, 5450, 5451, 5452]\"\r\nNCP-22.zip,2,NCP,822,2332,36,2,\"[2332, 2333]\"\r\nCP-7.zip,1,CP,1303,3622,28,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-19.zip,1,CP,1785,3190,79,6,\"[3186, 3187, 3188, 3189, 3190, 3191]\"\r\nNCP-11.zip,2,NCP,310,1767,169,2,\"[1767, 1768]\"\r\nNormal-2.zip,0,Normal,1751,1080,61,6,\"[1079, 1080, 1081, 1082, 1083, 1084]\"\r\nNCP-21.zip,2,NCP,575,2308,144,2,\"[2308, 2309]\"\r\nCP-19.zip,1,CP,1785,3186,67,6,\"[3186, 3187, 3188, 3189, 3190, 3191]\"\r\nNCP-21.zip,2,NCP,75,1283,128,2,\"[1283, 1284]\"\r\nNCP-11.zip,2,NCP,286,1718,51,2,\"[1717, 1718]\"\r\nCP-7.zip,1,CP,1303,3628,50,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNormal-2.zip,0,Normal,1740,1046,300,8,\"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]\"\r\nCP-7.zip,1,CP,1303,3620,45,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-7.zip,1,CP,1303,3614,27,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNormal-27.zip,0,Normal,3911,5450,68,6,\"[5447, 5448, 5449, 5450, 5451, 
5452]\"\r\nNormal-1.zip,0,Normal,1683,864,72,6,\"[861, 862, 864, 865, 868, 869]\"\r\nNCP-20.zip,2,NCP,572,2302,138,2,\"[2302, 2303]\"\r\nNormal-2.zip,0,Normal,1751,1084,67,6,\"[1079, 1080, 1081, 1082, 1083, 1084]\"\r\nCP-7.zip,1,CP,1303,3625,32,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNormal-2.zip,0,Normal,1740,1052,59,8,\"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]\"\r\nNCP-10.zip,2,NCP,280,1706,121,2,\"[1706, 1707]\"\r\nNCP-18.zip,2,NCP,498,2152,139,2,\"[2152, 2153]\"\r\nCP-7.zip,1,CP,1303,3623,45,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-7.zip,1,CP,1303,3615,44,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nCP-7.zip,1,CP,1303,3616,209,26,\"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]\"\r\nNCP-6.zip,2,NCP,211,1567,58,2,\"[1566, 1567]\"\r\nNormal-2.zip,0,Normal,1751,1083,67,6,\"[1079, 1080, 1081, 1082, 1083, 1084]\"\r\nNCP-22.zip,2,NCP,860,2383,183,2,\"[2382, 2383]\"\r\nNCP-20.zip,2,NCP,559,2276,54,2,\"[2275, 2276]\"\r\nNormal-2.zip,0,Normal,1751,1082,62,6,\"[1079, 1080, 1081, 1082, 1083, 1084]\"\r\nCP-13.zip,1,CP,1508,4115,57,3,\"[4115, 4116, 4117]\"\r\nNormal-1.zip,0,Normal,1683,869,64,6,\"[861, 862, 864, 865, 868, 869]\"\r\nNCP-7.zip,2,NCP,246,1638,139,2,\"[1638, 1639]\"\r\nNormal-12.zip,0,Normal,2015,470,94,1,[470]\r\nNCP-6.zip,2,NCP,206,1557,58,2,\"[1556, 1557]\"\r\nCP-1.zip,1,CP,1096,3314,196,1,[3314]\r\nNCP-16.zip,2,NCP,43,1220,65,2,\"[1219, 1220]\"\r\nNCP-18.zip,2,NCP,499,2155,58,2,\"[2154, 2155]\"\r\nCP-10.zip,1,CP,1409,3881,66,2,\"[3881, 
3882]\"\r\nNormal-4.zip,0,Normal,777,212,83,1,[212]\r\nNCP-9.zip,2,NCP,2708,2701,59,1,[2701]\r\nCP-11.zip,1,CP,1432,3933,60,2,\"[3932, 3933]\"\r\nNCP-4.zip,2,NCP,141,1426,129,2,\"[1426, 1427]\"\r\nCP-23.zip,1,CP,673,3035,76,1,[3035]\r\nNCP-29.zip,2,NCP,879,2414,173,1,[2414]\r\nNCP-19.zip,2,NCP,536,2229,145,2,\"[2229, 2230]\"\r\nNCP-18.zip,2,NCP,504,2165,65,2,\"[2164, 2165]\"\r\nNormal-1.zip,0,Normal,1678,829,34,6,\"[827, 828, 829, 830, 831, 832]\"\r\nNCP-8.zip,2,NCP,264,1674,179,2,\"[1674, 1675]\"\r\nNCP-4.zip,2,NCP,155,1454,139,2,\"[1454, 1455]\"\r\nCP-11.zip,1,CP,1418,3900,180,3,\"[3900, 3901, 3902]\"\r\nNCP-5.zip,2,NCP,194,1532,133,2,\"[1532, 1533]\"\r\nNCP-13.zip,2,NCP,361,1873,143,2,\"[1873, 1874]\"\r\nNormal-1.zip,0,Normal,1710,976,78,2,\"[975, 976]\"\r\nNormal-15.zip,0,Normal,2091,546,106,1,[546]\r\nNCP-19.zip,2,NCP,518,2192,135,2,\"[2192, 2193]\"\r\nNormal-18.zip,0,Normal,2190,645,90,1,[645]\r\nNormal-12.zip,0,Normal,2013,468,87,1,[468]\r\nNCP-11.zip,2,NCP,302,1751,62,2,\"[1750, 1751]\"\r\nNormal-15.zip,0,Normal,2109,564,103,1,[564]\r\nNCP-8.zip,2,NCP,264,1675,75,2,\"[1674, 1675]\"\r\nCP-23.zip,1,CP,653,3015,285,1,[3015]\r\nNCP-7.zip,2,NCP,235,1615,139,2,\"[1615, 1616]\"\r\nCP-19.zip,1,CP,1786,3194,77,3,\"[3192, 3193, 3194]\"\r\nCP-1.zip,1,CP,0,3137,37,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nNCP-15.zip,2,NCP,423,1999,133,2,\"[1999, 2000]\"\r\nCP-6.zip,1,CP,1232,3450,91,1,[3450]\r\nCP-14.zip,1,CP,1526,4158,51,3,\"[4157, 4158, 4159]\"\r\nCP-4.zip,1,CP,1184,3402,193,1,[3402]\r\nNCP-17.zip,2,NCP,483,2122,56,2,\"[2121, 2122]\"\r\nCP-12.zip,1,CP,1459,3996,69,3,\"[3995, 3996, 3997]\"\r\nCP-17.zip,1,CP,1637,4325,20,1,[4325]\r\nCP-10.zip,1,CP,1411,3885,66,2,\"[3885, 3886]\"\r\nNCP-9.zip,2,NCP,2707,2673,44,1,[2673]\r\nNCP-29.zip,2,NCP,892,2431,20,1,[2431]\r\nCP-26.zip,1,CP,3720,5653,243,2,\"[5652, 5653]\"\r\nNormal-13.zip,0,Normal,2023,478,96,1,[478]\r\nCP-11.zip,1,CP,1439,3947,62,2,\"[3946, 
3947]\"\r\nNormal-6.zip,0,Normal,1801,256,89,1,[256]\r\nNCP-16.zip,2,NCP,442,2038,131,2,\"[2038, 2039]\"\r\nNormal-9.zip,0,Normal,1920,375,100,1,[375]\r\nCP-13.zip,1,CP,1489,4067,457,4,\"[4067, 4068, 4069, 4070]\"\r\nCP-9.zip,1,CP,1378,3811,50,2,\"[3810, 3811]\"\r\nNCP-12.zip,2,NCP,336,1821,50,2,\"[1820, 1821]\"\r\nNCP-3.zip,2,NCP,1295,2736,61,1,[2736]\r\nNormal-20.zip,0,Normal,2268,723,85,1,[723]\r\nNormal-20.zip,0,Normal,2281,736,84,1,[736]\r\nCP-1.zip,1,CP,1083,3128,71,2,\"[3128, 3129]\"\r\nCP-14.zip,1,CP,1545,4207,65,2,\"[4206, 4207]\"\r\nNormal-21.zip,0,Normal,2306,761,103,1,[761]\r\nNCP-13.zip,2,NCP,350,1852,47,2,\"[1851, 1852]\"\r\nCP-8.zip,1,CP,1326,3688,53,2,\"[3688, 3689]\"\r\nNCP-7.zip,2,NCP,236,1617,283,2,\"[1617, 1618]\"\r\nNormal-1.zip,0,Normal,1722,1001,73,2,\"[1001, 1002]\"\r\nNCP-5.zip,2,NCP,177,1498,139,2,\"[1498, 1499]\"\r\nNormal-1.zip,0,Normal,1708,971,74,2,\"[971, 972]\"\r\nNCP-8.zip,2,NCP,2680,2651,46,1,[2651]\r\nNCP-20.zip,2,NCP,570,2298,139,2,\"[2298, 2299]\"\r\nNormal-1.zip,0,Normal,1723,1004,77,2,\"[1003, 1004]\"\r\nNCP-10.zip,2,NCP,2723,2679,40,1,[2679]\r\nNormal-21.zip,0,Normal,2302,757,96,1,[757]\r\nNormal-18.zip,0,Normal,2199,654,85,1,[654]\r\nNormal-25.zip,0,Normal,3858,5370,234,1,[5370]\r\nNormal-21.zip,0,Normal,2286,741,84,1,[741]\r\nNormal-1.zip,0,Normal,1720,995,74,2,\"[995, 996]\"\r\nNormal-3.zip,0,Normal,769,204,138,1,[204]\r\nNCP-9.zip,2,NCP,2687,2654,51,1,[2654]\r\nNormal-16.zip,0,Normal,2124,579,101,1,[579]\r\nNCP-6.zip,2,NCP,206,1556,139,2,\"[1556, 1557]\"\r\nNormal-20.zip,0,Normal,2256,711,86,1,[711]\r\nCP-10.zip,1,CP,1411,3886,66,2,\"[3885, 3886]\"\r\nCP-11.zip,1,CP,1418,3901,54,3,\"[3900, 3901, 3902]\"\r\nNCP-4.zip,2,NCP,155,1455,58,2,\"[1454, 1455]\"\r\nNCP-19.zip,2,NCP,536,2230,61,2,\"[2229, 2230]\"\r\nCP-13.zip,1,CP,1489,4068,229,4,\"[4067, 4068, 4069, 4070]\"\r\nNormal-1.zip,0,Normal,1722,1002,73,2,\"[1001, 1002]\"\r\nCP-14.zip,1,CP,1526,4157,124,3,\"[4157, 4158, 4159]\"\r\nCP-13.zip,1,CP,1489,4069,58,4,\"[4067, 
4068, 4069, 4070]\"\r\nCP-1.zip,1,CP,0,3134,37,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nNCP-5.zip,2,NCP,177,1499,58,2,\"[1498, 1499]\"\r\nNCP-13.zip,2,NCP,350,1851,109,2,\"[1851, 1852]\"\r\nNormal-1.zip,0,Normal,1678,827,58,6,\"[827, 828, 829, 830, 831, 832]\"\r\nCP-1.zip,1,CP,1083,3129,71,2,\"[3128, 3129]\"\r\nCP-1.zip,1,CP,0,3140,269,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nCP-9.zip,1,CP,1378,3810,50,2,\"[3810, 3811]\"\r\nCP-8.zip,1,CP,1326,3689,53,2,\"[3688, 3689]\"\r\nCP-1.zip,1,CP,0,3133,290,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nNCP-4.zip,2,NCP,141,1427,54,2,\"[1426, 1427]\"\r\nNormal-1.zip,0,Normal,1723,1003,77,2,\"[1003, 1004]\"\r\nNCP-15.zip,2,NCP,423,2000,56,2,\"[1999, 2000]\"\r\nNCP-11.zip,2,NCP,302,1750,152,2,\"[1750, 1751]\"\r\nNCP-20.zip,2,NCP,570,2299,58,2,\"[2298, 2299]\"\r\nCP-12.zip,1,CP,1459,3995,164,3,\"[3995, 3996, 3997]\"\r\nNCP-16.zip,2,NCP,442,2039,53,2,\"[2038, 2039]\"\r\nCP-1.zip,1,CP,0,3136,290,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nCP-1.zip,1,CP,0,3135,269,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nCP-14.zip,1,CP,1526,4159,51,3,\"[4157, 4158, 4159]\"\r\nCP-1.zip,1,CP,0,3131,285,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nNCP-12.zip,2,NCP,336,1820,117,2,\"[1820, 1821]\"\r\nNCP-7.zip,2,NCP,235,1616,58,2,\"[1615, 1616]\"\r\nCP-11.zip,1,CP,1418,3902,54,3,\"[3900, 3901, 3902]\"\r\nNCP-7.zip,2,NCP,236,1618,119,2,\"[1617, 1618]\"\r\nCP-11.zip,1,CP,1439,3946,62,2,\"[3946, 3947]\"\r\nCP-1.zip,1,CP,0,3139,39,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nCP-1.zip,1,CP,0,3132,42,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nNormal-1.zip,0,Normal,1708,972,74,2,\"[971, 972]\"\r\nCP-12.zip,1,CP,1459,3997,69,3,\"[3995, 3996, 3997]\"\r\nNCP-18.zip,2,NCP,504,2164,155,2,\"[2164, 
2165]\"\r\nNormal-1.zip,0,Normal,1720,996,74,2,\"[995, 996]\"\r\nCP-19.zip,1,CP,1786,3192,81,3,\"[3192, 3193, 3194]\"\r\nNormal-1.zip,0,Normal,1678,830,34,6,\"[827, 828, 829, 830, 831, 832]\"\r\nCP-26.zip,1,CP,3720,5652,48,2,\"[5652, 5653]\"\r\nCP-19.zip,1,CP,1786,3193,81,3,\"[3192, 3193, 3194]\"\r\nCP-13.zip,1,CP,1489,4070,58,4,\"[4067, 4068, 4069, 4070]\"\r\nNCP-13.zip,2,NCP,361,1874,60,2,\"[1873, 1874]\"\r\nNCP-17.zip,2,NCP,483,2121,137,2,\"[2121, 2122]\"\r\nNormal-1.zip,0,Normal,1678,832,62,6,\"[827, 828, 829, 830, 831, 832]\"\r\nNCP-16.zip,2,NCP,43,1219,156,2,\"[1219, 1220]\"\r\nNCP-18.zip,2,NCP,499,2154,139,2,\"[2154, 2155]\"\r\nCP-10.zip,1,CP,1409,3882,66,2,\"[3881, 3882]\"\r\nNCP-5.zip,2,NCP,194,1533,56,2,\"[1532, 1533]\"\r\nNCP-19.zip,2,NCP,518,2193,57,2,\"[2192, 2193]\"\r\nCP-11.zip,1,CP,1432,3932,60,2,\"[3932, 3933]\"\r\nNormal-1.zip,0,Normal,1678,828,58,6,\"[827, 828, 829, 830, 831, 832]\"\r\nNormal-1.zip,0,Normal,1678,831,62,6,\"[827, 828, 829, 830, 831, 832]\"\r\nCP-1.zip,1,CP,0,3138,245,10,\"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]\"\r\nNormal-1.zip,0,Normal,1710,975,78,2,\"[975, 976]\"\r\nCP-14.zip,1,CP,1545,4206,65,2,\"[4206, 4207]\"\r\nNCP-5.zip,2,NCP,18,1169,57,2,\"[1168, 1169]\"\r\nNormal-15.zip,0,Normal,2096,551,93,1,[551]\r\nCP-21.zip,1,CP,2776,3307,31,1,[3307]\r\nNCP-16.zip,2,NCP,449,2053,61,2,\"[2052, 2053]\"\r\nNCP-15.zip,2,NCP,404,1958,46,2,\"[1957, 1958]\"\r\nNCP-6.zip,2,NCP,210,1565,55,2,\"[1564, 1565]\"\r\nCP-3.zip,1,CP,1144,3362,159,1,[3362]\r\nNormal-8.zip,0,Normal,1879,334,88,1,[334]\r\nNormal-1.zip,0,Normal,1721,1000,75,4,\"[1000, 997, 998, 999]\"\r\nNCP-21.zip,2,NCP,583,2323,147,2,\"[2323, 2324]\"\r\nNCP-1.zip,2,NCP,1039,2610,45,1,[2610]\r\nNormal-8.zip,0,Normal,1882,337,86,1,[337]\r\nNormal-21.zip,0,Normal,2307,762,80,1,[762]\r\nCP-14.zip,1,CP,1528,4163,61,2,\"[4163, 4164]\"\r\nCP-11.zip,1,CP,1443,3958,58,3,\"[3957, 3958, 3959]\"\r\nNCP-18.zip,2,NCP,496,2149,70,2,\"[2148, 
2149]\"\r\nCP-7.zip,1,CP,1270,3489,204,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nNormal-7.zip,0,Normal,1834,289,82,1,[289]\r\nNCP-13.zip,2,NCP,351,1853,145,2,\"[1853, 1854]\"\r\nCP-18.zip,1,CP,1782,3584,71,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]\"\r\nNormal-1.zip,0,Normal,1676,816,65,7,\"[816, 817, 818, 819, 820, 821, 822]\"\r\nCP-11.zip,1,CP,1428,3923,221,3,\"[3923, 3924, 3925]\"\r\nCP-8.zip,1,CP,1330,3699,58,3,\"[3698, 3699, 3700]\"\r\nNormal-19.zip,0,Normal,2233,688,76,1,[688]\r\nNCP-18.zip,2,NCP,514,2184,160,2,\"[2184, 2185]\"\r\nNormal-6.zip,0,Normal,1804,259,102,1,[259]\r\nNormal-22.zip,0,Normal,2598,108,38,1,[108]\r\nCP-14.zip,1,CP,1534,4176,58,2,\"[4176, 4177]\"\r\nCP-5.zip,1,CP,1217,3435,320,1,[3435]\r\nNCP-14.zip,2,NCP,378,1908,168,2,\"[1908, 1909]\"\r\nCP-18.zip,1,CP,1782,3582,71,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]\"\r\nNCP-25.zip,2,NCP,3963,5474,56,1,[5474]\r\nNCP-22.zip,2,NCP,82,1298,55,2,\"[1297, 1298]\"\r\nNCP-2.zip,2,NCP,1274,2715,55,1,[2715]\r\nCP-22.zip,1,CP,619,2981,102,1,[2981]\r\nNormal-24.zip,0,Normal,2661,171,31,1,[171]\r\nCP-14.zip,1,CP,1540,4192,58,3,\"[4191, 4192, 4193]\"\r\nNCP-10.zip,2,NCP,2724,2680,43,1,[2680]\r\nNormal-2.zip,0,Normal,1742,1055,60,1,[1055]\r\nCP-12.zip,1,CP,1486,4060,63,2,\"[4059, 4060]\"\r\nNCP-19.zip,2,NCP,527,2211,48,2,\"[2210, 2211]\"\r\nCP-10.zip,1,CP,1393,3846,60,2,\"[3845, 3846]\"\r\nNormal-1.zip,0,Normal,1721,997,68,4,\"[1000, 997, 998, 999]\"\r\nNormal-25.zip,0,Normal,3839,5351,220,1,[5351]\r\nNormal-12.zip,0,Normal,1991,446,306,1,[446]\r\nCP-19.zip,1,CP,1794,3595,38,2,\"[3594, 3595]\"\r\nNormal-1.zip,0,Normal,1669,785,54,5,\"[782, 783, 784, 785, 786]\"\r\nCP-18.zip,1,CP,1782,3580,71,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]\"\r\nNormal-11.zip,0,Normal,1963,418,95,1,[418]\r\nCP-19.zip,1,CP,1789,3205,59,4,\"[3204, 3205, 3206, 3207]\"\r\nCP-11.zip,1,CP,1428,3924,56,3,\"[3923, 3924, 
3925]\"\r\nNormal-9.zip,0,Normal,1918,373,85,1,[373]\r\nCP-4.zip,1,CP,1176,3394,161,1,[3394]\r\nCP-10.zip,1,CP,1397,3855,60,2,\"[3854, 3855]\"\r\nNormal-16.zip,0,Normal,2118,573,89,1,[573]\r\nCP-16.zip,1,CP,1594,4282,26,1,[4282]\r\nNCP-4.zip,2,NCP,140,1424,128,2,\"[1424, 1425]\"\r\nCP-1.zip,1,CP,1077,3121,74,2,\"[3121, 3122]\"\r\nNormal-16.zip,0,Normal,2142,597,84,1,[597]\r\nNCP-15.zip,2,NCP,410,1969,143,2,\"[1969, 1970]\"\r\nNormal-3.zip,0,Normal,749,184,89,1,[184]\r\nNormal-1.zip,0,Normal,1718,991,66,2,\"[991, 992]\"\r\nNCP-5.zip,2,NCP,176,1497,53,2,\"[1496, 1497]\"\r\nCP-29.zip,1,CP,3819,5763,31,1,[5763]\r\nNCP-8.zip,2,NCP,265,1677,50,2,\"[1676, 1677]\"\r\nCP-7.zip,1,CP,1270,3495,148,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nNCP-26.zip,2,NCP,3982,5489,34,1,[5489]\r\nCP-12.zip,1,CP,1468,4016,54,3,\"[4015, 4016, 4017]\"\r\nCP-3.zip,1,CP,1139,3357,332,1,[3357]\r\nNormal-14.zip,0,Normal,2070,525,104,1,[525]\r\nNormal-1.zip,0,Normal,1672,798,78,8,\"[796, 797, 798, 799, 800, 801, 802, 803]\"\r\nCP-11.zip,1,CP,1435,3939,46,2,\"[3938, 3939]\"\r\nCP-30.zip,1,CP,4019,5568,38,1,[5568]\r\nCP-18.zip,1,CP,1777,3540,67,5,\"[3540, 3541, 3542, 3543, 3544]\"\r\nCP-23.zip,1,CP,666,3028,192,1,[3028]\r\nNormal-1.zip,0,Normal,1703,959,70,2,\"[959, 960]\"\r\nCP-3.zip,1,CP,1133,3351,213,1,[3351]\r\nNCP-8.zip,2,NCP,2677,2695,51,1,[2695]\r\nNCP-13.zip,2,NCP,357,1866,63,2,\"[1865, 1866]\"\r\nNCP-13.zip,2,NCP,346,1843,139,2,\"[1843, 1844]\"\r\nCP-13.zip,1,CP,1504,4107,64,1,[4107]\r\nNormal-3.zip,0,Normal,745,180,105,1,[180]\r\nNormal-1.zip,0,Normal,1676,820,72,7,\"[816, 817, 818, 819, 820, 821, 822]\"\r\nNormal-26.zip,0,Normal,3869,5381,27,1,[5381]\r\nCP-18.zip,1,CP,1774,3528,58,8,\"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]\"\r\nNCP-14.zip,2,NCP,379,1911,62,2,\"[1910, 1911]\"\r\nNormal-21.zip,0,Normal,2301,756,88,1,[756]\r\nNCP-1.zip,2,NCP,104,1345,139,2,\"[1345, 1346]\"\r\nCP-18.zip,1,CP,1771,3519,51,4,\"[3518, 3519, 3520, 
3521]\"\r\nNCP-2.zip,2,NCP,116,1373,127,2,\"[1373, 1374]\"\r\nCP-22.zip,1,CP,643,3005,126,1,[3005]\r\nNCP-17.zip,2,NCP,466,2087,145,2,\"[2087, 2088]\"\r\nCP-26.zip,1,CP,3723,5658,43,1,[5658]\r\nCP-11.zip,1,CP,1443,3957,139,3,\"[3957, 3958, 3959]\"\r\nNormal-8.zip,0,Normal,1884,339,82,1,[339]\r\nCP-15.zip,1,CP,1586,4274,23,1,[4274]\r\nCP-8.zip,1,CP,1349,3743,58,3,\"[3742, 3743, 3744]\"\r\nNormal-22.zip,0,Normal,2586,96,30,1,[96]\r\nNormal-4.zip,0,Normal,785,220,292,1,[220]\r\nCP-19.zip,1,CP,2428,2887,124,1,[2887]\r\nNCP-5.zip,2,NCP,181,1507,58,2,\"[1506, 1507]\"\r\nNCP-13.zip,2,NCP,352,1856,58,2,\"[1855, 1856]\"\r\nNCP-2.zip,2,NCP,109,1355,143,2,\"[1355, 1356]\"\r\nCP-13.zip,1,CP,1493,4080,125,3,\"[4080, 4081, 4082]\"\r\nCP-4.zip,1,CP,1191,3409,220,1,[3409]\r\nCP-17.zip,1,CP,1642,4330,25,1,[4330]\r\nCP-7.zip,1,CP,1304,3635,232,8,\"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]\"\r\nNCP-18.zip,2,NCP,496,2148,168,2,\"[2148, 2149]\"\r\nNCP-27.zip,2,NCP,1058,2635,46,1,[2635]\r\nNormal-14.zip,0,Normal,2071,526,103,1,[526]\r\nCP-26.zip,1,CP,3719,5650,55,3,\"[5649, 5650, 5651]\"\r\nNormal-24.zip,0,Normal,2663,173,48,1,[173]\r\nNCP-3.zip,2,NCP,1298,2739,60,1,[2739]\r\nCP-19.zip,1,CP,2430,2891,102,2,\"[2891, 2892]\"\r\nCP-12.zip,1,CP,1458,3993,69,3,\"[3992, 3993, 3994]\"\r\nNormal-1.zip,0,Normal,1677,823,64,4,\"[823, 824, 825, 826]\"\r\nCP-12.zip,1,CP,1469,4018,47,2,\"[4018, 4019]\"\r\nCP-7.zip,1,CP,1268,3486,336,1,[3486]\r\nNormal-18.zip,0,Normal,2203,658,75,1,[658]\r\nCP-21.zip,1,CP,593,2955,100,1,[2955]\r\nNormal-16.zip,0,Normal,2143,598,87,1,[598]\r\nNCP-20.zip,2,NCP,552,2261,146,2,\"[2261, 2262]\"\r\nNCP-11.zip,2,NCP,309,1766,69,2,\"[1766, 1765]\"\r\nNCP-19.zip,2,NCP,520,2197,55,2,\"[2196, 2197]\"\r\nCP-14.zip,1,CP,1550,4217,64,2,\"[4217, 4218]\"\r\nNCP-8.zip,2,NCP,265,1676,119,2,\"[1676, 1677]\"\r\nNormal-1.zip,0,Normal,1669,782,62,5,\"[782, 783, 784, 785, 
786]\"\r\nNCP-26.zip,2,NCP,3976,5484,32,1,[5484]\r\nNCP-31.zip,2,NCP,998,2555,44,1,[2555]\r\nNCP-2.zip,2,NCP,107,1351,146,2,\"[1351, 1352]\"\r\nNormal-16.zip,0,Normal,2136,591,83,1,[591]\r\nCP-12.zip,1,CP,1463,4006,49,2,\"[4005, 4006]\"\r\nNCP-4.zip,2,NCP,156,1457,58,2,\"[1456, 1457]\"\r\nNCP-1.zip,2,NCP,1002,2561,58,1,[2561]\r\nNormal-1.zip,0,Normal,1672,801,78,8,\"[796, 797, 798, 799, 800, 801, 802, 803]\"\r\nNormal-14.zip,0,Normal,2078,533,73,1,[533]\r\nNCP-5.zip,2,NCP,185,1514,121,2,\"[1514, 1515]\"\r\nCP-14.zip,1,CP,1530,4168,60,1,[4168]\r\nNCP-15.zip,2,NCP,413,1976,128,4,\"[1975, 1976, 1977, 1979]\"\r\nCP-5.zip,1,CP,1224,3442,204,1,[3442]\r\nCP-5.zip,1,CP,1215,3433,165,1,[3433]\r\nNormal-26.zip,0,Normal,3886,5399,76,1,[5399]\r\nNormal-24.zip,0,Normal,2640,150,41,1,[150]\r\nNCP-28.zip,2,NCP,836,2351,52,1,[2351]\r\nNCP-4.zip,2,NCP,146,1436,123,2,\"[1436, 1437]\"\r\nNormal-17.zip,0,Normal,2155,610,89,1,[610]\r\nCP-30.zip,1,CP,3939,5547,38,1,[5547]\r\nCP-19.zip,1,CP,1784,3590,112,4,\"[3590, 3591, 3592, 3593]\"\r\nCP-10.zip,1,CP,1399,3859,45,2,\"[3858, 3859]\"\r\nNCP-19.zip,2,NCP,519,2194,126,2,\"[2194, 2195]\"\r\nNCP-11.zip,2,NCP,297,1739,144,2,\"[1739, 1741]\"\r\nNCP-22.zip,2,NCP,88,1309,170,2,\"[1309, 1310]\"\r\nCP-18.zip,1,CP,1778,3547,65,6,\"[3545, 3546, 3547, 3548, 3549, 3550]\"\r\nNCP-30.zip,2,NCP,968,2511,61,1,[2511]\r\nCP-9.zip,1,CP,1360,3769,67,3,\"[3767, 3768, 3769]\"\r\nCP-26.zip,1,CP,3638,5597,285,1,[5597]\r\nNCP-13.zip,2,NCP,353,1857,167,2,\"[1857, 1858]\"\r\nCP-30.zip,1,CP,3932,5634,71,2,\"[5634, 5635]\"\r\nNCP-21.zip,2,NCP,62,1257,144,2,\"[1257, 1258]\"\r\nCP-2.zip,1,CP,1127,3345,278,1,[3345]\r\nNCP-12.zip,2,NCP,337,1823,58,2,\"[1822, 1823]\"\r\nNCP-14.zip,2,NCP,390,1931,53,2,\"[1930, 1931]\"\r\nNCP-15.zip,2,NCP,417,1988,58,2,\"[1987, 1988]\"\r\nCP-24.zip,1,CP,689,3051,58,1,[3051]\r\nCP-7.zip,1,CP,1270,3501,420,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nCP-9.zip,1,CP,1377,3808,58,2,\"[3808, 
3809]\"\r\nCP-13.zip,1,CP,1505,4110,54,3,\"[4108, 4109, 4110]\"\r\nCP-13.zip,1,CP,1492,4078,58,3,\"[4077, 4078, 4079]\"\r\nNCP-4.zip,2,NCP,159,1463,61,2,\"[1462, 1463]\"\r\nNCP-6.zip,2,NCP,220,1585,67,2,\"[1584, 1585]\"\r\nNCP-29.zip,2,NCP,884,2421,23,1,[2421]\r\nNormal-3.zip,0,Normal,757,192,110,1,[192]\r\nCP-21.zip,1,CP,4,3505,298,4,\"[3505, 3506, 3507, 3508]\"\r\nCP-16.zip,1,CP,1608,4296,23,1,[4296]\r\nCP-4.zip,1,CP,1169,3387,171,1,[3387]\r\nNormal-4.zip,0,Normal,797,232,112,1,[232]\r\nNCP-19.zip,2,NCP,540,2238,54,2,\"[2237, 2238]\"\r\nNormal-14.zip,0,Normal,2068,523,81,1,[523]\r\nNormal-11.zip,0,Normal,1985,440,96,1,[440]\r\nCP-9.zip,1,CP,1353,3748,140,3,\"[3748, 3749, 3750]\"\r\nNCP-6.zip,2,NCP,224,1592,136,2,\"[1592, 1593]\"\r\nCP-10.zip,1,CP,1397,3854,60,2,\"[3854, 3855]\"\r\nNCP-12.zip,2,NCP,318,1784,63,2,\"[1783, 1784]\"\r\nNCP-21.zip,2,NCP,59,1251,122,2,\"[1251, 1252]\"\r\nNormal-17.zip,0,Normal,2184,639,86,1,[639]\r\nNCP-18.zip,2,NCP,493,2143,56,2,\"[2142, 2143]\"\r\nNCP-25.zip,2,NCP,3954,5467,42,1,[5467]\r\nNormal-2.zip,0,Normal,1763,1137,70,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nCP-23.zip,1,CP,675,3037,124,1,[3037]\r\nCP-9.zip,1,CP,1365,3780,60,3,\"[3779, 3780, 3781]\"\r\nCP-6.zip,1,CP,1256,3474,140,1,[3474]\r\nNormal-1.zip,0,Normal,1676,822,69,7,\"[816, 817, 818, 819, 820, 821, 822]\"\r\nNCP-16.zip,2,NCP,441,2037,49,2,\"[2036, 2037]\"\r\nNCP-7.zip,2,NCP,2484,2643,46,1,[2643]\r\nCP-20.zip,1,CP,2771,3302,37,1,[3302]\r\nNCP-10.zip,2,NCP,2714,2707,53,1,[2707]\r\nNormal-4.zip,0,Normal,772,207,363,1,[207]\r\nNCP-16.zip,2,NCP,440,2035,53,2,\"[2034, 2035]\"\r\nCP-17.zip,1,CP,1646,4334,26,1,[4334]\r\nNCP-11.zip,2,NCP,284,1713,139,2,\"[1713, 1714]\"\r\nCP-23.zip,1,CP,656,3018,575,1,[3018]\r\nCP-2.zip,1,CP,1104,3322,164,1,[3322]\r\nNCP-22.zip,2,NCP,85,1303,139,2,\"[1303, 1304]\"\r\nCP-30.zip,1,CP,3933,5637,38,2,\"[5636, 5637]\"\r\nNormal-7.zip,0,Normal,1839,294,94,1,[294]\r\nNCP-6.zip,2,NCP,223,1590,132,2,\"[1590, 
1591]\"\r\nCP-2.zip,1,CP,1119,3337,157,1,[3337]\r\nCP-11.zip,1,CP,1431,3931,61,2,\"[3930, 3931]\"\r\nCP-7.zip,1,CP,1304,3634,47,8,\"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]\"\r\nNCP-11.zip,2,NCP,299,1745,58,2,\"[1744, 1745]\"\r\nNCP-15.zip,2,NCP,405,1960,60,2,\"[1959, 1960]\"\r\nNCP-20.zip,2,NCP,574,2307,58,2,\"[2306, 2307]\"\r\nCP-10.zip,1,CP,1412,3887,66,2,\"[3887, 3888]\"\r\nNCP-4.zip,2,NCP,167,1479,60,2,\"[1478, 1479]\"\r\nNCP-4.zip,2,NCP,157,1459,49,2,\"[1458, 1459]\"\r\nNCP-13.zip,2,NCP,349,1849,135,2,\"[1849, 1850]\"\r\nCP-18.zip,1,CP,1771,3520,51,4,\"[3518, 3519, 3520, 3521]\"\r\nNCP-14.zip,2,NCP,372,1895,109,2,\"[1895, 1896]\"\r\nNCP-18.zip,2,NCP,503,2162,146,2,\"[2162, 2163]\"\r\nNCP-6.zip,2,NCP,199,1543,58,2,\"[1542, 1543]\"\r\nCP-18.zip,1,CP,1662,4350,19,1,[4350]\r\nCP-9.zip,1,CP,1377,3809,57,2,\"[3808, 3809]\"\r\nNormal-1.zip,0,Normal,1727,1009,63,4,\"[1009, 1010, 1011, 1012]\"\r\nNCP-20.zip,2,NCP,566,2290,160,2,\"[2290, 2291]\"\r\nNCP-21.zip,2,NCP,583,2324,62,2,\"[2323, 2324]\"\r\nCP-29.zip,1,CP,3821,5765,29,1,[5765]\r\nNCP-5.zip,2,NCP,190,1525,64,2,\"[1524, 1525]\"\r\nNormal-2.zip,0,Normal,1746,1064,68,2,\"[1063, 1064]\"\r\nCP-27.zip,1,CP,3744,5688,17,1,[5688]\r\nCP-2.zip,1,CP,1111,3329,204,1,[3329]\r\nNormal-10.zip,0,Normal,1948,403,98,1,[403]\r\nNCP-12.zip,2,NCP,338,1824,150,2,\"[1824, 1825]\"\r\nNCP-13.zip,2,NCP,348,1847,112,2,\"[1847, 1848]\"\r\nNCP-19.zip,2,NCP,527,2210,114,2,\"[2210, 2211]\"\r\nCP-24.zip,1,CP,700,3062,86,1,[3062]\r\nNCP-15.zip,2,NCP,404,1957,108,2,\"[1957, 1958]\"\r\nCP-18.zip,1,CP,1655,4343,23,1,[4343]\r\nCP-27.zip,1,CP,3736,5680,16,1,[5680]\r\nNormal-24.zip,0,Normal,2654,164,31,1,[164]\r\nNCP-13.zip,2,NCP,359,1869,145,2,\"[1869, 1870]\"\r\nNCP-16.zip,2,NCP,437,2027,142,2,\"[2027, 2028]\"\r\nCP-27.zip,1,CP,3741,5685,17,1,[5685]\r\nCP-24.zip,1,CP,693,3055,273,1,[3055]\r\nNCP-17.zip,2,NCP,466,2088,61,2,\"[2087, 
2088]\"\r\nCP-24.zip,1,CP,682,3044,149,1,[3044]\r\nNormal-17.zip,0,Normal,2175,630,80,1,[630]\r\nNCP-6.zip,2,NCP,223,1591,56,2,\"[1590, 1591]\"\r\nNCP-2.zip,2,NCP,1051,2626,178,2,\"[2625, 2626]\"\r\nCP-11.zip,1,CP,1454,3982,125,3,\"[3982, 3983, 3984]\"\r\nNormal-20.zip,0,Normal,2253,708,70,1,[708]\r\nNCP-4.zip,2,NCP,140,1425,54,2,\"[1424, 1425]\"\r\nNormal-20.zip,0,Normal,2252,707,84,1,[707]\r\nNormal-21.zip,0,Normal,2308,763,85,1,[763]\r\nNCP-18.zip,2,NCP,516,2189,57,2,\"[2188, 2189]\"\r\nNCP-12.zip,2,NCP,313,1774,62,2,\"[1773, 1774]\"\r\nCP-2.zip,1,CP,1126,3344,204,1,[3344]\r\nNormal-20.zip,0,Normal,2257,712,83,1,[712]\r\nNCP-6.zip,2,NCP,203,1551,59,2,\"[1550, 1551]\"\r\nCP-13.zip,1,CP,1503,4106,64,3,\"[4104, 4105, 4106]\"\r\nNormal-20.zip,0,Normal,2280,735,82,1,[735]\r\nCP-19.zip,1,CP,2443,2915,112,3,\"[2915, 2916, 2917]\"\r\nCP-20.zip,1,CP,2451,2930,136,1,[2930]\r\nCP-1.zip,1,CP,1093,3311,173,1,[3311]\r\nCP-13.zip,1,CP,1518,4138,160,3,\"[4138, 4139, 4140]\"\r\nCP-20.zip,1,CP,2773,3304,30,1,[3304]\r\nNCP-15.zip,2,NCP,414,1981,51,2,\"[1980, 1981]\"\r\nNCP-23.zip,2,NCP,96,1328,145,2,\"[1328, 1329]\"\r\nCP-11.zip,1,CP,1422,3909,59,3,\"[3908, 3909, 3910]\"\r\nNormal-20.zip,0,Normal,2258,713,74,1,[713]\r\nNCP-29.zip,2,NCP,882,2417,52,2,\"[2417, 2418]\"\r\nNormal-2.zip,0,Normal,1737,1038,79,4,\"[1037, 1038, 1039, 1040]\"\r\nNormal-13.zip,0,Normal,2025,480,101,1,[480]\r\nNCP-5.zip,2,NCP,173,1490,139,2,\"[1490, 1491]\"\r\nCP-6.zip,1,CP,1257,3475,155,1,[3475]\r\nNCP-23.zip,2,NCP,952,2495,379,1,[2495]\r\nNormal-1.zip,0,Normal,1700,954,64,2,\"[953, 954]\"\r\nNCP-17.zip,2,NCP,465,2085,31,3,\"[2084, 2085, 2086]\"\r\nNormal-16.zip,0,Normal,2122,577,85,1,[577]\r\nCP-13.zip,1,CP,1502,4102,73,2,\"[4102, 4103]\"\r\nNormal-17.zip,0,Normal,2153,608,82,1,[608]\r\nNormal-24.zip,0,Normal,2650,160,40,1,[160]\r\nNCP-27.zip,2,NCP,1031,2602,231,2,\"[2601, 2602]\"\r\nNCP-14.zip,2,NCP,393,1937,62,2,\"[1936, 1937]\"\r\nCP-5.zip,1,CP,12,3169,233,2,\"[3168, 
3169]\"\r\nNCP-13.zip,2,NCP,346,1844,58,2,\"[1843, 1844]\"\r\nNormal-11.zip,0,Normal,1986,441,88,1,[441]\r\nCP-19.zip,1,CP,2433,2897,108,1,[2897]\r\nNCP-4.zip,2,NCP,151,1447,54,2,\"[1446, 1447]\"\r\nNCP-13.zip,2,NCP,370,1891,128,2,\"[1891, 1892]\"\r\nNormal-17.zip,0,Normal,2168,623,89,1,[623]\r\nNCP-29.zip,2,NCP,880,2415,312,1,[2415]\r\nNCP-12.zip,2,NCP,338,1825,63,2,\"[1824, 1825]\"\r\nNormal-23.zip,0,Normal,2634,144,37,1,[144]\r\nNCP-14.zip,2,NCP,396,1942,170,2,\"[1942, 1943]\"\r\nNCP-16.zip,2,NCP,439,2032,162,2,\"[2032, 2033]\"\r\nNCP-8.zip,2,NCP,266,1678,137,2,\"[1678, 1679]\"\r\nCP-11.zip,1,CP,1423,3911,204,3,\"[3911, 3912, 3913]\"\r\nCP-11.zip,1,CP,1454,3984,53,3,\"[3982, 3983, 3984]\"\r\nCP-28.zip,1,CP,3792,5736,20,1,[5736]\r\nCP-7.zip,1,CP,1270,3494,129,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nNormal-1.zip,0,Normal,1727,1011,66,4,\"[1009, 1010, 1011, 1012]\"\r\nNormal-19.zip,0,Normal,2234,689,89,1,[689]\r\nNCP-13.zip,2,NCP,35,1203,58,2,\"[1202, 1203]\"\r\nNCP-18.zip,2,NCP,51,1236,59,2,\"[1235, 1236]\"\r\nNCP-2.zip,2,NCP,113,1368,58,2,\"[1367, 1368]\"\r\nNormal-2.zip,0,Normal,1757,1107,68,4,\"[1105, 1106, 1107, 1108]\"\r\nNCP-12.zip,2,NCP,319,1785,158,2,\"[1785, 1787]\"\r\nNormal-22.zip,0,Normal,2322,777,88,1,[777]\r\nCP-21.zip,1,CP,584,2946,116,1,[2946]\r\nCP-9.zip,1,CP,1365,3781,60,3,\"[3779, 3780, 3781]\"\r\nNCP-12.zip,2,NCP,322,1792,120,2,\"[1792, 1793]\"\r\nNormal-2.zip,0,Normal,1763,1140,75,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nNCP-21.zip,2,NCP,59,1252,52,2,\"[1251, 1252]\"\r\nNCP-5.zip,2,NCP,170,1485,59,2,\"[1484, 1485]\"\r\nNCP-21.zip,2,NCP,72,1276,129,2,\"[1276, 1277]\"\r\nNCP-22.zip,2,NCP,887,2425,38,1,[2425]\r\nCP-2.zip,1,CP,1117,3335,155,1,[3335]\r\nNormal-2.zip,0,Normal,1763,1134,70,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nCP-18.zip,1,CP,1778,3550,64,6,\"[3545, 3546, 3547, 3548, 3549, 
3550]\"\r\nCP-23.zip,1,CP,664,3026,78,1,[3026]\r\nCP-23.zip,1,CP,668,3030,102,1,[3030]\r\nNCP-13.zip,2,NCP,355,1862,53,2,\"[1861, 1862]\"\r\nNCP-13.zip,2,NCP,358,1867,160,2,\"[1867, 1868]\"\r\nCP-14.zip,1,CP,1550,4218,64,2,\"[4217, 4218]\"\r\nCP-26.zip,1,CP,3729,5667,207,3,\"[5665, 5666, 5667]\"\r\nCP-21.zip,1,CP,603,2965,88,1,[2965]\r\nNCP-13.zip,2,NCP,370,1892,54,2,\"[1891, 1892]\"\r\nNCP-13.zip,2,NCP,35,1202,139,2,\"[1202, 1203]\"\r\nCP-3.zip,1,CP,1155,3373,171,1,[3373]\r\nNormal-10.zip,0,Normal,1927,382,99,1,[382]\r\nCP-15.zip,1,CP,1574,4262,26,1,[4262]\r\nCP-13.zip,1,CP,1498,4096,60,2,\"[4095, 4096]\"\r\nNCP-6.zip,2,NCP,205,1555,53,2,\"[1554, 1555]\"\r\nNCP-11.zip,2,NCP,301,1748,147,2,\"[1748, 1749]\"\r\nNCP-11.zip,2,NCP,303,1752,139,2,\"[1752, 1753]\"\r\nCP-12.zip,1,CP,1468,4017,54,3,\"[4015, 4016, 4017]\"\r\nNormal-14.zip,0,Normal,2081,536,93,1,[536]\r\nNormal-2.zip,0,Normal,1763,1141,75,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nNCP-22.zip,2,NCP,859,2380,299,2,\"[2380, 2381]\"\r\nNormal-26.zip,0,Normal,3885,5398,63,1,[5398]\r\nCP-13.zip,1,CP,1505,4109,54,3,\"[4108, 4109, 4110]\"\r\nNCP-1.zip,2,NCP,103,1343,150,2,\"[1343, 1344]\"\r\nNCP-14.zip,2,NCP,396,1943,71,2,\"[1942, 1943]\"\r\nNCP-22.zip,2,NCP,871,2402,293,2,\"[2401, 2402]\"\r\nNormal-10.zip,0,Normal,1951,406,105,1,[406]\r\nCP-11.zip,1,CP,1434,3936,63,2,\"[3936, 3937]\"\r\nCP-26.zip,1,CP,3724,5659,51,1,[5659]\r\nCP-12.zip,1,CP,1471,4022,56,2,\"[4022, 4023]\"\r\nNormal-21.zip,0,Normal,2304,759,110,1,[759]\r\nCP-28.zip,1,CP,3777,5721,26,1,[5721]\r\nNCP-28.zip,2,NCP,837,2352,57,1,[2352]\r\nNormal-2.zip,0,Normal,1763,1133,72,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nNormal-8.zip,0,Normal,1873,328,104,1,[328]\r\nCP-12.zip,1,CP,1458,3992,165,3,\"[3992, 3993, 3994]\"\r\nNCP-7.zip,2,NCP,230,1604,139,2,\"[1604, 
1605]\"\r\nCP-30.zip,1,CP,4042,5591,37,1,[5591]\r\nNormal-4.zip,0,Normal,774,209,134,1,[209]\r\nNormal-19.zip,0,Normal,2228,683,85,1,[683]\r\nNormal-18.zip,0,Normal,2206,661,77,1,[661]\r\nCP-17.zip,1,CP,1628,4316,23,1,[4316]\r\nNormal-11.zip,0,Normal,1969,424,90,1,[424]\r\nNormal-20.zip,0,Normal,2259,714,97,1,[714]\r\nCP-17.zip,1,CP,1640,4328,25,1,[4328]\r\nNCP-8.zip,2,NCP,254,1654,139,2,\"[1654, 1655]\"\r\nNormal-16.zip,0,Normal,2140,595,88,1,[595]\r\nCP-6.zip,1,CP,1249,3467,144,1,[3467]\r\nNCP-23.zip,2,NCP,92,1321,37,2,\"[1320, 1321]\"\r\nCP-18.zip,1,CP,1657,4345,24,1,[4345]\r\nNCP-17.zip,2,NCP,484,2124,58,2,\"[2123, 2124]\"\r\nNormal-2.zip,0,Normal,1743,1057,73,2,\"[1056, 1057]\"\r\nCP-18.zip,1,CP,1778,3545,66,6,\"[3545, 3546, 3547, 3548, 3549, 3550]\"\r\nNCP-30.zip,2,NCP,966,2509,279,1,[2509]\r\nCP-9.zip,1,CP,1376,3807,60,2,\"[3806, 3807]\"\r\nNormal-1.zip,0,Normal,1716,987,71,2,\"[987, 988]\"\r\nCP-7.zip,1,CP,1302,3602,42,4,\"[3602, 3603, 3604, 3605]\"\r\nNCP-18.zip,2,NCP,50,1233,141,2,\"[1233, 1234]\"\r\nCP-32.zip,1,CP,1781,3572,65,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nCP-7.zip,1,CP,1270,3497,133,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nNCP-5.zip,2,NCP,192,1528,135,2,\"[1528, 1529]\"\r\nNCP-7.zip,2,NCP,2489,2646,40,1,[2646]\r\nCP-11.zip,1,CP,1434,3937,63,2,\"[3936, 3937]\"\r\nCP-23.zip,1,CP,645,3007,124,1,[3007]\r\nNormal-10.zip,0,Normal,1941,396,91,1,[396]\r\nNormal-12.zip,0,Normal,2001,456,86,1,[456]\r\nNormal-3.zip,0,Normal,761,196,120,1,[196]\r\nCP-18.zip,1,CP,1782,3579,71,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]\"\r\nCP-7.zip,1,CP,1265,3483,166,1,[3483]\r\nNCP-3.zip,2,NCP,1287,2728,66,1,[2728]\r\nNCP-28.zip,2,NCP,835,2350,52,2,\"[2349, 2350]\"\r\nNCP-19.zip,2,NCP,543,2243,128,2,\"[2243, 2244]\"\r\nCP-21.zip,1,CP,4,3507,259,4,\"[3505, 3506, 3507, 
3508]\"\r\nCP-17.zip,1,CP,1633,4321,26,1,[4321]\r\nNCP-20.zip,2,NCP,565,2289,57,2,\"[2288, 2289]\"\r\nNCP-22.zip,2,NCP,878,2412,46,2,\"[2412, 2413]\"\r\nCP-14.zip,1,CP,1520,4144,57,3,\"[4143, 4144, 4145]\"\r\nNormal-23.zip,0,Normal,2620,130,36,1,[130]\r\nNCP-23.zip,2,NCP,958,2501,133,1,[2501]\r\nCP-13.zip,1,CP,1513,4128,60,2,\"[4127, 4128]\"\r\nNCP-24.zip,2,NCP,98,1332,139,2,\"[1332, 1333]\"\r\nCP-9.zip,1,CP,1375,3804,60,2,\"[3804, 3805]\"\r\nNCP-2.zip,2,NCP,1051,2625,88,2,\"[2625, 2626]\"\r\nNCP-31.zip,2,NCP,999,2556,41,1,[2556]\r\nCP-18.zip,1,CP,1781,3575,78,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nNCP-10.zip,2,NCP,278,1703,57,2,\"[1702, 1703]\"\r\nNCP-12.zip,2,NCP,313,1773,147,2,\"[1773, 1774]\"\r\nNCP-14.zip,2,NCP,381,1915,60,2,\"[1914, 1915]\"\r\nNCP-11.zip,2,NCP,295,1735,236,2,\"[1735, 1736]\"\r\nCP-11.zip,1,CP,1440,3948,196,3,\"[3948, 3949, 3950]\"\r\nCP-19.zip,1,CP,1795,3597,41,2,\"[3596, 3597]\"\r\nCP-12.zip,1,CP,1467,4013,60,2,\"[4013, 4014]\"\r\nNCP-12.zip,2,NCP,322,1793,51,2,\"[1792, 1793]\"\r\nCP-9.zip,1,CP,1353,3750,59,3,\"[3748, 3749, 3750]\"\r\nCP-19.zip,1,CP,1784,3591,50,4,\"[3590, 3591, 3592, 3593]\"\r\nNCP-9.zip,2,NCP,2699,2665,51,1,[2665]\r\nNCP-12.zip,2,NCP,331,1810,158,2,\"[1810, 1811]\"\r\nNCP-12.zip,2,NCP,334,1817,59,2,\"[1816, 1817]\"\r\nNCP-1.zip,2,NCP,1009,2571,29,2,\"[2570, 2571]\"\r\nCP-30.zip,1,CP,4041,5590,31,1,[5590]\r\nCP-24.zip,1,CP,705,3067,168,1,[3067]\r\nNormal-24.zip,0,Normal,2665,175,33,1,[175]\r\nNCP-12.zip,2,NCP,332,1813,70,2,\"[1812, 1813]\"\r\nCP-11.zip,1,CP,1444,3962,58,3,\"[3960, 3961, 3962]\"\r\nCP-22.zip,1,CP,614,2976,100,1,[2976]\r\nNormal-23.zip,0,Normal,2630,140,38,1,[140]\r\nNormal-8.zip,0,Normal,1876,331,97,1,[331]\r\nNCP-1.zip,2,NCP,1001,2559,141,1,[2559]\r\nNCP-22.zip,2,NCP,845,2361,148,4,\"[2360, 2361, 2362, 2363]\"\r\nNormal-1.zip,0,Normal,1676,818,65,7,\"[816, 817, 818, 819, 820, 821, 
822]\"\r\nCP-26.zip,1,CP,3646,5606,36,1,[5606]\r\nNormal-9.zip,0,Normal,1907,362,92,1,[362]\r\nNormal-1.zip,0,Normal,1672,800,78,8,\"[796, 797, 798, 799, 800, 801, 802, 803]\"\r\nNCP-12.zip,2,NCP,333,1815,68,2,\"[1814, 1815]\"\r\nCP-11.zip,1,CP,1428,3925,56,3,\"[3923, 3924, 3925]\"\r\nCP-17.zip,1,CP,1634,4322,23,1,[4322]\r\nNormal-12.zip,0,Normal,2009,464,93,1,[464]\r\nCP-7.zip,1,CP,1270,3488,287,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nCP-26.zip,1,CP,3731,5670,215,1,[5670]\r\nNormal-25.zip,0,Normal,3714,5344,22,1,[5344]\r\nNormal-19.zip,0,Normal,2231,686,85,1,[686]\r\nCP-7.zip,1,CP,1270,3500,160,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nNCP-23.zip,2,NCP,940,2483,22,1,[2483]\r\nNormal-25.zip,0,Normal,3851,5363,201,1,[5363]\r\nNCP-6.zip,2,NCP,209,1562,139,2,\"[1562, 1563]\"\r\nNCP-13.zip,2,NCP,347,1846,53,2,\"[1845, 1846]\"\r\nNCP-11.zip,2,NCP,312,1772,62,2,\"[1771, 1772]\"\r\nCP-5.zip,1,CP,1196,3414,186,1,[3414]\r\nNCP-21.zip,2,NCP,74,1282,54,2,\"[1281, 1282]\"\r\nCP-23.zip,1,CP,662,3024,114,1,[3024]\r\nNCP-7.zip,2,NCP,23,1177,151,2,\"[1177, 1178]\"\r\nCP-16.zip,1,CP,1591,4279,23,1,[4279]\r\nNormal-12.zip,0,Normal,1995,450,95,1,[450]\r\nNormal-20.zip,0,Normal,2264,719,82,1,[719]\r\nNCP-30.zip,2,NCP,948,2491,365,1,[2491]\r\nNormal-12.zip,0,Normal,1998,453,99,1,[453]\r\nNCP-19.zip,2,NCP,522,2201,58,2,\"[2200, 2201]\"\r\nCP-13.zip,1,CP,1510,4121,60,2,\"[4121, 4122]\"\r\nNCP-15.zip,2,NCP,406,1962,61,2,\"[1961, 1962]\"\r\nNCP-4.zip,2,NCP,162,1468,148,2,\"[1468, 1469]\"\r\nCP-11.zip,1,CP,1431,3930,61,2,\"[3930, 3931]\"\r\nCP-15.zip,1,CP,1569,4257,20,1,[4257]\r\nCP-9.zip,1,CP,1379,3813,52,2,\"[3812, 3813]\"\r\nNCP-30.zip,2,NCP,981,2525,40,2,\"[2525, 
2526]\"\r\nNCP-8.zip,2,NCP,2679,2650,42,1,[2650]\r\nNCP-25.zip,2,NCP,3951,5465,43,1,[5465]\r\nNCP-7.zip,2,NCP,2460,2684,36,1,[2684]\r\nCP-25.zip,1,CP,734,3096,106,1,[3096]\r\nNCP-6.zip,2,NCP,209,1563,58,2,\"[1562, 1563]\"\r\nNormal-22.zip,0,Normal,2593,103,38,1,[103]\r\nNCP-16.zip,2,NCP,438,2029,149,2,\"[2029, 2030]\"\r\nCP-7.zip,1,CP,1304,3638,43,8,\"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]\"\r\nNormal-8.zip,0,Normal,1885,340,101,1,[340]\r\nNCP-17.zip,2,NCP,484,2123,137,2,\"[2123, 2124]\"\r\nNCP-20.zip,2,NCP,565,2288,135,2,\"[2288, 2289]\"\r\nNCP-5.zip,2,NCP,185,1515,51,2,\"[1514, 1515]\"\r\nNCP-29.zip,2,NCP,877,2411,65,1,[2411]\r\nNCP-6.zip,2,NCP,216,1577,58,2,\"[1576, 1577]\"\r\nNormal-24.zip,0,Normal,2658,168,37,1,[168]\r\nCP-28.zip,1,CP,3779,5723,26,1,[5723]\r\nNormal-15.zip,0,Normal,2090,545,83,1,[545]\r\nNormal-2.zip,0,Normal,1750,1077,69,3,\"[1074, 1077, 1078]\"\r\nNCP-24.zip,2,NCP,98,1333,58,2,\"[1332, 1333]\"\r\nCP-5.zip,1,CP,1199,3417,180,1,[3417]\r\nCP-3.zip,1,CP,1146,3364,161,1,[3364]\r\nCP-11.zip,1,CP,1449,3971,50,2,\"[3971, 3972]\"\r\nNormal-3.zip,0,Normal,1767,1154,66,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nNormal-22.zip,0,Normal,2585,95,41,1,[95]\r\nCP-29.zip,1,CP,3816,5760,29,1,[5760]\r\nNCP-21.zip,2,NCP,62,1258,60,2,\"[1257, 1258]\"\r\nNCP-2.zip,2,NCP,1056,2632,473,1,[2632]\r\nNCP-19.zip,2,NCP,525,2206,144,2,\"[2206, 2207]\"\r\nNormal-22.zip,0,Normal,2600,110,41,1,[110]\r\nCP-3.zip,1,CP,1161,3379,310,1,[3379]\r\nNCP-12.zip,2,NCP,316,1779,139,2,\"[1779, 1780]\"\r\nNCP-28.zip,2,NCP,868,2396,200,2,\"[2395, 2396]\"\r\nCP-7.zip,1,CP,1301,3600,52,4,\"[3598, 3599, 3600, 3601]\"\r\nNCP-11.zip,2,NCP,301,1749,62,2,\"[1748, 1749]\"\r\nNormal-9.zip,0,Normal,1917,372,96,1,[372]\r\nNCP-20.zip,2,NCP,571,2300,163,2,\"[2300, 2301]\"\r\nNormal-3.zip,0,Normal,1767,1152,68,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nNormal-1.zip,0,Normal,1716,988,71,2,\"[987, 
988]\"\r\nNCP-28.zip,2,NCP,842,2357,42,1,[2357]\r\nNCP-27.zip,2,NCP,309,1765,162,2,\"[1766, 1765]\"\r\nCP-12.zip,1,CP,1479,4040,60,3,\"[4039, 4040, 4041]\"\r\nNCP-6.zip,2,NCP,22,1175,163,2,\"[1175, 1176]\"\r\nNCP-28.zip,2,NCP,868,2395,51,2,\"[2395, 2396]\"\r\nCP-14.zip,1,CP,1532,4171,50,2,\"[4171, 4172]\"\r\nNormal-11.zip,0,Normal,1984,439,86,1,[439]\r\nNormal-24.zip,0,Normal,2643,153,39,1,[153]\r\nCP-20.zip,1,CP,2765,3296,42,1,[3296]\r\nNormal-2.zip,0,Normal,1763,1132,72,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nNCP-2.zip,2,NCP,109,1356,60,2,\"[1355, 1356]\"\r\nNCP-7.zip,2,NCP,241,1628,55,2,\"[1627, 1628]\"\r\nNormal-22.zip,0,Normal,2587,97,44,1,[97]\r\nCP-20.zip,1,CP,2753,3284,37,1,[3284]\r\nNormal-1.zip,0,Normal,1670,790,63,6,\"[787, 788, 789, 790, 791, 792]\"\r\nNormal-15.zip,0,Normal,2103,558,88,1,[558]\r\nCP-13.zip,1,CP,1503,4104,64,3,\"[4104, 4105, 4106]\"\r\nNormal-21.zip,0,Normal,2313,768,94,1,[768]\r\nCP-9.zip,1,CP,1382,3818,200,3,\"[3818, 3819, 3820]\"\r\nNormal-2.zip,0,Normal,1756,1102,64,4,\"[1101, 1102, 1103, 1104]\"\r\nNCP-12.zip,2,NCP,334,1816,140,2,\"[1816, 1817]\"\r\nCP-13.zip,1,CP,1518,4140,67,3,\"[4138, 4139, 4140]\"\r\nCP-13.zip,1,CP,1492,4077,139,3,\"[4077, 4078, 4079]\"\r\nNormal-11.zip,0,Normal,1982,437,99,1,[437]\r\nNCP-6.zip,2,NCP,213,1570,159,2,\"[1570, 1571]\"\r\nCP-18.zip,1,CP,1779,3551,59,2,\"[3551, 3552]\"\r\nNCP-12.zip,2,NCP,321,1790,122,2,\"[1790, 1791]\"\r\nNCP-4.zip,2,NCP,159,1462,144,2,\"[1462, 1463]\"\r\nCP-24.zip,1,CP,684,3046,161,1,[3046]\r\nCP-29.zip,1,CP,3828,5772,26,1,[5772]\r\nNormal-1.zip,0,Normal,1669,784,196,5,\"[782, 783, 784, 785, 786]\"\r\nCP-12.zip,1,CP,1462,4004,51,3,\"[4002, 4003, 4004]\"\r\nNormal-1.zip,0,Normal,1707,969,65,2,\"[969, 970]\"\r\nCP-24.zip,1,CP,685,3047,168,1,[3047]\r\nNCP-16.zip,2,NCP,444,2043,61,2,\"[2042, 2043]\"\r\nCP-19.zip,1,CP,2430,2892,106,2,\"[2891, 
2892]\"\r\nNormal-25.zip,0,Normal,3857,5369,222,1,[5369]\r\nCP-28.zip,1,CP,3774,5718,20,1,[5718]\r\nCP-21.zip,1,CP,591,2953,124,1,[2953]\r\nNormal-1.zip,0,Normal,1669,783,62,5,\"[782, 783, 784, 785, 786]\"\r\nNormal-1.zip,0,Normal,1670,792,66,6,\"[787, 788, 789, 790, 791, 792]\"\r\nNCP-14.zip,2,NCP,387,1925,54,2,\"[1924, 1925]\"\r\nCP-10.zip,1,CP,14,3515,115,1,[3515]\r\nNCP-4.zip,2,NCP,16,1164,113,2,\"[1164, 1165]\"\r\nNormal-17.zip,0,Normal,2162,617,96,1,[617]\r\nCP-13.zip,1,CP,1513,4127,60,2,\"[4127, 4128]\"\r\nNCP-11.zip,2,NCP,300,1746,139,2,\"[1746, 1747]\"\r\nNCP-21.zip,2,NCP,577,2312,61,2,\"[2311, 2312]\"\r\nNormal-8.zip,0,Normal,1875,330,93,1,[330]\r\nNormal-27.zip,0,Normal,3906,5439,62,1,[5439]\r\nNCP-7.zip,2,NCP,249,1645,58,2,\"[1644, 1645]\"\r\nNCP-20.zip,2,NCP,552,2262,61,2,\"[2261, 2262]\"\r\nNCP-9.zip,2,NCP,2701,2667,56,1,[2667]\r\nNCP-15.zip,2,NCP,417,1987,139,2,\"[1987, 1988]\"\r\nCP-18.zip,1,CP,1782,3586,69,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]\"\r\nNCP-9.zip,2,NCP,2705,2671,56,1,[2671]\r\nNormal-3.zip,0,Normal,1767,1160,71,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nCP-15.zip,1,CP,1585,4273,23,1,[4273]\r\nCP-27.zip,1,CP,3742,5686,17,1,[5686]\r\nCP-14.zip,1,CP,1521,4146,57,2,\"[4146, 4147]\"\r\nNormal-1.zip,0,Normal,1703,960,70,2,\"[959, 960]\"\r\nCP-21.zip,1,CP,6,3510,36,1,[3510]\r\nNCP-19.zip,2,NCP,54,1242,62,2,\"[1241, 1242]\"\r\nNCP-5.zip,2,NCP,17,1166,143,2,\"[1166, 1167]\"\r\nNCP-15.zip,2,NCP,413,1977,47,4,\"[1975, 1976, 1977, 1979]\"\r\nNCP-22.zip,2,NCP,845,2360,53,4,\"[2360, 2361, 2362, 2363]\"\r\nNCP-2.zip,2,NCP,120,1381,139,2,\"[1381, 1382]\"\r\nCP-5.zip,1,CP,1207,3425,189,1,[3425]\r\nCP-27.zip,1,CP,3758,5702,23,1,[5702]\r\nCP-16.zip,1,CP,1592,4280,25,1,[4280]\r\nCP-21.zip,1,CP,4,3506,275,4,\"[3505, 3506, 3507, 3508]\"\r\nNCP-21.zip,2,NCP,72,1277,55,2,\"[1276, 1277]\"\r\nNCP-17.zip,2,NCP,475,2105,156,2,\"[2105, 2106]\"\r\nNCP-13.zip,2,NCP,358,1868,67,2,\"[1867, 
1868]\"\r\nNormal-3.zip,0,Normal,764,199,130,1,[199]\r\nNormal-1.zip,0,Normal,1721,998,68,4,\"[1000, 997, 998, 999]\"\r\nCP-9.zip,1,CP,1358,3763,63,3,\"[3761, 3762, 3763]\"\r\nNormal-1.zip,0,Normal,1676,817,65,7,\"[816, 817, 818, 819, 820, 821, 822]\"\r\nNCP-4.zip,2,NCP,169,1483,56,2,\"[1482, 1483]\"\r\nNormal-1.zip,0,Normal,1707,970,65,2,\"[969, 970]\"\r\nNCP-18.zip,2,NCP,502,2160,140,2,\"[2160, 2161]\"\r\nCP-18.zip,1,CP,1781,3568,67,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nNCP-10.zip,2,NCP,2727,2683,44,1,[2683]\r\nCP-26.zip,1,CP,3719,5651,277,3,\"[5649, 5650, 5651]\"\r\nCP-11.zip,1,CP,1422,3910,58,3,\"[3908, 3909, 3910]\"\r\nNCP-4.zip,2,NCP,168,1480,139,2,\"[1480, 1481]\"\r\nCP-8.zip,1,CP,1329,3695,89,3,\"[3695, 3696, 3697]\"\r\nCP-12.zip,1,CP,1463,4005,49,2,\"[4005, 4006]\"\r\nNormal-27.zip,0,Normal,3915,5458,70,1,[5458]\r\nNormal-18.zip,0,Normal,2209,664,82,1,[664]\r\nCP-13.zip,1,CP,1492,4079,58,3,\"[4077, 4078, 4079]\"\r\nCP-30.zip,1,CP,3830,5774,29,1,[5774]\r\nCP-8.zip,1,CP,1329,3696,45,3,\"[3695, 3696, 3697]\"\r\nNormal-16.zip,0,Normal,2139,594,87,1,[594]\r\nNCP-14.zip,2,NCP,393,1936,149,2,\"[1936, 1937]\"\r\nCP-21.zip,1,CP,4,3508,290,4,\"[3505, 3506, 3507, 3508]\"\r\nNormal-2.zip,0,Normal,1737,1037,79,4,\"[1037, 1038, 1039, 1040]\"\r\nNCP-25.zip,2,NCP,3708,5535,59,1,[5535]\r\nCP-14.zip,1,CP,1540,4193,58,3,\"[4191, 4192, 4193]\"\r\nCP-7.zip,1,CP,1301,3601,276,4,\"[3598, 3599, 3600, 3601]\"\r\nNCP-7.zip,2,NCP,249,1644,139,2,\"[1644, 1645]\"\r\nNCP-12.zip,2,NCP,339,1827,51,2,\"[1826, 1827]\"\r\nNCP-2.zip,2,NCP,1275,2716,68,1,[2716]\r\nNCP-13.zip,2,NCP,354,1860,73,2,\"[1859, 1860]\"\r\nNormal-2.zip,0,Normal,1757,1105,71,4,\"[1105, 1106, 1107, 1108]\"\r\nNCP-27.zip,2,NCP,1016,2582,108,3,\"[2580, 2581, 2582]\"\r\nCP-18.zip,1,CP,1777,3541,62,5,\"[3540, 3541, 3542, 3543, 3544]\"\r\nNCP-1.zip,2,NCP,1008,2569,387,1,[2569]\r\nCP-7.zip,1,CP,1315,3665,59,2,\"[3665, 
3666]\"\r\nCP-27.zip,1,CP,3737,5681,17,1,[5681]\r\nNormal-9.zip,0,Normal,1914,369,88,1,[369]\r\nNormal-1.zip,0,Normal,1672,802,75,8,\"[796, 797, 798, 799, 800, 801, 802, 803]\"\r\nNCP-8.zip,2,NCP,25,1181,129,2,\"[1181, 1183]\"\r\nCP-19.zip,1,CP,1789,3207,64,4,\"[3204, 3205, 3206, 3207]\"\r\nCP-11.zip,1,CP,1444,3960,139,3,\"[3960, 3961, 3962]\"\r\nNCP-4.zip,2,NCP,145,1435,58,2,\"[1434, 1435]\"\r\nNormal-1.zip,0,Normal,1676,821,72,7,\"[816, 817, 818, 819, 820, 821, 822]\"\r\nCP-23.zip,1,CP,659,3021,594,1,[3021]\r\nNormal-25.zip,0,Normal,3716,5346,31,1,[5346]\r\nNormal-10.zip,0,Normal,1936,391,82,1,[391]\r\nNCP-22.zip,2,NCP,821,2331,30,1,[2331]\r\nCP-13.zip,1,CP,1505,4108,54,3,\"[4108, 4109, 4110]\"\r\nNCP-15.zip,2,NCP,411,1972,62,2,\"[1971, 1972]\"\r\nCP-7.zip,1,CP,1304,3633,18,8,\"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]\"\r\nNCP-27.zip,2,NCP,1048,2621,44,2,\"[2620, 2621]\"\r\nCP-21.zip,1,CP,595,2957,306,1,[2957]\r\nNCP-22.zip,2,NCP,861,2384,197,1,[2384]\r\nCP-7.zip,1,CP,1302,3604,39,4,\"[3602, 3603, 3604, 3605]\"\r\nNCP-17.zip,2,NCP,472,2099,151,2,\"[2099, 2100]\"\r\nNCP-8.zip,2,NCP,26,1185,36,2,\"[1184, 1185]\"\r\nNormal-1.zip,0,Normal,1676,819,65,7,\"[816, 817, 818, 819, 820, 821, 822]\"\r\nNormal-27.zip,0,Normal,3903,5435,75,1,[5435]\r\nNormal-25.zip,0,Normal,3840,5352,210,1,[5352]\r\nNCP-8.zip,2,NCP,266,1679,58,2,\"[1678, 1679]\"\r\nNormal-16.zip,0,Normal,2120,575,84,1,[575]\r\nNCP-22.zip,2,NCP,82,1297,129,2,\"[1297, 1298]\"\r\nNormal-16.zip,0,Normal,2128,583,76,1,[583]\r\nNormal-1.zip,0,Normal,1718,992,66,2,\"[991, 992]\"\r\nCP-11.zip,1,CP,1449,3972,50,2,\"[3971, 3972]\"\r\nCP-7.zip,1,CP,1304,3636,47,8,\"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 
3639]\"\r\nNormal-22.zip,0,Normal,2597,107,41,1,[107]\r\nNCP-10.zip,2,NCP,2726,2682,50,1,[2682]\r\nNormal-7.zip,0,Normal,1849,304,87,1,[304]\r\nNormal-13.zip,0,Normal,2040,495,95,1,[495]\r\nNormal-16.zip,0,Normal,2125,580,83,1,[580]\r\nCP-25.zip,1,CP,740,3102,193,1,[3102]\r\nNCP-22.zip,2,NCP,871,2401,281,2,\"[2401, 2402]\"\r\nNCP-9.zip,2,NCP,2704,2670,56,1,[2670]\r\nNCP-12.zip,2,NCP,33,1198,147,2,\"[1198, 1199]\"\r\nCP-18.zip,1,CP,1663,4351,26,1,[4351]\r\nNormal-3.zip,0,Normal,1767,1157,28,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nNormal-2.zip,0,Normal,1735,1031,76,2,\"[1030, 1031]\"\r\nNormal-10.zip,0,Normal,1938,393,66,1,[393]\r\nNCP-24.zip,2,NCP,975,2518,484,1,[2518]\r\nCP-18.zip,1,CP,1774,3523,65,8,\"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]\"\r\nNCP-14.zip,2,NCP,381,1914,143,2,\"[1914, 1915]\"\r\nNCP-12.zip,2,NCP,33,1199,62,2,\"[1198, 1199]\"\r\nNCP-13.zip,2,NCP,352,1855,138,2,\"[1855, 1856]\"\r\nNCP-12.zip,2,NCP,333,1814,162,2,\"[1814, 1815]\"\r\nNCP-23.zip,2,NCP,904,2446,667,1,[2446]\r\nNCP-24.zip,2,NCP,985,2531,508,1,[2531]\r\nNCP-6.zip,2,NCP,228,1600,161,2,\"[1600, 1601]\"\r\nNCP-15.zip,2,NCP,414,1980,121,2,\"[1980, 1981]\"\r\nNCP-1.zip,2,NCP,103,1344,63,2,\"[1343, 1344]\"\r\nNormal-3.zip,0,Normal,1767,1155,66,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nCP-8.zip,1,CP,1349,3744,58,3,\"[3742, 3743, 3744]\"\r\nNCP-8.zip,2,NCP,261,1669,65,2,\"[1668, 1669]\"\r\nNormal-21.zip,0,Normal,2300,755,98,1,[755]\r\nNCP-13.zip,2,NCP,354,1859,177,2,\"[1859, 1860]\"\r\nCP-23.zip,1,CP,665,3027,116,1,[3027]\r\nCP-15.zip,1,CP,1561,4242,49,2,\"[4241, 4242]\"\r\nCP-9.zip,1,CP,1376,3806,60,2,\"[3806, 3807]\"\r\nNormal-1.zip,0,Normal,1727,1012,66,4,\"[1009, 1010, 1011, 1012]\"\r\nNCP-28.zip,2,NCP,835,2349,46,2,\"[2349, 2350]\"\r\nCP-8.zip,1,CP,1349,3742,142,3,\"[3742, 3743, 
3744]\"\r\nNormal-20.zip,0,Normal,2277,732,95,1,[732]\r\nNCP-28.zip,2,NCP,876,2409,52,1,[2409]\r\nNormal-15.zip,0,Normal,2101,556,85,1,[556]\r\nCP-11.zip,1,CP,1444,3961,58,3,\"[3960, 3961, 3962]\"\r\nCP-7.zip,1,CP,1270,3496,154,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nNCP-2.zip,2,NCP,1276,2717,61,1,[2717]\r\nNormal-3.zip,0,Normal,1767,1153,68,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nNormal-13.zip,0,Normal,2051,506,86,1,[506]\r\nNormal-2.zip,0,Normal,1734,1029,66,2,\"[1028, 1029]\"\r\nNormal-26.zip,0,Normal,3871,5383,22,1,[5383]\r\nNCP-1.zip,2,NCP,1009,2570,39,2,\"[2570, 2571]\"\r\nNormal-2.zip,0,Normal,1763,1139,65,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nNCP-13.zip,2,NCP,359,1870,61,2,\"[1869, 1870]\"\r\nNormal-19.zip,0,Normal,2220,675,78,1,[675]\r\nCP-9.zip,1,CP,1382,3819,60,3,\"[3818, 3819, 3820]\"\r\nCP-20.zip,1,CP,2752,3283,26,1,[3283]\r\nNCP-5.zip,2,NCP,181,1506,139,2,\"[1506, 1507]\"\r\nCP-13.zip,1,CP,1510,4122,60,2,\"[4121, 4122]\"\r\nNCP-16.zip,2,NCP,440,2034,125,2,\"[2034, 2035]\"\r\nCP-12.zip,1,CP,1458,3994,69,3,\"[3992, 3993, 3994]\"\r\nCP-7.zip,1,CP,1270,3492,137,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nNCP-11.zip,2,NCP,284,1714,58,2,\"[1713, 1714]\"\r\nNCP-11.zip,2,NCP,303,1753,58,2,\"[1752, 1753]\"\r\nNCP-6.zip,2,NCP,205,1554,126,2,\"[1554, 1555]\"\r\nCP-14.zip,1,CP,1535,4179,53,2,\"[4178, 4179]\"\r\nNormal-27.zip,0,Normal,3910,5446,66,2,\"[5445, 5446]\"\r\nNCP-13.zip,2,NCP,357,1865,150,2,\"[1865, 1866]\"\r\nNormal-3.zip,0,Normal,742,177,107,1,[177]\r\nNormal-22.zip,0,Normal,2589,99,37,1,[99]\r\nNCP-22.zip,2,NCP,88,1310,71,2,\"[1309, 1310]\"\r\nCP-14.zip,1,CP,1521,4147,57,2,\"[4146, 4147]\"\r\nCP-26.zip,1,CP,3729,5666,179,3,\"[5665, 5666, 
5667]\"\r\nCP-28.zip,1,CP,3793,5737,29,1,[5737]\r\nNormal-3.zip,0,Normal,767,202,358,1,[202]\r\nNCP-5.zip,2,NCP,198,1540,144,2,\"[1540, 1541]\"\r\nCP-27.zip,1,CP,3738,5682,19,1,[5682]\r\nCP-27.zip,1,CP,3750,5694,28,1,[5694]\r\nCP-10.zip,1,CP,1416,3898,58,2,\"[3897, 3898]\"\r\nCP-8.zip,1,CP,1322,3680,56,2,\"[3680, 3681]\"\r\nNormal-23.zip,0,Normal,2607,117,38,1,[117]\r\nNCP-3.zip,2,NCP,138,1420,124,2,\"[1420, 1421]\"\r\nCP-11.zip,1,CP,1425,3916,185,3,\"[3916, 3917, 3918]\"\r\nCP-15.zip,1,CP,1581,4269,19,1,[4269]\r\nCP-24.zip,1,CP,706,3068,124,1,[3068]\r\nCP-18.zip,1,CP,1666,4354,23,1,[4354]\r\nNCP-4.zip,2,NCP,161,1466,135,2,\"[1466, 1467]\"\r\nNormal-7.zip,0,Normal,1847,302,102,1,[302]\r\nCP-11.zip,1,CP,1443,3959,58,3,\"[3957, 3958, 3959]\"\r\nCP-19.zip,1,CP,1784,3593,69,4,\"[3590, 3591, 3592, 3593]\"\r\nCP-21.zip,1,CP,605,2967,157,1,[2967]\r\nCP-5.zip,1,CP,12,3168,291,2,\"[3168, 3169]\"\r\nNormal-9.zip,0,Normal,1909,364,102,1,[364]\r\nNCP-22.zip,2,NCP,850,2369,52,1,[2369]\r\nCP-24.zip,1,CP,687,3049,135,1,[3049]\r\nNCP-1.zip,2,NCP,1033,2604,39,1,[2604]\r\nNormal-2.zip,0,Normal,1750,1074,65,3,\"[1074, 1077, 1078]\"\r\nCP-9.zip,1,CP,1365,3779,200,3,\"[3779, 3780, 3781]\"\r\nNCP-18.zip,2,NCP,502,2161,59,2,\"[2160, 2161]\"\r\nNormal-3.zip,0,Normal,1767,1162,76,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nNormal-1.zip,0,Normal,1672,799,78,8,\"[796, 797, 798, 799, 800, 801, 802, 803]\"\r\nNormal-3.zip,0,Normal,747,182,100,1,[182]\r\nNCP-12.zip,2,NCP,319,1787,66,2,\"[1785, 1787]\"\r\nNCP-15.zip,2,NCP,405,1959,143,2,\"[1959, 1960]\"\r\nCP-18.zip,1,CP,1781,3574,64,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nCP-21.zip,1,CP,600,2962,202,1,[2962]\r\nCP-12.zip,1,CP,1479,4039,60,3,\"[4039, 4040, 4041]\"\r\nNCP-27.zip,2,NCP,827,2340,173,1,[2340]\r\nNCP-24.zip,2,NCP,983,2528,67,1,[2528]\r\nNormal-1.zip,0,Normal,1669,786,54,5,\"[782, 783, 784, 785, 786]\"\r\nCP-11.zip,1,CP,1424,3915,60,2,\"[3914, 
3915]\"\r\nCP-2.zip,1,CP,1105,3323,220,1,[3323]\r\nCP-10.zip,1,CP,1412,3888,66,2,\"[3887, 3888]\"\r\nNCP-18.zip,2,NCP,495,2147,65,2,\"[2146, 2147]\"\r\nNCP-3.zip,2,NCP,134,1412,128,2,\"[1412, 1413]\"\r\nNormal-10.zip,0,Normal,1940,395,74,1,[395]\r\nNormal-17.zip,0,Normal,2163,618,89,1,[618]\r\nCP-9.zip,1,CP,1358,3761,249,3,\"[3761, 3762, 3763]\"\r\nCP-23.zip,1,CP,658,3020,273,1,[3020]\r\nNCP-12.zip,2,NCP,341,1830,129,3,\"[1830, 1832, 1834]\"\r\nCP-14.zip,1,CP,1520,4145,57,3,\"[4143, 4144, 4145]\"\r\nCP-19.zip,1,CP,1783,3588,62,2,\"[3588, 3589]\"\r\nNormal-3.zip,0,Normal,1767,1158,66,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nCP-7.zip,1,CP,1301,3598,55,4,\"[3598, 3599, 3600, 3601]\"\r\nNormal-6.zip,0,Normal,1810,265,85,1,[265]\r\nNCP-12.zip,2,NCP,321,1791,51,2,\"[1790, 1791]\"\r\nNCP-12.zip,2,NCP,341,1834,54,3,\"[1830, 1832, 1834]\"\r\nNCP-6.zip,2,NCP,210,1564,131,2,\"[1564, 1565]\"\r\nCP-11.zip,1,CP,1435,3938,46,2,\"[3938, 3939]\"\r\nNormal-26.zip,0,Normal,3876,5388,30,1,[5388]\r\nNormal-16.zip,0,Normal,2123,578,90,1,[578]\r\nNormal-6.zip,0,Normal,1816,271,76,1,[271]\r\nNCP-26.zip,2,NCP,3992,5516,48,1,[5516]\r\nCP-18.zip,1,CP,1777,3544,66,5,\"[3540, 3541, 3542, 3543, 3544]\"\r\nNCP-5.zip,2,NCP,173,1491,58,2,\"[1490, 1491]\"\r\nNCP-11.zip,2,NCP,312,1771,148,2,\"[1771, 1772]\"\r\nNCP-19.zip,2,NCP,525,2207,61,2,\"[2206, 2207]\"\r\nNormal-3.zip,0,Normal,752,187,103,1,[187]\r\nNCP-7.zip,2,NCP,23,1178,63,2,\"[1177, 1178]\"\r\nCP-27.zip,1,CP,3762,5706,26,1,[5706]\r\nCP-18.zip,1,CP,1659,4347,26,1,[4347]\r\nCP-20.zip,1,CP,2667,3248,46,3,\"[3246, 3247, 3248]\"\r\nNormal-24.zip,0,Normal,2653,163,39,1,[163]\r\nNormal-4.zip,0,Normal,801,236,107,1,[236]\r\nNormal-20.zip,0,Normal,2272,727,79,1,[727]\r\nNCP-30.zip,2,NCP,988,2539,56,2,\"[2538, 2539]\"\r\nCP-18.zip,1,CP,1774,3527,58,8,\"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]\"\r\nNormal-17.zip,0,Normal,2165,620,95,1,[620]\r\nCP-12.zip,1,CP,1479,4041,60,3,\"[4039, 4040, 
4041]\"\r\nNormal-21.zip,0,Normal,2299,754,90,1,[754]\r\nCP-22.zip,1,CP,637,2999,118,1,[2999]\r\nNCP-6.zip,2,NCP,217,1578,139,2,\"[1578, 1579]\"\r\nCP-30.zip,1,CP,3919,5544,73,4,\"[5543, 5544, 5545, 5546]\"\r\nCP-13.zip,1,CP,1511,4123,57,2,\"[4123, 4124]\"\r\nNormal-13.zip,0,Normal,2035,490,82,1,[490]\r\nCP-19.zip,1,CP,1794,3594,38,2,\"[3594, 3595]\"\r\nNCP-15.zip,2,NCP,410,1970,60,2,\"[1969, 1970]\"\r\nCP-10.zip,1,CP,1417,3899,59,1,[3899]\r\nNCP-8.zip,2,NCP,261,1668,155,2,\"[1668, 1669]\"\r\nCP-20.zip,1,CP,2667,3247,92,3,\"[3246, 3247, 3248]\"\r\nCP-26.zip,1,CP,3636,5595,290,1,[5595]\r\nNormal-2.zip,0,Normal,1763,1136,70,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nNormal-9.zip,0,Normal,1913,368,88,1,[368]\r\nCP-9.zip,1,CP,1375,3805,58,2,\"[3804, 3805]\"\r\nCP-16.zip,1,CP,1606,4294,26,1,[4294]\r\nCP-18.zip,1,CP,1777,3543,68,5,\"[3540, 3541, 3542, 3543, 3544]\"\r\nNormal-21.zip,0,Normal,2287,742,77,1,[742]\r\nCP-11.zip,1,CP,1422,3908,140,3,\"[3908, 3909, 3910]\"\r\nNCP-22.zip,2,NCP,859,2381,268,2,\"[2380, 2381]\"\r\nNormal-24.zip,0,Normal,2645,155,38,1,[155]\r\nCP-7.zip,1,CP,1302,3605,201,4,\"[3602, 3603, 3604, 3605]\"\r\nCP-23.zip,1,CP,646,3008,128,1,[3008]\r\nCP-11.zip,1,CP,1425,3918,49,3,\"[3916, 3917, 3918]\"\r\nCP-18.zip,1,CP,1781,3569,65,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nNCP-16.zip,2,NCP,436,2025,146,2,\"[2025, 2026]\"\r\nNCP-18.zip,2,NCP,503,2163,61,2,\"[2162, 2163]\"\r\nNCP-4.zip,2,NCP,167,1478,143,2,\"[1478, 1479]\"\r\nNormal-26.zip,0,Normal,3880,5392,32,1,[5392]\r\nNCP-25.zip,2,NCP,3709,5536,65,1,[5536]\r\nNormal-2.zip,0,Normal,1734,1028,66,2,\"[1028, 1029]\"\r\nNormal-17.zip,0,Normal,2169,624,92,1,[624]\r\nNCP-20.zip,2,NCP,546,2249,134,2,\"[2249, 2250]\"\r\nNCP-14.zip,2,NCP,379,1910,147,2,\"[1910, 1911]\"\r\nNCP-4.zip,2,NCP,146,1437,52,2,\"[1436, 
1437]\"\r\nNCP-26.zip,2,NCP,3995,5493,47,1,[5493]\r\nCP-20.zip,1,CP,2763,3294,119,1,[3294]\r\nNCP-13.zip,2,NCP,349,1850,57,2,\"[1849, 1850]\"\r\nCP-26.zip,1,CP,3644,5604,284,1,[5604]\r\nCP-8.zip,1,CP,1327,3690,253,3,\"[3690, 3691, 3692]\"\r\nCP-20.zip,1,CP,2770,3301,38,1,[3301]\r\nCP-12.zip,1,CP,1471,4023,55,2,\"[4022, 4023]\"\r\nNormal-27.zip,0,Normal,3912,5453,68,1,[5453]\r\nNCP-23.zip,2,NCP,93,1322,157,2,\"[1322, 1323]\"\r\nCP-18.zip,1,CP,1781,3576,64,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nNCP-13.zip,2,NCP,347,1845,126,2,\"[1845, 1846]\"\r\nCP-20.zip,1,CP,2454,2935,120,2,\"[2935, 2936]\"\r\nNormal-1.zip,0,Normal,1670,788,58,6,\"[787, 788, 789, 790, 791, 792]\"\r\nNormal-8.zip,0,Normal,1880,335,83,1,[335]\r\nNormal-10.zip,0,Normal,1937,392,90,1,[392]\r\nCP-20.zip,1,CP,2768,3299,38,1,[3299]\r\nNormal-18.zip,0,Normal,2212,667,89,1,[667]\r\nNormal-1.zip,0,Normal,1677,826,65,4,\"[823, 824, 825, 826]\"\r\nCP-26.zip,1,CP,3721,5654,43,2,\"[5654, 5655]\"\r\nNCP-16.zip,2,NCP,439,2033,66,2,\"[2032, 2033]\"\r\nNormal-13.zip,0,Normal,2031,486,81,1,[486]\r\nCP-19.zip,1,CP,1783,3589,62,2,\"[3588, 3589]\"\r\nCP-2.zip,1,CP,1121,3339,156,1,[3339]\r\nCP-22.zip,1,CP,612,2974,84,1,[2974]\r\nNormal-26.zip,0,Normal,3867,5379,29,1,[5379]\r\nNCP-1.zip,2,NCP,102,1342,56,2,\"[1341, 1342]\"\r\nNCP-18.zip,2,NCP,493,2142,133,2,\"[2142, 2143]\"\r\nNCP-12.zip,2,NCP,339,1826,120,2,\"[1826, 1827]\"\r\nNormal-14.zip,0,Normal,2085,540,95,1,[540]\r\nNCP-27.zip,2,NCP,238,1622,57,2,\"[1621, 1622]\"\r\nNormal-2.zip,0,Normal,1737,1039,80,4,\"[1037, 1038, 1039, 1040]\"\r\nCP-30.zip,1,CP,3919,5546,70,4,\"[5543, 5544, 5545, 5546]\"\r\nNCP-1.zip,2,NCP,1012,2576,249,1,[2576]\r\nNCP-17.zip,2,NCP,463,2080,144,2,\"[2080, 2081]\"\r\nNCP-2.zip,2,NCP,127,1400,58,2,\"[1399, 1400]\"\r\nNormal-21.zip,0,Normal,2291,746,96,1,[746]\r\nNCP-8.zip,2,NCP,25,1183,45,2,\"[1181, 1183]\"\r\nCP-9.zip,1,CP,1382,3820,60,3,\"[3818, 3819, 
3820]\"\r\nNCP-30.zip,2,NCP,967,2510,168,1,[2510]\r\nNormal-27.zip,0,Normal,3910,5445,66,2,\"[5445, 5446]\"\r\nNCP-4.zip,2,NCP,156,1456,138,2,\"[1456, 1457]\"\r\nCP-12.zip,1,CP,1464,4007,63,2,\"[4007, 4008]\"\r\nNCP-4.zip,2,NCP,162,1469,62,2,\"[1468, 1469]\"\r\nCP-13.zip,1,CP,1493,4081,53,3,\"[4080, 4081, 4082]\"\r\nCP-16.zip,1,CP,1602,4290,17,1,[4290]\r\nNCP-6.zip,2,NCP,216,1576,139,2,\"[1576, 1577]\"\r\nCP-25.zip,1,CP,723,3085,104,1,[3085]\r\nNCP-15.zip,2,NCP,411,1971,149,2,\"[1971, 1972]\"\r\nNCP-15.zip,2,NCP,425,2003,139,2,\"[2003, 2004]\"\r\nCP-7.zip,1,CP,1270,3491,142,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nCP-24.zip,1,CP,688,3050,127,1,[3050]\r\nNormal-13.zip,0,Normal,2033,488,77,1,[488]\r\nNCP-23.zip,2,NCP,96,1329,61,2,\"[1328, 1329]\"\r\nNormal-5.zip,0,Normal,803,238,343,1,[238]\r\nCP-16.zip,1,CP,1595,4283,23,1,[4283]\r\nNCP-27.zip,2,NCP,238,1621,134,2,\"[1621, 1622]\"\r\nNCP-19.zip,2,NCP,529,2214,141,3,\"[2214, 2215, 2217]\"\r\nCP-25.zip,1,CP,710,3072,78,1,[3072]\r\nNormal-19.zip,0,Normal,2243,698,86,1,[698]\r\nCP-11.zip,1,CP,1440,3949,51,3,\"[3948, 3949, 3950]\"\r\nCP-7.zip,1,CP,1260,3478,235,1,[3478]\r\nNormal-1.zip,0,Normal,1672,797,76,8,\"[796, 797, 798, 799, 800, 801, 802, 803]\"\r\nCP-26.zip,1,CP,3719,5649,52,3,\"[5649, 5650, 5651]\"\r\nNCP-23.zip,2,NCP,969,2512,68,1,[2512]\r\nNCP-5.zip,2,NCP,186,1516,113,2,\"[1516, 1517]\"\r\nCP-13.zip,1,CP,1507,4114,62,2,\"[4113, 4114]\"\r\nCP-19.zip,1,CP,2443,2916,310,3,\"[2915, 2916, 2917]\"\r\nCP-13.zip,1,CP,1503,4105,64,3,\"[4104, 4105, 4106]\"\r\nNormal-10.zip,0,Normal,1934,389,85,1,[389]\r\nCP-20.zip,1,CP,2760,3291,281,1,[3291]\r\nNormal-19.zip,0,Normal,2242,697,86,1,[697]\r\nNCP-22.zip,2,NCP,864,2388,214,2,\"[2388, 2389]\"\r\nNCP-14.zip,2,NCP,377,1906,147,2,\"[1906, 1907]\"\r\nCP-29.zip,1,CP,3818,5762,29,1,[5762]\r\nCP-23.zip,1,CP,676,3038,291,1,[3038]\r\nNCP-14.zip,2,NCP,389,1928,150,2,\"[1928, 
1929]\"\r\nCP-27.zip,1,CP,3761,5705,16,1,[5705]\r\nNCP-27.zip,2,NCP,1016,2581,179,3,\"[2580, 2581, 2582]\"\r\nNormal-22.zip,0,Normal,2321,776,90,1,[776]\r\nCP-7.zip,1,CP,1304,3639,212,8,\"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]\"\r\nNCP-16.zip,2,NCP,438,2030,62,2,\"[2029, 2030]\"\r\nNCP-2.zip,2,NCP,107,1352,61,2,\"[1351, 1352]\"\r\nNCP-11.zip,2,NCP,295,1736,97,2,\"[1735, 1736]\"\r\nCP-2.zip,1,CP,1122,3340,229,1,[3340]\r\nNormal-25.zip,0,Normal,3849,5361,205,1,[5361]\r\nCP-4.zip,1,CP,1189,3407,284,1,[3407]\r\nNormal-1.zip,0,Normal,1721,999,75,4,\"[1000, 997, 998, 999]\"\r\nNCP-4.zip,2,NCP,152,1449,61,2,\"[1448, 1449]\"\r\nNormal-13.zip,0,Normal,2044,499,103,1,[499]\r\nNormal-2.zip,0,Normal,1756,1103,65,4,\"[1101, 1102, 1103, 1104]\"\r\nCP-9.zip,1,CP,1379,3812,52,2,\"[3812, 3813]\"\r\nCP-20.zip,1,CP,2454,2936,116,2,\"[2935, 2936]\"\r\nNCP-3.zip,2,NCP,1294,2735,62,1,[2735]\r\nCP-6.zip,1,CP,1230,3448,37,1,[3448]\r\nCP-14.zip,1,CP,1540,4191,221,3,\"[4191, 4192, 4193]\"\r\nNormal-5.zip,0,Normal,815,250,120,1,[250]\r\nCP-13.zip,1,CP,1488,4066,66,3,\"[4064, 4065, 4066]\"\r\nNCP-7.zip,2,NCP,241,1627,131,2,\"[1627, 1628]\"\r\nNCP-6.zip,2,NCP,220,1584,160,2,\"[1584, 1585]\"\r\nNCP-30.zip,2,NCP,982,2527,242,1,[2527]\r\nNormal-2.zip,0,Normal,1735,1030,76,2,\"[1030, 1031]\"\r\nCP-12.zip,1,CP,1486,4059,63,2,\"[4059, 4060]\"\r\nCP-18.zip,1,CP,1781,3573,65,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nCP-26.zip,1,CP,3642,5601,29,1,[5601]\r\nNCP-5.zip,2,NCP,186,1517,48,2,\"[1516, 1517]\"\r\nNormal-7.zip,0,Normal,1846,301,105,1,[301]\r\nCP-6.zip,1,CP,1252,3470,180,1,[3470]\r\nNCP-8.zip,2,NCP,254,1655,58,2,\"[1654, 1655]\"\r\nNCP-17.zip,2,NCP,460,2075,45,2,\"[2074, 2075]\"\r\nNCP-3.zip,2,NCP,138,1421,52,2,\"[1420, 1421]\"\r\nCP-29.zip,1,CP,3798,5742,21,1,[5742]\r\nNCP-14.zip,2,NCP,389,1929,63,2,\"[1928, 1929]\"\r\nNCP-22.zip,2,NCP,858,2379,52,1,[2379]\r\nNCP-10.zip,2,NCP,2721,2677,37,1,[2677]\r\nNCP-29.zip,2,NCP,882,2418,257,2,\"[2417, 
2418]\"\r\nNCP-18.zip,2,NCP,495,2146,156,2,\"[2146, 2147]\"\r\nNormal-18.zip,0,Normal,2210,665,88,1,[665]\r\nCP-7.zip,1,CP,1304,3632,18,8,\"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]\"\r\nNCP-18.zip,2,NCP,512,2180,149,2,\"[2180, 2181]\"\r\nNormal-1.zip,0,Normal,1672,803,75,8,\"[796, 797, 798, 799, 800, 801, 802, 803]\"\r\nCP-21.zip,1,CP,2774,3305,31,1,[3305]\r\nCP-9.zip,1,CP,1372,3797,193,3,\"[3797, 3798, 3799]\"\r\nCP-22.zip,1,CP,615,2977,104,1,[2977]\r\nCP-12.zip,1,CP,1469,4019,47,2,\"[4018, 4019]\"\r\nCP-18.zip,1,CP,1774,3522,65,8,\"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]\"\r\nNCP-17.zip,2,NCP,472,2100,63,2,\"[2099, 2100]\"\r\nNormal-14.zip,0,Normal,2069,524,81,1,[524]\r\nCP-18.zip,1,CP,1774,3529,58,8,\"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]\"\r\nNCP-27.zip,2,NCP,1031,2601,216,2,\"[2601, 2602]\"\r\nNCP-22.zip,2,NCP,857,2378,53,1,[2378]\r\nNormal-3.zip,0,Normal,1767,1156,139,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nNormal-24.zip,0,Normal,2641,151,41,1,[151]\r\nNCP-9.zip,2,NCP,2696,2662,44,1,[2662]\r\nCP-17.zip,1,CP,1620,4308,24,1,[4308]\r\nNCP-4.zip,2,NCP,149,1443,66,2,\"[1442, 1443]\"\r\nCP-13.zip,1,CP,1488,4064,158,3,\"[4064, 4065, 4066]\"\r\nNormal-22.zip,0,Normal,2315,770,82,1,[770]\r\nNCP-12.zip,2,NCP,316,1780,58,2,\"[1779, 1780]\"\r\nCP-9.zip,1,CP,1360,3767,67,3,\"[3767, 3768, 3769]\"\r\nNCP-18.zip,2,NCP,512,2181,62,2,\"[2180, 2181]\"\r\nNCP-20.zip,2,NCP,547,2252,66,2,\"[2251, 2252]\"\r\nNormal-10.zip,0,Normal,1942,397,81,1,[397]\r\nNCP-5.zip,2,NCP,198,1541,60,2,\"[1540, 1541]\"\r\nNCP-6.zip,2,NCP,199,1542,138,2,\"[1542, 1543]\"\r\nCP-14.zip,1,CP,1528,4164,61,2,\"[4163, 4164]\"\r\nCP-17.zip,1,CP,1631,4319,23,1,[4319]\r\nNCP-13.zip,2,NCP,353,1858,69,2,\"[1857, 1858]\"\r\nNCP-17.zip,2,NCP,463,2081,60,2,\"[2080, 2081]\"\r\nNCP-1.zip,2,NCP,1019,2585,363,1,[2585]\r\nNCP-22.zip,2,NCP,845,2362,48,4,\"[2360, 2361, 2362, 2363]\"\r\nNCP-15.zip,2,NCP,425,2004,58,2,\"[2003, 
2004]\"\r\nNCP-16.zip,2,NCP,449,2052,145,2,\"[2052, 2053]\"\r\nNCP-28.zip,2,NCP,873,2405,52,2,\"[2405, 2406]\"\r\nNCP-4.zip,2,NCP,152,1448,145,2,\"[1448, 1449]\"\r\nNCP-19.zip,2,NCP,543,2244,54,2,\"[2243, 2244]\"\r\nNormal-14.zip,0,Normal,2062,517,84,1,[517]\r\nNCP-17.zip,2,NCP,465,2086,61,3,\"[2084, 2085, 2086]\"\r\nNormal-25.zip,0,Normal,3717,5347,25,1,[5347]\r\nCP-4.zip,1,CP,1178,3396,133,1,[3396]\r\nCP-22.zip,1,CP,620,2982,64,1,[2982]\r\nNormal-1.zip,0,Normal,1677,825,65,4,\"[823, 824, 825, 826]\"\r\nNormal-9.zip,0,Normal,1908,363,81,1,[363]\r\nCP-30.zip,1,CP,3940,5646,33,1,[5646]\r\nNCP-30.zip,2,NCP,942,2485,45,1,[2485]\r\nCP-18.zip,1,CP,1781,3578,62,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nCP-9.zip,1,CP,1358,3762,126,3,\"[3761, 3762, 3763]\"\r\nCP-27.zip,1,CP,3764,5708,23,1,[5708]\r\nNCP-8.zip,2,NCP,2673,2692,48,1,[2692]\r\nNCP-19.zip,2,NCP,534,2226,49,2,\"[2225, 2226]\"\r\nCP-11.zip,1,CP,1440,3950,51,3,\"[3948, 3949, 3950]\"\r\nNCP-17.zip,2,NCP,465,2084,145,3,\"[2084, 2085, 2086]\"\r\nNCP-19.zip,2,NCP,522,2200,137,2,\"[2200, 2201]\"\r\nCP-12.zip,1,CP,1468,4015,54,3,\"[4015, 4016, 4017]\"\r\nNCP-13.zip,2,NCP,351,1854,61,2,\"[1853, 1854]\"\r\nCP-7.zip,1,CP,1270,3498,247,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nCP-13.zip,1,CP,1498,4095,60,2,\"[4095, 4096]\"\r\nCP-18.zip,1,CP,1778,3548,65,6,\"[3545, 3546, 3547, 3548, 3549, 3550]\"\r\nNormal-1.zip,0,Normal,1670,791,66,6,\"[787, 788, 789, 790, 791, 792]\"\r\nCP-16.zip,1,CP,1611,4299,19,1,[4299]\r\nNormal-14.zip,0,Normal,2080,535,100,1,[535]\r\nNCP-25.zip,2,NCP,3968,5477,44,1,[5477]\r\nNormal-3.zip,0,Normal,755,190,107,1,[190]\r\nNormal-16.zip,0,Normal,2151,606,93,1,[606]\r\nNCP-4.zip,2,NCP,168,1481,58,2,\"[1480, 1481]\"\r\nNormal-21.zip,0,Normal,2289,744,77,1,[744]\r\nNCP-6.zip,2,NCP,224,1593,57,2,\"[1592, 1593]\"\r\nCP-13.zip,1,CP,1502,4103,73,2,\"[4102, 4103]\"\r\nNCP-22.zip,2,NCP,865,2390,34,2,\"[2390, 
2391]\"\r\nCP-28.zip,1,CP,3787,5731,27,1,[5731]\r\nNCP-5.zip,2,NCP,170,1484,141,2,\"[1484, 1485]\"\r\nNormal-20.zip,0,Normal,2271,726,81,1,[726]\r\nNCP-7.zip,2,NCP,2485,2644,46,1,[2644]\r\nNCP-17.zip,2,NCP,475,2106,63,2,\"[2105, 2106]\"\r\nNCP-21.zip,2,NCP,74,1281,127,2,\"[1281, 1282]\"\r\nCP-13.zip,1,CP,1507,4113,62,2,\"[4113, 4114]\"\r\nCP-18.zip,1,CP,1781,3570,62,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nCP-12.zip,1,CP,1462,4002,193,3,\"[4002, 4003, 4004]\"\r\nCP-8.zip,1,CP,1330,3698,58,3,\"[3698, 3699, 3700]\"\r\nNormal-1.zip,0,Normal,1672,796,76,8,\"[796, 797, 798, 799, 800, 801, 802, 803]\"\r\nNCP-12.zip,2,NCP,337,1822,139,2,\"[1822, 1823]\"\r\nCP-9.zip,1,CP,1353,3749,60,3,\"[3748, 3749, 3750]\"\r\nNormal-15.zip,0,Normal,2087,542,83,1,[542]\r\nNCP-12.zip,2,NCP,331,1811,66,2,\"[1810, 1811]\"\r\nCP-22.zip,1,CP,617,2979,110,1,[2979]\r\nCP-18.zip,1,CP,1771,3518,51,4,\"[3518, 3519, 3520, 3521]\"\r\nCP-26.zip,1,CP,3730,5668,212,2,\"[5668, 5669]\"\r\nNormal-24.zip,0,Normal,2660,170,38,1,[170]\r\nNormal-11.zip,0,Normal,1967,422,97,1,[422]\r\nNCP-4.zip,2,NCP,149,1442,159,2,\"[1442, 1443]\"\r\nCP-30.zip,1,CP,3834,5778,26,1,[5778]\r\nNCP-19.zip,2,NCP,540,2237,127,2,\"[2237, 2238]\"\r\nNormal-26.zip,0,Normal,3862,5374,188,1,[5374]\r\nNormal-7.zip,0,Normal,1842,297,77,1,[297]\r\nNormal-26.zip,0,Normal,3868,5380,30,1,[5380]\r\nNCP-2.zip,2,NCP,116,1374,54,2,\"[1373, 1374]\"\r\nNormal-12.zip,0,Normal,2003,458,85,1,[458]\r\nNCP-5.zip,2,NCP,17,1167,58,2,\"[1166, 1167]\"\r\nNCP-2.zip,2,NCP,117,1375,130,2,\"[1375, 1376]\"\r\nCP-13.zip,1,CP,1511,4124,57,2,\"[4123, 4124]\"\r\nCP-18.zip,1,CP,1778,3546,66,6,\"[3545, 3546, 3547, 3548, 3549, 3550]\"\r\nNCP-19.zip,2,NCP,529,2217,58,3,\"[2214, 2215, 2217]\"\r\nCP-14.zip,1,CP,1520,4143,57,3,\"[4143, 4144, 4145]\"\r\nNormal-16.zip,0,Normal,2131,586,95,1,[586]\r\nNCP-28.zip,2,NCP,873,2406,228,2,\"[2405, 2406]\"\r\nNCP-3.zip,2,NCP,137,1418,126,2,\"[1418, 
1419]\"\r\nNCP-10.zip,2,NCP,279,1705,58,2,\"[1704, 1705]\"\r\nCP-28.zip,1,CP,3796,5740,28,1,[5740]\r\nNCP-19.zip,2,NCP,54,1241,147,2,\"[1241, 1242]\"\r\nCP-28.zip,1,CP,3768,5712,19,1,[5712]\r\nNCP-2.zip,2,NCP,120,1382,58,2,\"[1381, 1382]\"\r\nCP-16.zip,1,CP,1603,4291,22,1,[4291]\r\nCP-2.zip,1,CP,1118,3336,173,1,[3336]\r\nNCP-30.zip,2,NCP,939,2482,49,1,[2482]\r\nNormal-8.zip,0,Normal,1874,329,90,1,[329]\r\nNormal-3.zip,0,Normal,746,181,110,1,[181]\r\nCP-21.zip,1,CP,608,2970,86,1,[2970]\r\nNormal-22.zip,0,Normal,2601,111,37,1,[111]\r\nNCP-4.zip,2,NCP,16,1165,48,2,\"[1164, 1165]\"\r\nNCP-1.zip,2,NCP,1036,2607,441,1,[2607]\r\nNCP-19.zip,2,NCP,528,2213,59,2,\"[2212, 2213]\"\r\nNCP-6.zip,2,NCP,217,1579,58,2,\"[1578, 1579]\"\r\nCP-10.zip,1,CP,1416,3897,58,2,\"[3897, 3898]\"\r\nCP-30.zip,1,CP,4043,5592,41,1,[5592]\r\nCP-30.zip,1,CP,3933,5636,69,2,\"[5636, 5637]\"\r\nCP-20.zip,1,CP,2667,3246,24,3,\"[3246, 3247, 3248]\"\r\nNormal-1.zip,0,Normal,1677,824,64,4,\"[823, 824, 825, 826]\"\r\nNCP-18.zip,2,NCP,514,2185,67,2,\"[2184, 2185]\"\r\nCP-18.zip,1,CP,1779,3552,59,2,\"[3551, 3552]\"\r\nNormal-25.zip,0,Normal,3855,5367,209,1,[5367]\r\nCP-24.zip,1,CP,691,3053,72,1,[3053]\r\nCP-6.zip,1,CP,1239,3457,134,1,[3457]\r\nCP-21.zip,1,CP,602,2964,84,1,[2964]\r\nNCP-1.zip,2,NCP,105,1348,61,2,\"[1347, 1348]\"\r\nCP-3.zip,1,CP,1151,3369,158,1,[3369]\r\nNCP-15.zip,2,NCP,413,1975,110,4,\"[1975, 1976, 1977, 1979]\"\r\nCP-8.zip,1,CP,1327,3691,64,3,\"[3690, 3691, 3692]\"\r\nCP-6.zip,1,CP,1237,3455,178,1,[3455]\r\nNormal-11.zip,0,Normal,1959,414,97,1,[414]\r\nNormal-25.zip,0,Normal,3713,5343,27,1,[5343]\r\nCP-21.zip,1,CP,597,2959,305,1,[2959]\r\nCP-9.zip,1,CP,1356,3757,60,2,\"[3756, 3757]\"\r\nNCP-7.zip,2,NCP,2483,2686,40,1,[2686]\r\nNCP-27.zip,2,NCP,1048,2620,58,2,\"[2620, 2621]\"\r\nNormal-3.zip,0,Normal,1767,1159,28,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nCP-5.zip,1,CP,1219,3437,179,1,[3437]\r\nNCP-4.zip,2,NCP,145,1434,139,2,\"[1434, 
1435]\"\r\nCP-15.zip,1,CP,1575,4263,20,1,[4263]\r\nNCP-18.zip,2,NCP,516,2188,135,2,\"[2188, 2189]\"\r\nCP-9.zip,1,CP,1360,3768,67,3,\"[3767, 3768, 3769]\"\r\nCP-13.zip,1,CP,1488,4065,66,3,\"[4064, 4065, 4066]\"\r\nCP-18.zip,1,CP,1782,3587,69,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]\"\r\nCP-1.zip,1,CP,1077,3122,74,2,\"[3121, 3122]\"\r\nNormal-14.zip,0,Normal,2084,539,92,1,[539]\r\nNormal-3.zip,0,Normal,1767,1163,76,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nNormal-2.zip,0,Normal,1746,1063,68,2,\"[1063, 1064]\"\r\nNCP-12.zip,2,NCP,332,1812,167,2,\"[1812, 1813]\"\r\nNormal-12.zip,0,Normal,1990,445,97,1,[445]\r\nCP-7.zip,1,CP,1301,3599,294,4,\"[3598, 3599, 3600, 3601]\"\r\nCP-1.zip,1,CP,1070,3112,104,1,[3112]\r\nCP-13.zip,1,CP,1493,4082,53,3,\"[4080, 4081, 4082]\"\r\nCP-8.zip,1,CP,1330,3700,58,3,\"[3698, 3699, 3700]\"\r\nNCP-19.zip,2,NCP,520,2196,129,2,\"[2196, 2197]\"\r\nNCP-3.zip,2,NCP,137,1419,53,2,\"[1418, 1419]\"\r\nNCP-30.zip,2,NCP,937,2479,22,1,[2479]\r\nNCP-22.zip,2,NCP,865,2391,260,2,\"[2390, 2391]\"\r\nNCP-7.zip,2,NCP,230,1605,58,2,\"[1604, 1605]\"\r\nCP-7.zip,1,CP,1302,3603,207,4,\"[3602, 3603, 3604, 3605]\"\r\nCP-16.zip,1,CP,1588,4276,20,1,[4276]\r\nNormal-18.zip,0,Normal,2195,650,79,1,[650]\r\nNormal-17.zip,0,Normal,2173,628,96,1,[628]\r\nNCP-22.zip,2,NCP,878,2413,117,2,\"[2412, 2413]\"\r\nNormal-18.zip,0,Normal,2188,643,88,1,[643]\r\nCP-18.zip,1,CP,1774,3526,66,8,\"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]\"\r\nNormal-6.zip,0,Normal,1815,270,91,1,[270]\r\nCP-5.zip,1,CP,1208,3426,321,1,[3426]\r\nNCP-6.zip,2,NCP,22,1176,68,2,\"[1175, 1176]\"\r\nNCP-15.zip,2,NCP,413,1979,54,4,\"[1975, 1976, 1977, 1979]\"\r\nCP-18.zip,1,CP,1771,3521,51,4,\"[3518, 3519, 3520, 3521]\"\r\nCP-4.zip,1,CP,1172,3390,195,1,[3390]\r\nNCP-14.zip,2,NCP,378,1909,69,2,\"[1908, 1909]\"\r\nCP-26.zip,1,CP,3721,5655,206,2,\"[5654, 5655]\"\r\nNCP-1.zip,2,NCP,104,1346,58,2,\"[1345, 
1346]\"\r\nCP-27.zip,1,CP,3754,5698,21,1,[5698]\r\nCP-19.zip,1,CP,1784,3592,69,4,\"[3590, 3591, 3592, 3593]\"\r\nCP-9.zip,1,CP,1372,3799,49,3,\"[3797, 3798, 3799]\"\r\nNCP-2.zip,2,NCP,113,1367,137,2,\"[1367, 1368]\"\r\nNormal-22.zip,0,Normal,2318,773,105,1,[773]\r\nCP-18.zip,1,CP,1770,3517,57,1,[3517]\r\nNormal-21.zip,0,Normal,2293,748,88,1,[748]\r\nNormal-22.zip,0,Normal,2595,105,43,1,[105]\r\nNCP-18.zip,2,NCP,50,1234,59,2,\"[1233, 1234]\"\r\nNormal-2.zip,0,Normal,1757,1106,71,4,\"[1105, 1106, 1107, 1108]\"\r\nCP-8.zip,1,CP,1327,3692,64,3,\"[3690, 3691, 3692]\"\r\nCP-18.zip,1,CP,1781,3577,62,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nCP-30.zip,1,CP,3932,5635,67,2,\"[5634, 5635]\"\r\nNCP-20.zip,2,NCP,566,2291,67,2,\"[2290, 2291]\"\r\nNCP-10.zip,2,NCP,2715,2708,51,1,[2708]\r\nCP-23.zip,1,CP,660,3022,82,1,[3022]\r\nNormal-9.zip,0,Normal,1916,371,106,1,[371]\r\nCP-20.zip,1,CP,2757,3288,211,1,[3288]\r\nNormal-7.zip,0,Normal,1845,300,99,1,[300]\r\nNormal-13.zip,0,Normal,2050,505,74,1,[505]\r\nCP-14.zip,1,CP,1534,4177,58,2,\"[4176, 4177]\"\r\nCP-1.zip,1,CP,1092,3310,216,1,[3310]\r\nNormal-2.zip,0,Normal,1763,1135,70,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nNormal-9.zip,0,Normal,1898,353,72,1,[353]\r\nNCP-21.zip,2,NCP,576,2310,124,1,[2310]\r\nNormal-1.zip,0,Normal,1701,956,70,2,\"[955, 956]\"\r\nNormal-17.zip,0,Normal,2178,633,85,1,[633]\r\nCP-8.zip,1,CP,1322,3681,56,2,\"[3680, 3681]\"\r\nNormal-6.zip,0,Normal,1802,257,107,1,[257]\r\nNCP-20.zip,2,NCP,547,2251,159,2,\"[2251, 2252]\"\r\nNCP-3.zip,2,NCP,1285,2726,66,1,[2726]\r\nNormal-7.zip,0,Normal,1828,283,96,1,[283]\r\nNCP-20.zip,2,NCP,546,2250,57,2,\"[2249, 2250]\"\r\nNormal-2.zip,0,Normal,1750,1078,69,3,\"[1074, 1077, 1078]\"\r\nNormal-9.zip,0,Normal,1892,347,77,1,[347]\r\nNCP-19.zip,2,NCP,534,2225,115,2,\"[2225, 2226]\"\r\nCP-29.zip,1,CP,3806,5750,20,1,[5750]\r\nNCP-13.zip,2,NCP,355,1861,125,2,\"[1861, 
1862]\"\r\nNormal-6.zip,0,Normal,1813,268,80,1,[268]\r\nNormal-2.zip,0,Normal,1756,1101,66,4,\"[1101, 1102, 1103, 1104]\"\r\nCP-20.zip,1,CP,2759,3290,36,1,[3290]\r\nNormal-17.zip,0,Normal,2183,638,110,1,[638]\r\nCP-7.zip,1,CP,1270,3490,237,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nNCP-6.zip,2,NCP,228,1601,67,2,\"[1600, 1601]\"\r\nNCP-5.zip,2,NCP,197,1539,53,2,\"[1538, 1539]\"\r\nCP-28.zip,1,CP,3766,5710,24,1,[5710]\r\nCP-10.zip,1,CP,1399,3858,45,2,\"[3858, 3859]\"\r\nNormal-14.zip,0,Normal,2074,529,82,1,[529]\r\nNormal-2.zip,0,Normal,1733,1026,71,2,\"[1026, 1027]\"\r\nNCP-11.zip,2,NCP,300,1747,58,2,\"[1746, 1747]\"\r\nCP-17.zip,1,CP,1650,4338,31,1,[4338]\r\nCP-20.zip,1,CP,2455,2937,116,1,[2937]\r\nNormal-20.zip,0,Normal,2279,734,78,1,[734]\r\nCP-8.zip,1,CP,1329,3697,45,3,\"[3695, 3696, 3697]\"\r\nNCP-16.zip,2,NCP,444,2042,146,2,\"[2042, 2043]\"\r\nNormal-12.zip,0,Normal,1999,454,78,1,[454]\r\nCP-17.zip,1,CP,1624,4312,20,1,[4312]\r\nNCP-10.zip,2,NCP,2720,2676,45,1,[2676]\r\nCP-2.zip,1,CP,1107,3325,183,1,[3325]\r\nCP-18.zip,1,CP,1777,3542,62,5,\"[3540, 3541, 3542, 3543, 3544]\"\r\nNCP-15.zip,2,NCP,403,1955,110,2,\"[1955, 1956]\"\r\nNCP-3.zip,2,NCP,134,1413,54,2,\"[1412, 1413]\"\r\nCP-13.zip,1,CP,1500,4099,97,1,[4099]\r\nCP-25.zip,1,CP,712,3074,118,1,[3074]\r\nCP-23.zip,1,CP,648,3010,104,1,[3010]\r\nCP-19.zip,1,CP,2443,2917,98,3,\"[2915, 2916, 2917]\"\r\nNCP-16.zip,2,NCP,441,2036,115,2,\"[2036, 2037]\"\r\nNormal-23.zip,0,Normal,2628,138,34,1,[138]\r\nCP-19.zip,1,CP,1795,3596,41,2,\"[3596, 3597]\"\r\nNCP-27.zip,2,NCP,1016,2580,20,3,\"[2580, 2581, 2582]\"\r\nNormal-24.zip,0,Normal,2659,169,39,1,[169]\r\nCP-17.zip,1,CP,1619,4307,29,1,[4307]\r\nNormal-26.zip,0,Normal,3861,5373,211,1,[5373]\r\nNCP-19.zip,2,NCP,519,2195,53,2,\"[2194, 2195]\"\r\nNCP-6.zip,2,NCP,213,1571,66,2,\"[1570, 1571]\"\r\nNormal-25.zip,0,Normal,3860,5372,212,1,[5372]\r\nNCP-5.zip,2,NCP,192,1529,57,2,\"[1528, 
1529]\"\r\nCP-3.zip,1,CP,1153,3371,179,1,[3371]\r\nCP-3.zip,1,CP,1159,3377,287,1,[3377]\r\nNCP-30.zip,2,NCP,931,2473,21,1,[2473]\r\nCP-6.zip,1,CP,1255,3473,107,1,[3473]\r\nCP-7.zip,1,CP,1270,3493,193,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nNCP-4.zip,2,NCP,169,1482,133,2,\"[1482, 1483]\"\r\nNCP-12.zip,2,NCP,340,1828,128,2,\"[1828, 1829]\"\r\nCP-18.zip,1,CP,1782,3583,71,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]\"\r\nCP-26.zip,1,CP,3729,5665,36,3,\"[5665, 5666, 5667]\"\r\nNormal-11.zip,0,Normal,1976,431,74,1,[431]\r\nCP-9.zip,1,CP,1372,3798,49,3,\"[3797, 3798, 3799]\"\r\nNCP-4.zip,2,NCP,161,1467,57,2,\"[1466, 1467]\"\r\nCP-22.zip,1,CP,613,2975,78,1,[2975]\r\nNCP-17.zip,2,NCP,460,2074,106,2,\"[2074, 2075]\"\r\nNCP-21.zip,2,NCP,577,2311,145,2,\"[2311, 2312]\"\r\nCP-25.zip,1,CP,741,3103,523,1,[3103]\r\nCP-14.zip,1,CP,1532,4172,50,2,\"[4171, 4172]\"\r\nNCP-11.zip,2,NCP,299,1744,139,2,\"[1744, 1745]\"\r\nNCP-14.zip,2,NCP,372,1896,45,2,\"[1895, 1896]\"\r\nCP-9.zip,1,CP,1356,3756,60,2,\"[3756, 3757]\"\r\nNormal-11.zip,0,Normal,1968,423,96,1,[423]\r\nCP-14.zip,1,CP,1525,4156,60,2,\"[4155, 4156]\"\r\nCP-7.zip,1,CP,1270,3502,21,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nCP-22.zip,1,CP,618,2980,166,1,[2980]\r\nCP-17.zip,1,CP,1639,4327,26,1,[4327]\r\nNormal-19.zip,0,Normal,2245,700,83,1,[700]\r\nCP-13.zip,1,CP,1518,4139,67,3,\"[4138, 4139, 4140]\"\r\nNCP-11.zip,2,NCP,29,1190,132,2,\"[1190, 1191]\"\r\nCP-16.zip,1,CP,1615,4303,29,1,[4303]\r\nCP-29.zip,1,CP,3823,5767,26,1,[5767]\r\nNCP-20.zip,2,NCP,574,2306,139,2,\"[2306, 2307]\"\r\nNCP-12.zip,2,NCP,340,1829,54,2,\"[1828, 1829]\"\r\nNormal-21.zip,0,Normal,2285,740,68,1,[740]\r\nNCP-16.zip,2,NCP,455,2065,56,2,\"[2064, 2065]\"\r\nNCP-16.zip,2,NCP,436,2026,61,2,\"[2025, 2026]\"\r\nNCP-14.zip,2,NCP,383,1918,139,2,\"[1918, 1919]\"\r\nNCP-30.zip,2,NCP,988,2538,287,2,\"[2538, 
2539]\"\r\nNCP-7.zip,2,NCP,247,1641,66,2,\"[1640, 1641]\"\r\nCP-15.zip,1,CP,1571,4259,16,1,[4259]\r\nNormal-16.zip,0,Normal,2137,592,94,1,[592]\r\nCP-7.zip,1,CP,1304,3637,218,8,\"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]\"\r\nCP-6.zip,1,CP,1235,3453,155,1,[3453]\r\nNormal-4.zip,0,Normal,776,211,353,1,[211]\r\nCP-7.zip,1,CP,1270,3499,363,15,\"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]\"\r\nNCP-5.zip,2,NCP,18,1168,135,2,\"[1168, 1169]\"\r\nNormal-18.zip,0,Normal,2189,644,82,1,[644]\r\nNormal-6.zip,0,Normal,1799,254,97,1,[254]\r\nNormal-15.zip,0,Normal,2113,568,93,1,[568]\r\nCP-3.zip,1,CP,1131,3349,157,1,[3349]\r\nNormal-6.zip,0,Normal,1819,274,91,1,[274]\r\nCP-18.zip,1,CP,1781,3571,62,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nNCP-16.zip,2,NCP,455,2064,132,2,\"[2064, 2065]\"\r\nNormal-8.zip,0,Normal,1888,343,99,1,[343]\r\nNCP-20.zip,2,NCP,571,2301,68,2,\"[2300, 2301]\"\r\nNCP-7.zip,2,NCP,247,1640,159,2,\"[1640, 1641]\"\r\nCP-3.zip,1,CP,1137,3355,147,1,[3355]\r\nCP-11.zip,1,CP,1423,3913,53,3,\"[3911, 3912, 3913]\"\r\nNCP-1.zip,2,NCP,105,1347,145,2,\"[1347, 1348]\"\r\nNCP-14.zip,2,NCP,377,1907,62,2,\"[1906, 1907]\"\r\nCP-14.zip,1,CP,1535,4178,53,2,\"[4178, 4179]\"\r\nNormal-9.zip,0,Normal,1900,355,93,1,[355]\r\nCP-2.zip,1,CP,1125,3343,115,1,[3343]\r\nCP-6.zip,1,CP,1243,3461,176,1,[3461]\r\nNCP-6.zip,2,NCP,203,1550,140,2,\"[1550, 1551]\"\r\nNormal-1.zip,0,Normal,1670,789,63,6,\"[787, 788, 789, 790, 791, 792]\"\r\nNCP-29.zip,2,NCP,909,2451,401,1,[2451]\r\nNCP-25.zip,2,NCP,3949,5463,35,1,[5463]\r\nNormal-26.zip,0,Normal,3879,5391,28,1,[5391]\r\nNCP-11.zip,2,NCP,29,1191,56,2,\"[1190, 1191]\"\r\nCP-3.zip,1,CP,1129,3347,158,1,[3347]\r\nNCP-15.zip,2,NCP,406,1961,146,2,\"[1961, 1962]\"\r\nNCP-4.zip,2,NCP,151,1446,129,2,\"[1446, 1447]\"\r\nCP-19.zip,1,CP,1789,3206,64,4,\"[3204, 3205, 3206, 3207]\"\r\nNCP-6.zip,2,NCP,227,1599,61,2,\"[1598, 
1599]\"\r\nCP-12.zip,1,CP,1462,4003,51,3,\"[4002, 4003, 4004]\"\r\nCP-3.zip,1,CP,1147,3365,164,1,[3365]\r\nNormal-23.zip,0,Normal,2629,139,36,1,[139]\r\nNormal-1.zip,0,Normal,1700,953,64,2,\"[953, 954]\"\r\nCP-15.zip,1,CP,1561,4241,49,2,\"[4241, 4242]\"\r\nNCP-16.zip,2,NCP,437,2028,60,2,\"[2027, 2028]\"\r\nCP-18.zip,1,CP,1654,4342,23,1,[4342]\r\nNormal-20.zip,0,Normal,2273,728,75,1,[728]\r\nNormal-14.zip,0,Normal,2067,522,94,1,[522]\r\nNCP-29.zip,2,NCP,911,2453,48,1,[2453]\r\nNormal-2.zip,0,Normal,1756,1104,65,4,\"[1101, 1102, 1103, 1104]\"\r\nNormal-11.zip,0,Normal,1989,444,105,1,[444]\r\nNCP-15.zip,2,NCP,403,1956,47,2,\"[1955, 1956]\"\r\nNCP-13.zip,2,NCP,348,1848,48,2,\"[1847, 1848]\"\r\nNCP-28.zip,2,NCP,844,2359,594,1,[2359]\r\nNCP-18.zip,2,NCP,51,1235,141,2,\"[1235, 1236]\"\r\nCP-28.zip,1,CP,3789,5733,26,1,[5733]\r\nNormal-2.zip,0,Normal,1763,1138,65,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nNCP-10.zip,2,NCP,278,1702,137,2,\"[1702, 1703]\"\r\nCP-28.zip,1,CP,3770,5714,23,1,[5714]\r\nCP-18.zip,1,CP,1782,3585,71,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]\"\r\nNCP-23.zip,2,NCP,93,1323,66,2,\"[1322, 1323]\"\r\nNCP-14.zip,2,NCP,390,1930,126,2,\"[1930, 1931]\"\r\nNCP-8.zip,2,NCP,26,1184,82,2,\"[1184, 1185]\"\r\nNormal-2.zip,0,Normal,1763,1142,71,11,\"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]\"\r\nNCP-6.zip,2,NCP,201,1547,62,2,\"[1546, 1547]\"\r\nNCP-5.zip,2,NCP,176,1496,126,2,\"[1496, 1497]\"\r\nNormal-23.zip,0,Normal,2626,136,33,1,[136]\r\nNCP-25.zip,2,NCP,3707,5534,50,1,[5534]\r\nNormal-21.zip,0,Normal,2305,760,104,1,[760]\r\nNormal-6.zip,0,Normal,1818,273,87,1,[273]\r\nCP-22.zip,1,CP,641,3003,136,1,[3003]\r\nNormal-7.zip,0,Normal,1836,291,104,1,[291]\r\nNormal-27.zip,0,Normal,3894,5417,287,1,[5417]\r\nNCP-30.zip,2,NCP,981,2526,23,2,\"[2525, 2526]\"\r\nNCP-1.zip,2,NCP,102,1341,132,2,\"[1341, 1342]\"\r\nCP-18.zip,1,CP,1782,3581,71,9,\"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 
3587]\"\r\nNCP-14.zip,2,NCP,387,1924,128,2,\"[1924, 1925]\"\r\nNCP-2.zip,2,NCP,117,1376,55,2,\"[1375, 1376]\"\r\nNCP-5.zip,2,NCP,190,1524,152,2,\"[1524, 1525]\"\r\nCP-26.zip,1,CP,3639,5598,241,1,[5598]\r\nNormal-1.zip,0,Normal,1670,787,58,6,\"[787, 788, 789, 790, 791, 792]\"\r\nNormal-2.zip,0,Normal,1757,1108,68,4,\"[1105, 1106, 1107, 1108]\"\r\nCP-10.zip,1,CP,1393,3845,60,2,\"[3845, 3846]\"\r\nNormal-13.zip,0,Normal,2043,498,84,1,[498]\r\nCP-2.zip,1,CP,1099,3317,198,1,[3317]\r\nCP-7.zip,1,CP,1318,3673,56,1,[3673]\r\nNormal-9.zip,0,Normal,1899,354,88,1,[354]\r\nCP-12.zip,1,CP,1467,4014,60,2,\"[4013, 4014]\"\r\nNCP-5.zip,2,NCP,197,1538,124,2,\"[1538, 1539]\"\r\nCP-26.zip,1,CP,3730,5669,202,2,\"[5668, 5669]\"\r\nNCP-22.zip,2,NCP,845,2363,428,4,\"[2360, 2361, 2362, 2363]\"\r\nNCP-2.zip,2,NCP,127,1399,139,2,\"[1399, 1400]\"\r\nNormal-26.zip,0,Normal,3893,5416,63,1,[5416]\r\nNCP-8.zip,2,NCP,2669,2689,37,1,[2689]\r\nCP-18.zip,1,CP,1778,3549,64,6,\"[3545, 3546, 3547, 3548, 3549, 3550]\"\r\nCP-25.zip,1,CP,722,3084,70,1,[3084]\r\nNCP-4.zip,2,NCP,157,1458,114,2,\"[1458, 1459]\"\r\nNCP-23.zip,2,NCP,92,1320,87,2,\"[1320, 1321]\"\r\nCP-11.zip,1,CP,1424,3914,60,2,\"[3914, 3915]\"\r\nNCP-19.zip,2,NCP,529,2215,33,3,\"[2214, 2215, 2217]\"\r\nCP-24.zip,1,CP,704,3066,417,1,[3066]\r\nNCP-6.zip,2,NCP,201,1546,149,2,\"[1546, 1547]\"\r\nNormal-17.zip,0,Normal,2177,632,88,1,[632]\r\nNCP-14.zip,2,NCP,383,1919,58,2,\"[1918, 1919]\"\r\nNormal-2.zip,0,Normal,1737,1040,80,4,\"[1037, 1038, 1039, 1040]\"\r\nNormal-26.zip,0,Normal,3881,5393,22,1,[5393]\r\nNormal-3.zip,0,Normal,1767,1161,71,12,\"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]\"\r\nCP-14.zip,1,CP,1525,4155,60,2,\"[4155, 4156]\"\r\nNCP-12.zip,2,NCP,341,1832,55,3,\"[1830, 1832, 1834]\"\r\nNormal-13.zip,0,Normal,2034,489,91,1,[489]\r\nNCP-26.zip,2,NCP,3978,5485,49,1,[5485]\r\nNCP-22.zip,2,NCP,864,2389,221,2,\"[2388, 
2389]\"\r\nNCP-9.zip,2,NCP,2682,2652,47,1,[2652]\r\nNCP-7.zip,2,NCP,2461,2642,42,1,[2642]\r\nNormal-21.zip,0,Normal,2303,758,110,1,[758]\r\nNCP-8.zip,2,NCP,2670,2690,41,1,[2690]\r\nCP-7.zip,1,CP,1315,3666,59,2,\"[3665, 3666]\"\r\nCP-19.zip,1,CP,2449,2927,118,1,[2927]\r\nCP-19.zip,1,CP,1789,3204,59,4,\"[3204, 3205, 3206, 3207]\"\r\nNormal-6.zip,0,Normal,1803,258,100,1,[258]\r\nNormal-1.zip,0,Normal,1675,812,73,1,[812]\r\nNCP-25.zip,2,NCP,3705,5532,63,1,[5532]\r\nNormal-1.zip,0,Normal,1727,1010,63,4,\"[1009, 1010, 1011, 1012]\"\r\nNCP-3.zip,2,NCP,1283,2724,70,1,[2724]\r\nCP-18.zip,1,CP,1774,3524,66,8,\"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]\"\r\nCP-18.zip,1,CP,1774,3525,66,8,\"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]\"\r\nCP-30.zip,1,CP,3919,5543,66,4,\"[5543, 5544, 5545, 5546]\"\r\nNCP-22.zip,2,NCP,85,1304,58,2,\"[1303, 1304]\"\r\nNormal-18.zip,0,Normal,2192,647,79,1,[647]\r\nCP-30.zip,1,CP,3935,5641,70,1,[5641]\r\nNCP-6.zip,2,NCP,227,1598,146,2,\"[1598, 1599]\"\r\nNormal-20.zip,0,Normal,2250,705,76,1,[705]\r\nCP-12.zip,1,CP,1464,4008,63,2,\"[4007, 4008]\"\r\nCP-29.zip,1,CP,3807,5751,20,1,[5751]\r\nNormal-12.zip,0,Normal,1993,448,97,1,[448]\r\nNCP-19.zip,2,NCP,528,2212,140,2,\"[2212, 2213]\"\r\nNCP-26.zip,2,NCP,3987,5511,60,1,[5511]\r\nNCP-25.zip,2,NCP,3969,5478,50,1,[5478]\r\nCP-17.zip,1,CP,1638,4326,25,1,[4326]\r\nCP-17.zip,1,CP,1643,4331,24,1,[4331]\r\nCP-17.zip,1,CP,1629,4317,23,1,[4317]\r\nCP-11.zip,1,CP,1423,3912,53,3,\"[3911, 3912, 3913]\"\r\nNormal-2.zip,0,Normal,1743,1056,73,2,\"[1056, 1057]\"\r\nNormal-9.zip,0,Normal,1915,370,91,1,[370]\r\nNormal-22.zip,0,Normal,2590,100,41,1,[100]\r\nNCP-11.zip,2,NCP,297,1741,60,2,\"[1739, 1741]\"\r\nCP-30.zip,1,CP,3919,5545,70,4,\"[5543, 5544, 5545, 5546]\"\r\nNCP-25.zip,2,NCP,3971,5480,50,1,[5480]\r\nCP-11.zip,1,CP,1454,3983,53,3,\"[3982, 3983, 3984]\"\r\nNormal-21.zip,0,Normal,2282,737,69,1,[737]\r\nNCP-12.zip,2,NCP,318,1783,150,2,\"[1783, 1784]\"\r\nNCP-10.zip,2,NCP,279,1704,139,2,\"[1704, 
1705]\"\r\nCP-2.zip,1,CP,1108,3326,135,1,[3326]\r\nNormal-2.zip,0,Normal,1733,1027,71,2,\"[1026, 1027]\"\r\nCP-32.zip,1,CP,1781,3567,67,12,\"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]\"\r\nNCP-10.zip,2,NCP,2725,2681,51,1,[2681]\r\nCP-11.zip,1,CP,1425,3917,49,3,\"[3916, 3917, 3918]\"\r\nNormal-1.zip,0,Normal,1701,955,70,2,\"[955, 956]\"\r\nCP-19.zip,1,CP,1787,3195,59,1,[3195]"
  },
  {
    "path": "Finetune/CC-CCII/csv/CC_CCII_metadata.csv",
    "content": "patient_id,scan_id,Age,Sex(Male1/Female2),Critical_illness,Liver_function,Lung_function,Progression (Days)\r\n1399,127,57,1,1,5,2,0.08\r\n1297,82,55,1,1,3,2,0.88\r\n2255,549,3,1,1,1,2,0.02\r\n1184,26,5,2,1,0,2,0.02\r\n1186,27,2,2,1,2,2,0.02\r\n1181,25,0,1,1,2,2,0.03\r\n1610,233,0,1,1,3,2,0.02\r\n2245,544,69,1,1,1,3,0.09\r\n1316,90,8,2,1,1,2,0.02\r\n1268,68,47,2,1,4,4,6.08\r\n1328,96,74,1,1,1,4,4.81\r\n1948,399,44,1,0,0,1,10.25\r\n1950,400,17,2,0,1,2,10.25\r\n1897,373,80,2,1,1,3,0.03\r\n2292,567,64,1,1,1,4,0.08\r\n1785,319,45,1,1,0,2,0.32\r\n1733,294,51,2,1,1,2,3.58\r\n1320,92,9,1,1,1,3,0.17\r\n1318,91,7,1,1,1,2,0.04\r\n1908,378,66,1,1,2,4,3.64\r\n1312,89,34,1,1,4,1,0.73\r\n1387,123,83,1,1,3,4,0.07\r\n1446,151,86,2,1,1,3,0.01\r\n2156,500,44,1,0,0,1,7.32\r\n2113,479,32,2,0,0,1,8.22\r\n2091,468,38,1,0,1,2,8.38\r\n1684,269,75,1,1,2,4,0.07\r\n2060,453,39,2,0,0,2,9.5\r\n2048,447,40,2,0,0,2,9.58\r\n1281,74,63,1,1,1,5,0.84\r\n2210,527,70,2,1,2,4,0.07\r\n2150,497,57,2,1,0,1,1.16\r\n1700,277,85,1,1,2,3,0.05\r\n2313,578,87,2,1,1,3,0.03\r\n1895,372,8,2,1,0,2,0.04\r\n2146,495,61,1,0,0,3,7.41\r\n2311,577,77,1,0,0,3,8.3\r\n2144,494,31,1,1,0,1,0.03\r\n1369,114,57,2,1,1,1,0.06\r\n1572,214,26,1,1,1,1,0.81\r\n1536,196,35,2,1,0,2,0.92\r\n1692,273,48,2,1,1,1,1.03\r\n2302,572,49,1,1,0,1,0.03\r\n1504,180,62,2,1,0,4,1.01\r\n1578,217,40,2,1,5,2,0.9\r\n1458,157,49,2,1,2,1,1.13\r\n1982,415,31,1,1,0,2,0.06\r\n1967,409,26,2,0,0,2,10.14\r\n2054,450,2,2,1,1,2,0.06\r\n2056,451,0,2,1,1,2,0.05\r\n1922,385,0,2,1,2,2,0.03\r\n2310,576,77,2,1,0,4,0.02\r\n1412,134,71,2,1,1,3,0.07\r\n1566,211,48,2,0,1,2,13.03\r\n1923,386,0,2,1,3,2,0.03\r\n2059,452,0,2,1,1,3,0.04\r\n2204,524,73,1,1,5,3,0.06\r\n2031,439,51,1,0,0,2,9.99\r\n1920,384,45,2,1,2,2,8.48\r\n1179,24,62,2,0,1,3,6.24\r\n1634,244,43,1,0,1,2,12.39\r\n1482,169,61,1,1,1,4,1.05\r\n1580,218,44,1,1,3,2,0.87\r\n1476,166,57,2,1,0,2,1.09\r\n1474,165,57,1,1,0,2,7.04\r\n1332,98,9,2,1,1,3,0.11\r\n2190,517,73,2,1,2,3,0.99\r\n1341,102,29,2,1,1,2,0.92
\r\n1629,242,75,1,1,2,3,5.07\r\n1590,223,42,1,1,1,1,0.09\r\n1804,328,71,2,1,1,4,0.05\r\n1175,22,82,2,1,1,3,0.05\r\n1287,77,44,1,1,2,2,0.07\r\n1524,190,40,1,1,0,3,0.01\r\n1508,182,54,1,1,0,2,0.02\r\n2296,569,56,1,0,0,1,10.98\r\n2280,561,63,2,1,2,3,6.59\r\n1800,326,69,2,1,0,4,0.08\r\n2074,460,9,1,1,2,2,0.02\r\n2123,484,51,1,1,1,2,0.03\r\n2101,473,46,1,1,5,1,0.03\r\n2214,529,25,2,1,0,2,0.04\r\n1410,133,9,2,1,1,3,0.09\r\n1615,235,47,2,0,0,2,12.51\r\n1877,363,50,2,0,0,1,11.22\r\n1704,279,53,2,0,1,2,12.1\r\n1584,220,56,1,1,0,2,0.04\r\n2192,518,44,1,1,2,2,0.02\r\n1942,396,53,1,0,1,1,10.39\r\n1625,240,51,1,0,1,2,10.45\r\n2087,466,51,1,0,2,3,9\r\n2180,512,46,2,0,1,1,6.12\r\n2202,523,63,1,1,0,4,2.82\r\n2194,519,68,2,0,1,4,5.19\r\n2188,516,65,2,0,0,4,6.12\r\n1436,146,32,1,0,0,1,14\r\n2253,548,37,1,0,1,2,14\r\n1604,230,59,1,1,0,1,0.05\r\n1295,81,26,1,0,4,3,14\r\n1426,141,38,2,1,1,2,0.08\r\n1227,47,24,2,1,2,2,0.94\r\n1798,325,64,2,1,0,4,2.73\r\n1744,299,56,1,1,1,2,5.54\r\n1924,387,65,2,1,0,4,0.92\r\n1932,391,62,2,0,0,3,10.4\r\n1309,88,61,1,1,3,3,0.87\r\n1194,31,41,1,1,0,1,0.99\r\n1654,254,66,2,0,1,4,12.36\r\n1243,55,38,2,1,1,1,0.86\r\n1276,72,41,2,1,5,1,0.08\r\n1690,272,79,1,1,2,4,0.03\r\n1349,106,54,1,1,0,1,0.95\r\n2011,429,58,2,1,0,1,4.54\r\n1987,417,51,2,1,1,2,0.14\r\n2206,525,79,1,0,0,3,5.16\r\n2300,571,51,1,0,1,2,5.18\r\n2243,543,68,2,1,0,4,0.02\r\n2072,459,73,1,1,1,3,0.05\r\n1674,264,40,1,0,0,2,12.32\r\n1906,377,73,1,0,0,4,11.11\r\n1715,285,37,2,0,0,2,12.08\r\n1520,188,68,2,1,0,4,4.68\r\n1901,375,69,2,0,0,4,11.12\r\n1881,365,63,2,0,0,4,11.22\r\n2015,431,28,1,0,0,3,10.09\r\n1783,318,32,1,0,0,3,11.94\r\n1855,352,59,1,1,1,2,0.02\r\n1859,354,29,1,0,0,2,11.35\r\n1710,282,68,2,1,0,3,7.19\r\n1788,320,57,2,1,0,1,4.6\r\n1351,107,49,1,1,1,2,6.83\r\n1867,358,54,1,0,2,2,11.27\r\n1377,118,34,1,1,1,2,6.77\r\n1965,408,54,2,1,0,2,6.12\r\n1808,330,76,1,1,0,5,0.05\r\n1928,389,40,2,0,1,2,10.41\r\n1748,301,29,2,0,0,2,12.02\r\n1608,232,46,2,0,1,2,12.63\r\n1739,297,61,1,1,0,3,8.46\r\n2142,493,3
3,2,1,0,2,0.59\r\n2241,542,62,1,1,1,4,0.83\r\n2046,446,51,1,1,0,3,7.22\r\n2212,528,29,2,1,1,1,0.24\r\n2170,507,55,2,1,4,1,1.57\r\n2148,496,30,1,0,0,2,7.41\r\n1552,204,48,1,1,4,2,0.03\r\n1779,316,50,2,0,0,1,11.96\r\n2119,482,42,2,1,0,2,1.93\r\n2220,531,49,1,0,2,2,4.21\r\n1210,39,70,2,1,0,5,5.81\r\n1251,59,52,2,0,0,3,14\r\n1405,130,53,2,1,0,3,0.07\r\n1259,63,29,1,1,0,3,5.64\r\n1530,193,48,2,1,2,2,0.02\r\n2222,532,74,1,0,0,3,4.19\r\n2134,489,48,1,1,1,3,0.05\r\n1863,356,56,2,0,4,2,11.3\r\n2005,426,77,1,0,0,4,10.11\r\n1168,18,76,1,0,1,4,10.26\r\n1995,421,55,1,0,3,2,10.11\r\n1891,370,55,2,1,3,2,5.54\r\n2282,562,34,2,0,2,2,7.51\r\n1752,303,57,1,0,0,2,11.98\r\n1558,207,63,1,1,3,4,7.43\r\n1973,412,53,1,1,0,3,9.53\r\n1771,312,24,1,0,4,2,11.97\r\n1794,323,72,2,1,2,5,8.34\r\n2064,455,39,2,0,0,2,9.42\r\n1170,19,57,2,1,0,2,0.04\r\n2174,509,71,2,0,0,3,6.47\r\n2200,522,32,2,0,1,1,5.17\r\n2154,499,34,1,0,0,1,7.35\r\n2198,521,57,1,0,0,1,5.18\r\n2196,520,54,2,0,0,1,5.18\r\n1980,414,69,2,1,2,3,0.87\r\n2288,565,83,1,1,2,4,0.01\r\n1468,162,33,1,1,0,3,0.02\r\n2275,559,75,2,1,2,4,5.01\r\n1375,117,67,2,1,2,4,3.95\r\n2164,504,56,2,1,0,2,1.86\r\n2111,478,35,2,0,0,2,14\r\n1223,45,75,2,0,0,3,10.11\r\n1938,394,84,1,1,0,3,0.88\r\n2237,540,58,2,1,1,2,7.07\r\n1725,290,59,2,0,1,1,12.06\r\n1837,343,52,2,1,1,2,0.04\r\n2082,464,65,2,0,0,4,9.02\r\n2029,438,31,1,0,0,2,10.03\r\n2109,477,16,1,1,0,3,0.03\r\n2040,443,72,2,1,1,3,3.37\r\n1991,419,70,2,0,4,3,10.11\r\n2036,441,50,2,0,0,2,9.59\r\n1961,406,36,1,0,0,2,10.16\r\n2052,449,25,1,0,0,2,9.58\r\n2218,530,61,2,1,0,4,0.04\r\n1235,51,38,1,1,5,2,6\r\n1767,310,35,1,0,0,2,11.97\r\n1177,23,63,1,0,0,4,11.79\r\n2231,537,80,1,1,0,3,0.02\r\n1621,238,78,1,1,4,4,4.06\r\n2224,533,29,1,0,0,1,3.27\r\n1385,122,84,1,0,0,3,2.07\r\n1488,172,40,2,1,0,2,0.03\r\n1596,226,59,2,1,0,1,0.05\r\n1265,66,23,1,0,0,2,14\r\n2321,582,63,1,1,3,3,0.98\r\n1516,186,68,2,1,2,3,0.03\r\n2127,486,48,1,1,2,2,0.02\r\n1682,268,68,1,1,0,3,0.04\r\n2138,491,49,1,1,0,1,0.02\r\n2095,470,65,1,1,5,4,0.04\r\
n3988,1456,77,2,1,0,5,0.02\r\n1871,360,46,1,1,0,2,1.47\r\n2308,575,52,1,1,1,2,0.1\r\n1420,138,62,2,1,2,4,0.04\r\n2235,539,65,1,1,0,5,9.63\r\n1642,248,83,1,1,1,4,0.05\r\n1305,86,77,2,1,1,3,0.6\r\n2325,816,74,1,0,1,2,14\r\n2326,817,52,1,1,0,0,7\r\n2327,818,56,2,0,1,1,7\r\n2330,820,40,1,0,3,2,9\r\n2331,821,24,2,0,0,1,8\r\n2332,822,57,1,1,0,0,8\r\n2334,823,46,2,0,0,1,11\r\n2335,824,35,1,0,2,2,9\r\n2338,825,46,2,0,0,1,10\r\n2339,826,24,1,0,0,1,8\r\n2341,828,41,1,0,1,2,7\r\n2343,830,27,1,0,3,1,12\r\n2345,832,27,1,0,1,1,8\r\n2346,833,33,1,0,0,1,8\r\n2350,835,40,2,0,0,1,10\r\n2351,836,35,1,0,0,2,10\r\n2352,837,51,1,0,0,1,10\r\n2353,838,58,2,0,1,1,10\r\n2354,839,43,1,0,1,1,8\r\n2355,840,18,1,0,1,1,10\r\n2356,841,49,2,0,1,1,10\r\n2359,844,68,2,0,1,2,10\r\n2364,846,41,2,0,1,0,9\r\n2365,847,61,2,0,0,3,8\r\n2368,849,22,1,0,0,1,8\r\n2369,850,29,1,0,0,1,9\r\n2370,851,34,1,0,1,2,11\r\n2371,852,39,1,0,0,0,10\r\n2373,853,42,2,0,0,1,10\r\n2374,854,28,2,0,1,0,9\r\n2377,856,56,1,0,1,0,11\r\n2378,857,41,2,0,1,0,10\r\n2379,858,41,1,0,0,0,10\r\n2380,859,39,1,0,0,1,10\r\n2384,861,47,2,0,1,0,11\r\n2387,863,50,2,0,0,0,10\r\n2389,864,39,2,0,0,1,10\r\n2393,866,30,1,1,1,1,10\r\n2394,867,60,2,0,1,1,9\r\n2397,869,31,1,0,0,1,11\r\n2399,870,31,2,0,1,1,14\r\n2402,871,43,1,0,0,0,9\r\n2403,872,26,1,0,1,1,14\r\n2407,874,23,1,0,3,1,12\r\n2408,875,38,2,0,0,1,9\r\n2410,876,54,1,0,0,2,9\r\n2411,877,31,1,0,0,1,8\r\n2413,878,18,2,0,0,1,7\r\n2414,879,52,2,0,1,1,9\r\n2415,880,54,1,0,0,1,7\r\n2420,883,36,2,0,1,1,9\r\n2421,884,56,2,0,0,0,8\r\n2423,885,46,1,0,0,1,11\r\n2424,886,51,1,0,0,0,14\r\n2426,888,51,1,0,1,2,9\r\n2428,889,48,2,0,1,1,8\r\n2429,890,52,1,0,1,1,6\r\n2431,892,29,1,0,0,2,8\r\n2432,893,62,2,0,5,4,14\r\n2435,895,36,1,0,1,1,8\r\n2437,896,53,2,0,1,1,7\r\n2438,897,22,1,0,3,2,7\r\n2439,898,41,1,0,3,0,8\r\n2441,899,32,1,0,1,2,12\r\n2443,901,56,2,0,1,1,14\r\n2444,902,26,2,0,1,1,8\r\n2446,904,59,2,0,1,1,9\r\n2448,906,49,2,0,2,1,14\r\n2449,907,59,1,0,1,0,8\r\n2450,908,34,2,0,0,2,9\r\n2451,909,63,1,1,1,2,9\r
\n2452,910,48,1,1,0,3,9\r\n2453,911,28,1,1,0,2,10\r\n2454,912,44,2,0,1,1,14\r\n2455,913,45,1,0,1,2,12\r\n2457,915,45,2,0,0,1,8\r\n2458,916,57,1,0,1,2,7\r\n2459,917,62,1,0,2,3,11\r\n2463,921,40,2,0,1,1,14\r\n2464,922,62,1,0,1,3,9\r\n2465,923,55,1,0,2,2,11\r\n2466,924,60,2,0,0,1,12\r\n2467,925,33,1,0,0,1,10\r\n2468,926,32,1,0,2,1,10\r\n2472,930,58,2,0,2,1,10\r\n2473,931,47,1,0,2,1,13\r\n2475,933,50,2,0,0,0,14\r\n2476,934,39,2,0,0,2,12\r\n2477,935,46,1,0,1,0,10\r\n2479,937,67,2,0,0,3,10\r\n2480,938,38,1,0,1,1,14\r\n2482,939,23,1,0,0,0,8\r\n2483,940,45,1,0,1,1,9\r\n2484,941,27,2,0,0,1,8\r\n2486,943,63,1,0,1,3,14\r\n2487,944,47,1,0,0,2,7\r\n2488,945,29,1,1,1,0,11\r\n2489,946,30,2,0,1,2,9\r\n2490,947,60,2,0,0,1,13\r\n2492,949,40,2,0,1,1,6\r\n2493,950,28,1,0,1,0,8\r\n2494,951,30,1,0,0,1,8\r\n2495,952,50,1,0,0,2,14\r\n2497,954,48,2,0,1,1,10\r\n2501,958,33,2,0,5,2,9\r\n2502,959,28,1,0,1,1,9\r\n2503,960,50,1,0,1,2,10\r\n2505,962,12,1,0,0,2,9\r\n2506,963,47,2,0,1,4,13\r\n2507,965,47,1,0,1,2,14\r\n2510,967,33,1,0,1,1,8\r\n2511,968,71,2,0,1,3,14\r\n2512,969,44,1,0,1,1,14\r\n2513,970,36,1,0,1,2,12\r\n2514,971,42,1,0,1,2,12\r\n2515,972,37,1,0,1,1,14\r\n2516,973,49,2,0,3,1,9\r\n2519,976,50,1,0,1,0,9\r\n2521,977,47,2,0,0,3,9\r\n2522,978,63,1,0,1,2,9\r\n2523,979,44,2,0,1,2,6\r\n2525,981,47,2,0,1,0,9\r\n2527,982,35,2,0,0,1,10\r\n2529,984,28,1,0,2,1,11\r\n2531,985,30,1,0,1,1,10\r\n2535,986,44,2,0,1,0,12\r\n2537,987,35,1,0,1,1,8\r\n2541,989,40,2,0,1,1,14\r\n2543,990,57,1,1,1,0,7\r\n2544,991,46,2,0,1,1,8\r\n2546,993,47,2,0,2,2,10\r\n2547,994,46,1,0,5,2,14\r\n2549,995,72,2,0,1,2,9\r\n2550,996,34,1,1,2,1,9\r\n2555,998,62,1,0,1,3,7\r\n2556,999,33,2,0,0,2,7\r\n2560,1001,43,1,1,2,2,9\r\n2561,1002,39,1,0,0,1,8\r\n2562,1003,47,1,0,2,2,10\r\n2564,1004,51,2,0,0,0,9\r\n2565,1005,56,2,1,2,1,12\r\n2568,1007,19,1,0,1,1,10\r\n2569,1008,58,1,0,1,1,9\r\n2570,1009,50,2,0,0,0,9\r\n2572,1010,71,1,1,0,2,10\r\n2574,1011,65,1,1,1,3,10\r\n2576,1012,47,1,0,1,2,8\r\n2577,1013,47,1,0,1,2,10\r\n2579,1015,23,2,0,0,
0,7\r\n2583,1017,64,1,0,1,3,9\r\n2585,1019,57,2,0,5,1,9\r\n2586,1020,35,1,0,1,1,6\r\n2588,1021,45,2,0,1,1,14\r\n2591,1022,9,1,0,1,3,9\r\n2593,1023,52,1,0,2,1,9\r\n2595,1025,31,1,0,1,1,9\r\n2596,1026,39,2,0,0,1,9\r\n2597,1027,28,1,0,1,1,12\r\n2598,1028,47,2,0,0,2,9\r\n2599,1029,58,1,0,2,0,8\r\n2600,1030,50,1,0,1,1,6\r\n2602,1031,40,1,1,0,1,11\r\n2603,1032,47,1,0,0,1,6\r\n2604,1033,73,1,0,1,4,11\r\n2605,1034,27,1,0,2,0,7\r\n2606,1035,33,2,0,1,0,9\r\n2607,1036,24,1,0,1,1,8\r\n2609,1038,19,1,0,1,1,8\r\n2610,1039,42,1,0,1,2,10\r\n2611,1040,61,2,0,2,3,6\r\n2612,1041,63,1,0,1,3,6\r\n2614,1042,47,2,0,0,3,8\r\n2616,1044,55,2,1,0,1,8\r\n2617,1045,81,1,0,0,4,14\r\n2618,1046,36,1,0,0,1,8\r\n2619,1047,41,1,0,1,1,9\r\n2622,1049,45,2,0,0,0,8\r\n2626,1051,52,1,0,1,1,8\r\n2627,1052,47,1,0,0,1,8\r\n2629,1053,28,1,0,1,1,10\r\n2631,1055,52,2,0,0,1,14\r\n2632,1056,51,2,0,1,1,9\r\n2633,1057,45,1,0,1,1,10\r\n2634,1058,68,2,1,0,2,10\r\n2636,1059,48,2,0,0,1,8\r\n2637,1060,69,2,0,0,3,11\r\n2638,1061,51,2,0,1,1,12\r\n2639,1062,43,1,0,0,2,12\r\n2640,1063,29,1,0,1,1,10\r\n2641,1064,47,2,0,1,1,14\r\n"
  },
  {
    "path": "Finetune/CC-CCII/dataset/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/CC-CCII/eval.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\r\n# Licensed under the Apache License, Version 2.0 (the \"License\");\r\n# you may not use this file except in compliance with the License.\r\n# You may obtain a copy of the License at\r\n#     http://www.apache.org/licenses/LICENSE-2.0\r\n# Unless required by applicable law or agreed to in writing, software\r\n# distributed under the License is distributed on an \"AS IS\" BASIS,\r\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n# See the License for the specific language governing permissions and\r\n# limitations under the License.\r\n\r\nimport argparse\r\nimport os\r\nfrom functools import partial\r\nimport nibabel as nib\r\nimport numpy as np\r\nimport torch\r\nfrom torch.cuda.amp import GradScaler, autocast\r\nfrom utils.data_utils import get_loader\r\nfrom utils.utils import dice, resample_3d\r\nfrom utils.utils import AverageMeter, distributed_all_gather\r\n\r\nfrom monai.inferers import sliding_window_inference\r\nfrom monai.data import decollate_batch\r\nfrom monai.losses import DiceCELoss\r\nfrom monai.metrics import DiceMetric\r\nfrom monai.networks.nets import SwinUNETR\r\nfrom monai.transforms import Activations, AsDiscrete, Compose\r\nfrom monai.utils.enums import MetricReduction\r\n\r\nos.environ['CUDA_VISIBLE_DEVICES'] = \"0\"\r\nos.environ['MASTER_ADDR'] = 'localhost'\r\nos.environ['MASTER_PORT'] = '28890'\r\n\r\nparser = argparse.ArgumentParser(description=\"Swin UNETR segmentation pipeline\")\r\nparser.add_argument(\"--checkpoint\", default=None, help=\"start training from saved checkpoint\")\r\nparser.add_argument(\"--logdir\", default=\"logs\", type=str, help=\"directory to save the tensorboard logs\")\r\nparser.add_argument(\r\n    \"--pretrained_dir\", default=\"./runs/logs_384/\", type=str, help=\"pretrained checkpoint directory\"\r\n)\r\nparser.add_argument(\"--csv_list\", default=\"./csv/\", type=str, help=\"csv 
directory\")\r\nparser.add_argument(\"--fold\", default=0, type=int, help=\"fold\")\r\nparser.add_argument(\"--data_dir\", default=\"/data/jiaxin/data/CC-CCII_public/data/\", type=str, help=\"dataset directory\")\r\nparser.add_argument(\r\n    \"--pretrained_model_name\",\r\n    default=\"model.pt\",\r\n    type=str,\r\n    help=\"pretrained model name\",\r\n)\r\n\r\nparser.add_argument(\"--save_checkpoint\", default=True, help=\"save checkpoint during training\")\r\nparser.add_argument(\"--max_epochs\", default=100, type=int, help=\"max number of training epochs\")\r\nparser.add_argument(\"--batch_size\", default=4, type=int, help=\"number of batch size\")\r\nparser.add_argument(\"--sw_batch_size\", default=1, type=int, help=\"number of sliding window batch size\")\r\nparser.add_argument(\"--optim_lr\", default=1e-4, type=float, help=\"optimization learning rate\")\r\nparser.add_argument(\"--optim_name\", default=\"adamw\", type=str, help=\"optimization algorithm\")\r\nparser.add_argument(\"--reg_weight\", default=1e-5, type=float, help=\"regularization weight\")\r\nparser.add_argument(\"--momentum\", default=0.99, type=float, help=\"momentum\")\r\nparser.add_argument(\"--noamp\", action=\"store_true\", help=\"do NOT use amp for training\")\r\nparser.add_argument(\"--val_every\", default=5, type=int, help=\"validation frequency\")\r\nparser.add_argument(\"--distributed\", action=\"store_true\", help=\"start distributed training\")\r\nparser.add_argument(\"--world_size\", default=1, type=int, help=\"number of nodes for distributed training\")\r\nparser.add_argument(\"--rank\", default=0, type=int, help=\"node rank for distributed training\")\r\nparser.add_argument(\"--dist-url\", default=\"tcp://127.0.0.1:23456\", type=str, help=\"distributed url\")\r\nparser.add_argument(\"--dist-backend\", default=\"nccl\", type=str, help=\"distributed backend\")\r\nparser.add_argument(\"--norm_name\", default=\"instance\", type=str, help=\"normalization 
name\")\r\nparser.add_argument(\"--workers\", default=4, type=int, help=\"number of workers\")\r\nparser.add_argument(\"--feature_size\", default=48, type=int, help=\"feature size\")\r\nparser.add_argument(\"--in_channels\", default=1, type=int, help=\"number of input channels\")\r\nparser.add_argument(\"--out_channels\", default=3, type=int, help=\"number of output channels\")\r\nparser.add_argument(\"--dropout_rate\", default=0.0, type=float, help=\"dropout rate\")\r\nparser.add_argument(\"--dropout_path_rate\", default=0.0, type=float, help=\"drop path rate\")\r\nparser.add_argument(\"--lrschedule\", default=\"warmup_cosine\", type=str, help=\"type of learning rate scheduler\")\r\n# warmup is important !!!\r\nparser.add_argument(\"--warmup_epochs\", default=5, type=int, help=\"number of warmup epochs\")\r\nparser.add_argument(\"--resume_ckpt\", action=\"store_true\", help=\"resume training from pretrained checkpoint\")\r\nparser.add_argument(\"--use_checkpoint\", default=True, help=\"use gradient checkpointing to save memory\")\r\nparser.add_argument(\"--use_ssl_pretrained\", default=True, help=\"use self-supervised pretrained weights\")\r\nparser.add_argument(\"--spatial_dims\", default=3, type=int, help=\"spatial dimension of input data\")\r\nparser.add_argument(\"--squared_dice\", action=\"store_true\", help=\"use squared Dice\")\r\n\r\n\r\ndef main():\r\n    args = parser.parse_args()\r\n    args.test_mode = True\r\n    _, loader = get_loader(args)\r\n\r\n    pretrained_dir = args.pretrained_dir\r\n    model_name = args.pretrained_model_name\r\n    device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\r\n\r\n    pretrained_pth = os.path.join(pretrained_dir, model_name)\r\n    from model import Swin\r\n    model = Swin(args)\r\n\r\n    model_dict = torch.load(pretrained_pth)[\"state_dict\"]\r\n    model.load_state_dict(model_dict, strict=True)\r\n    model.eval()\r\n    model.to(device)\r\n\r\n    with torch.no_grad():\r\n        
num_correct = 0.0\r\n        metric_count = 0\r\n        for idx, batch_data in enumerate(loader):\r\n            if isinstance(batch_data, list):\r\n                data, target = batch_data\r\n            else:\r\n                data, target = batch_data[\"image\"], batch_data[\"label\"]\r\n\r\n            # data = resize(data)\r\n            data, target = data.cuda(args.rank), target.cuda(args.rank)\r\n\r\n            logits = model(data)\r\n\r\n            value = torch.eq(logits.argmax(dim=1), target)\r\n\r\n            metric_count += len(value)\r\n            num_correct += value.sum().item()\r\n\r\n            metric = num_correct / metric_count\r\n            print(\r\n                \"Val {}/{}\".format(idx, len(loader)),\r\n                \"acc\",\r\n                metric,\r\n            )\r\n\r\n\r\nif __name__ == \"__main__\":\r\n    main()\r\n"
  },
  {
    "path": "Finetune/CC-CCII/main.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport argparse\nimport os\nfrom functools import partial\nimport logging\nimport numpy as np\nimport torch\nimport torch.distributed as dist\nimport torch.multiprocessing as mp\nimport torch.nn.parallel\nimport torch.utils.data.distributed\nfrom optimizers.lr_scheduler import LinearWarmupCosineAnnealingLR\nfrom trainer import run_training\nfrom utils.data_utils import get_loader\n\nfrom monai.inferers import sliding_window_inference\nfrom monai.losses import DiceCELoss\nfrom monai.metrics import DiceMetric\nfrom monai.networks.nets import SwinUNETR\nfrom monai.transforms import Activations, AsDiscrete, Compose\nfrom monai.utils.enums import MetricReduction\nos.environ['CUDA_VISIBLE_DEVICES'] = \"2\"\nos.environ['MASTER_ADDR'] = 'localhost'\nos.environ['MASTER_PORT'] = '28890'\n\nimport resource\nrlimit = resource.getrlimit(resource.RLIMIT_NOFILE)\nresource.setrlimit(resource.RLIMIT_NOFILE, (8192, rlimit[1]))\nprint('Setting resource limit:', str(resource.getrlimit(resource.RLIMIT_NOFILE)))\n\n\nparser = argparse.ArgumentParser(description=\"Swin UNETR segmentation pipeline\")\nparser.add_argument(\"--checkpoint\", default=None, help=\"start training from saved checkpoint\")\nparser.add_argument(\"--logdir\", default=\"logs\", type=str, help=\"directory to save the tensorboard logs\")\nparser.add_argument(\n    \"--pretrained_dir\", 
default=\"./pretrained_models/\", type=str, help=\"pretrained checkpoint directory\"\n)\nparser.add_argument(\n    \"--pretrained_checkpoint\",default=\"VoCo_10k.pt\", type=str, help=\"VoCo_10k pretrained model\")\n\nparser.add_argument(\"--csv_list\", default=\"./csv/\", type=str, help=\"csv directory\")\nparser.add_argument(\"--fold\", default=0, type=int, help=\"fold\")\nparser.add_argument(\"--data_dir\", default=\"/data/jiaxin/data/CC-CCII_public/data/\", type=str, help=\"dataset directory\")\nparser.add_argument(\n    \"--pretrained_model_name\",\n    default=\"model_bestVal_big.pt\",\n    type=str,\n    help=\"pretrained model name\",\n)\n\nparser.add_argument(\"--save_checkpoint\", default=True, help=\"save checkpoint during training\")\nparser.add_argument(\"--max_epochs\", default=100, type=int, help=\"max number of training epochs\")\nparser.add_argument(\"--batch_size\", default=4, type=int, help=\"number of batch size\")\nparser.add_argument(\"--sw_batch_size\", default=1, type=int, help=\"number of sliding window batch size\")\nparser.add_argument(\"--optim_lr\", default=3e-4, type=float, help=\"optimization learning rate\")\nparser.add_argument(\"--optim_name\", default=\"adamw\", type=str, help=\"optimization algorithm\")\nparser.add_argument(\"--reg_weight\", default=1e-5, type=float, help=\"regularization weight\")\nparser.add_argument(\"--momentum\", default=0.99, type=float, help=\"momentum\")\nparser.add_argument(\"--noamp\", action=\"store_true\", help=\"do NOT use amp for training\")\nparser.add_argument(\"--val_every\", default=5, type=int, help=\"validation frequency\")\nparser.add_argument(\"--distributed\", action=\"store_true\", help=\"start distributed training\")\nparser.add_argument(\"--world_size\", default=1, type=int, help=\"number of nodes for distributed training\")\nparser.add_argument(\"--rank\", default=0, type=int, help=\"node rank for distributed training\")\nparser.add_argument(\"--dist-url\", 
default=\"tcp://127.0.0.1:23456\", type=str, help=\"distributed url\")\nparser.add_argument(\"--dist-backend\", default=\"nccl\", type=str, help=\"distributed backend\")\nparser.add_argument(\"--norm_name\", default=\"instance\", type=str, help=\"normalization name\")\nparser.add_argument(\"--workers\", default=4, type=int, help=\"number of workers\")\nparser.add_argument(\"--feature_size\", default=48, type=int, help=\"feature size\")\nparser.add_argument(\"--in_channels\", default=1, type=int, help=\"number of input channels\")\nparser.add_argument(\"--out_channels\", default=3, type=int, help=\"number of output channels\")\nparser.add_argument(\"--dropout_rate\", default=0.0, type=float, help=\"dropout rate\")\nparser.add_argument(\"--dropout_path_rate\", default=0.0, type=float, help=\"drop path rate\")\nparser.add_argument(\"--lrschedule\", default=\"warmup_cosine\", type=str, help=\"type of learning rate scheduler\")\n\nparser.add_argument(\"--warmup_epochs\", default=5, type=int, help=\"number of warmup epochs\")\nparser.add_argument(\"--resume_ckpt\", action=\"store_true\", help=\"resume training from pretrained checkpoint\")\nparser.add_argument(\"--use_checkpoint\", default=True, help=\"use gradient checkpointing to save memory\")\nparser.add_argument(\"--use_ssl_pretrained\", default=True, help=\"use self-supervised pretrained weights\")\nparser.add_argument(\"--spatial_dims\", default=3, type=int, help=\"spatial dimension of input data\")\nparser.add_argument(\"--squared_dice\", action=\"store_true\", help=\"use squared Dice\")\n\n\ndef main():\n    args = parser.parse_args()\n    args.amp = not args.noamp\n    if args.distributed:\n        args.ngpus_per_node = torch.cuda.device_count()\n        print(\"Found total gpus\", args.ngpus_per_node)\n        args.world_size = args.ngpus_per_node * args.world_size\n        mp.spawn(main_worker, nprocs=args.ngpus_per_node, args=(args,))\n    else:\n        main_worker(gpu=0, args=args)\n\n\ndef 
main_worker(gpu, args):\n    if args.distributed:\n        torch.multiprocessing.set_start_method(\"fork\", force=True)\n    np.set_printoptions(formatter={\"float\": \"{: 0.3f}\".format}, suppress=True)\n    args.gpu = gpu\n    if args.distributed:\n        args.rank = args.rank * args.ngpus_per_node + gpu\n        dist.init_process_group(\n            backend=args.dist_backend, init_method=args.dist_url, world_size=args.world_size, rank=args.rank\n        )\n    torch.cuda.set_device(args.gpu)\n    torch.backends.cudnn.benchmark = True\n    args.test_mode = False\n    loader = get_loader(args)\n    print(args.rank, \" gpu\", args.gpu)\n    if args.rank == 0:\n        print(\"Batch size is:\", args.batch_size, \"epochs\", args.max_epochs)\n\n    if args.rank == 0:\n        os.makedirs(args.logdir, exist_ok=True)\n    logger = init_log('global', logging.INFO)\n    logger.propagate = 0\n\n    pretrained_dir = args.pretrained_dir\n    from model import Swin\n    model = Swin(args)\n    # from densenet import densenet3d\n    # model = densenet3d()\n\n    if args.resume_ckpt:\n        model_dict = torch.load(os.path.join(pretrained_dir, args.pretrained_model_name))[\"state_dict\"]\n        model.load_state_dict(model_dict)\n        print(\"Use pretrained weights\")\n\n    if args.use_ssl_pretrained:\n        try:\n            # model_VoCoEMA.pt\n            # model_dict = torch.load(\"./pretrained_models/supervised_suprem_swinunetr_2100.pth\", map_location=torch.device('cpu'))\n            # model_dict = torch.load(\"./pretrained_models/model_VoCoEMA.pt\", map_location=torch.device('cpu'))\n            model_dict = torch.load(args.pretrained_checkpoint,\n                                    map_location=torch.device('cpu'))\n            state_dict = model_dict\n            # fix potential differences in state dict keys from pre-training to\n            # fine-tuning\n            if \"module.\" in list(state_dict.keys())[0]:\n                print(\"Tag 'module.' 
found in state dict - fixing!\")\n                for key in list(state_dict.keys()):\n                    state_dict[key.replace(\"module.\", \"\")] = state_dict.pop(key)\n            if \"swin_vit\" in list(state_dict.keys())[0]:\n                print(\"Tag 'swin_vit' found in state dict - fixing!\")\n                for key in list(state_dict.keys()):\n                    state_dict[key.replace(\"swin_vit\", \"swinViT\")] = state_dict.pop(key)\n            # We now load model weights, setting param `strict` to False, i.e.:\n            # this load the encoder weights (Swin-ViT, SSL pre-trained), but leaves\n            # the decoder weights untouched (CNN UNet decoder).\n            model.load_state_dict(state_dict, strict=False)\n            print(\"Using pretrained voco ema self-supervised Swin UNETR backbone weights !\")\n        except ValueError:\n            raise ValueError(\"Self-supervised pre-trained weights not available for\" + str(args.model_name))\n\n    pytorch_total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)\n    print(\"Total parameters count\", pytorch_total_params)\n\n    best_acc = 0\n    start_epoch = 0\n\n    if args.checkpoint is not None:\n        checkpoint = torch.load(args.checkpoint, map_location=\"cpu\")\n        from collections import OrderedDict\n\n        new_state_dict = OrderedDict()\n        for k, v in checkpoint[\"state_dict\"].items():\n            new_state_dict[k.replace(\"backbone.\", \"\")] = v\n        model.load_state_dict(new_state_dict, strict=False)\n        if \"epoch\" in checkpoint:\n            start_epoch = checkpoint[\"epoch\"]\n        if \"best_acc\" in checkpoint:\n            best_acc = checkpoint[\"best_acc\"]\n        print(\"=> loaded checkpoint '{}' (epoch {}) (bestacc {})\".format(args.checkpoint, start_epoch, best_acc))\n\n    model.cuda(args.gpu)\n\n    if args.distributed:\n        torch.cuda.set_device(args.gpu)\n        if args.norm_name == \"batch\":\n            
model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model)\n        model.cuda(args.gpu)\n        model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu], output_device=args.gpu)\n    if args.optim_name == \"adam\":\n        optimizer = torch.optim.Adam(model.parameters(), lr=args.optim_lr, weight_decay=args.reg_weight)\n    elif args.optim_name == \"adamw\":\n        optimizer = torch.optim.AdamW(model.parameters(), lr=args.optim_lr, amsgrad=True)\n    elif args.optim_name == \"sgd\":\n        optimizer = torch.optim.SGD(\n            model.parameters(), lr=args.optim_lr, momentum=args.momentum, nesterov=True, weight_decay=args.reg_weight\n        )\n    else:\n        raise ValueError(\"Unsupported Optimization Procedure: \" + str(args.optim_name))\n\n    if args.lrschedule == \"warmup_cosine\":\n        max_steps = args.max_epochs * len(loader[0])\n        warmup_steps = args.warmup_epochs * len(loader[0])\n        scheduler = LinearWarmupCosineAnnealingLR(\n            optimizer, warmup_epochs=warmup_steps, max_epochs=max_steps\n        )\n    elif args.lrschedule == \"cosine_anneal\":\n        scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=args.max_epochs)\n        if args.checkpoint is not None:\n            scheduler.step(epoch=start_epoch)\n    else:\n        scheduler = None\n    accuracy = run_training(\n        model=model,\n        train_loader=loader[0],\n        val_loader=loader[1],\n        optimizer=optimizer,\n        args=args,\n        scheduler=scheduler,\n        start_epoch=start_epoch,\n    )\n    return accuracy\n\nlogs = set()\ndef init_log(name, level=logging.INFO):\n    if (name, level) in logs:\n        return\n    logs.add((name, level))\n    logger = logging.getLogger(name)\n    logger.setLevel(level)\n    ch = logging.StreamHandler()\n    ch.setLevel(level)\n    if \"SLURM_PROCID\" in os.environ:\n        rank = int(os.environ[\"SLURM_PROCID\"])\n        logger.addFilter(lambda 
record: rank == 0)\n    else:\n        rank = 0\n    format_str = \"[%(asctime)s][%(levelname)8s] %(message)s\"\n    formatter = logging.Formatter(format_str)\n    ch.setFormatter(formatter)\n    logger.addHandler(ch)\n    return logger\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },
  {
    "path": "Finetune/CC-CCII/model.py",
    "content": "import torch\r\nimport torch.nn as nn\r\nimport numpy as np\r\nfrom monai.networks.nets.swin_unetr import *\r\nfrom monai.networks.blocks import PatchEmbed, UnetOutBlock, UnetrBasicBlock, UnetrUpBlock\r\nfrom monai.networks.nets.swin_unetr import SwinTransformer as SwinViT\r\nfrom monai.utils import ensure_tuple_rep\r\nimport argparse\r\nimport torch.nn.functional as F\r\n\r\n\r\nclass Swin(nn.Module):\r\n    def __init__(self, args):\r\n        super(Swin, self).__init__()\r\n        patch_size = ensure_tuple_rep(2, args.spatial_dims)\r\n        window_size = ensure_tuple_rep(7, args.spatial_dims)\r\n        self.swinViT = SwinViT(\r\n            in_chans=args.in_channels,\r\n            embed_dim=args.feature_size,\r\n            window_size=window_size,\r\n            patch_size=patch_size,\r\n            depths=[2, 2, 2, 2],\r\n            num_heads=[3, 6, 12, 24],\r\n            mlp_ratio=4.0,\r\n            qkv_bias=True,\r\n            drop_rate=0.0,\r\n            attn_drop_rate=0.0,\r\n            drop_path_rate=args.dropout_path_rate,\r\n            norm_layer=torch.nn.LayerNorm,\r\n            use_checkpoint=args.use_checkpoint,\r\n            spatial_dims=args.spatial_dims,\r\n            use_v2=True\r\n        )\r\n        norm_name = 'instance'\r\n        self.encoder1 = UnetrBasicBlock(\r\n            spatial_dims=args.spatial_dims,\r\n            in_channels=args.in_channels,\r\n            out_channels=args.feature_size,\r\n            kernel_size=3,\r\n            stride=1,\r\n            norm_name=norm_name,\r\n            res_block=True,\r\n        )\r\n\r\n        self.encoder2 = UnetrBasicBlock(\r\n            spatial_dims=args.spatial_dims,\r\n            in_channels=args.feature_size,\r\n            out_channels=args.feature_size,\r\n            kernel_size=3,\r\n            stride=1,\r\n            norm_name=norm_name,\r\n            res_block=True,\r\n        )\r\n\r\n        self.encoder3 = UnetrBasicBlock(\r\n            
spatial_dims=args.spatial_dims,\r\n            in_channels=2 * args.feature_size,\r\n            out_channels=2 * args.feature_size,\r\n            kernel_size=3,\r\n            stride=1,\r\n            norm_name=norm_name,\r\n            res_block=True,\r\n        )\r\n\r\n        self.encoder4 = UnetrBasicBlock(\r\n            spatial_dims=args.spatial_dims,\r\n            in_channels=4 * args.feature_size,\r\n            out_channels=4 * args.feature_size,\r\n            kernel_size=3,\r\n            stride=1,\r\n            norm_name=norm_name,\r\n            res_block=True,\r\n        )\r\n\r\n        self.encoder10 = UnetrBasicBlock(\r\n            spatial_dims=args.spatial_dims,\r\n            in_channels=16 * args.feature_size,\r\n            out_channels=16 * args.feature_size,\r\n            kernel_size=3,\r\n            stride=1,\r\n            norm_name=norm_name,\r\n            res_block=True,\r\n        )\r\n\r\n        self.decoder5 = UnetrUpBlock(\r\n            spatial_dims=args.spatial_dims,\r\n            in_channels=16 * args.feature_size,\r\n            out_channels=8 * args.feature_size,\r\n            kernel_size=3,\r\n            upsample_kernel_size=2,\r\n            norm_name=norm_name,\r\n            res_block=True,\r\n        )\r\n\r\n        self.decoder4 = UnetrUpBlock(\r\n            spatial_dims=args.spatial_dims,\r\n            in_channels=args.feature_size * 8,\r\n            out_channels=args.feature_size * 4,\r\n            kernel_size=3,\r\n            upsample_kernel_size=2,\r\n            norm_name=norm_name,\r\n            res_block=True,\r\n        )\r\n\r\n        self.decoder3 = UnetrUpBlock(\r\n            spatial_dims=args.spatial_dims,\r\n            in_channels=args.feature_size * 4,\r\n            out_channels=args.feature_size * 2,\r\n            kernel_size=3,\r\n            upsample_kernel_size=2,\r\n            norm_name=norm_name,\r\n            res_block=True,\r\n        )\r\n        self.decoder2 = 
UnetrUpBlock(\r\n            spatial_dims=args.spatial_dims,\r\n            in_channels=args.feature_size * 2,\r\n            out_channels=args.feature_size,\r\n            kernel_size=3,\r\n            upsample_kernel_size=2,\r\n            norm_name=norm_name,\r\n            res_block=True,\r\n        )\r\n\r\n        self.decoder1 = UnetrUpBlock(\r\n            spatial_dims=args.spatial_dims,\r\n            in_channels=args.feature_size,\r\n            out_channels=args.feature_size,\r\n            kernel_size=3,\r\n            upsample_kernel_size=2,\r\n            norm_name=norm_name,\r\n            res_block=True,\r\n        )\r\n\r\n        self.head = nn.Linear(args.feature_size, 3)\r\n\r\n    def forward(self, x_in):\r\n        b = x_in.size()[0]\r\n        x_in = torch.cat([x_in, x_in], dim=2)\r\n        hidden_states_out = self.swinViT(x_in)\r\n\r\n        enc0 = self.encoder1(x_in)\r\n        enc1 = self.encoder2(hidden_states_out[0])\r\n        enc2 = self.encoder3(hidden_states_out[1])\r\n        enc3 = self.encoder4(hidden_states_out[2])\r\n        dec4 = self.encoder10(hidden_states_out[4])\r\n\r\n        dec3 = self.decoder5(dec4, hidden_states_out[3])\r\n        dec2 = self.decoder4(dec3, enc3)\r\n        dec1 = self.decoder3(dec2, enc2)\r\n        dec0 = self.decoder2(dec1, enc1)\r\n        out = self.decoder1(dec0, enc0)\r\n\r\n        out = F.adaptive_avg_pool3d(out, (1, 1, 1))\r\n        out = self.head(out.view(b, -1))\r\n\r\n        return out\r\n\r\n\r\nif __name__ == '__main__':\r\n    parser = argparse.ArgumentParser(description=\"PyTorch Training\")\r\n    parser.add_argument(\"--in_channels\", default=1, type=int, help=\"number of input channels\")\r\n    parser.add_argument(\"--feature_size\", default=48, type=int, help=\"embedding size\")\r\n    parser.add_argument(\"--out_channels\", default=14, type=int, help=\"number of output channels\")\r\n    parser.add_argument(\"--dropout_path_rate\", default=0.0, type=float, help=\"drop path 
rate\")\r\n    parser.add_argument(\"--use_checkpoint\", action=\"store_true\", help=\"use gradient checkpointing to save memory\")\r\n    parser.add_argument(\"--spatial_dims\", default=3, type=int, help=\"spatial dimension of input data\")\r\n\r\n    args = parser.parse_args()\r\n    x = torch.rand(2, 1, 32, 128, 128)\r\n    model = Swin(args)\r\n    y = model(x)\r\n    print(y.shape)\r\n\r\n\r\n\r\n"
  },
  {
    "path": "Finetune/CC-CCII/optimizers/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/CC-CCII/optimizers/lr_scheduler.py",
    "content": "# Copyright 2020 - 2021 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nimport warnings\nfrom typing import List\n\nfrom torch import nn as nn\nfrom torch.optim import Adam, Optimizer\nfrom torch.optim.lr_scheduler import LambdaLR, _LRScheduler\n\n__all__ = [\"LinearLR\", \"ExponentialLR\"]\n\n\nclass _LRSchedulerMONAI(_LRScheduler):\n    \"\"\"Base class for increasing the learning rate between two boundaries over a number\n    of iterations\"\"\"\n\n    def __init__(self, optimizer: Optimizer, end_lr: float, num_iter: int, last_epoch: int = -1) -> None:\n        \"\"\"\n        Args:\n            optimizer: wrapped optimizer.\n            end_lr: the final learning rate.\n            num_iter: the number of iterations over which the test occurs.\n            last_epoch: the index of last epoch.\n        Returns:\n            None\n        \"\"\"\n        self.end_lr = end_lr\n        self.num_iter = num_iter\n        super(_LRSchedulerMONAI, self).__init__(optimizer, last_epoch)\n\n\nclass LinearLR(_LRSchedulerMONAI):\n    \"\"\"Linearly increases the learning rate between two boundaries over a number of\n    iterations.\n    \"\"\"\n\n    def get_lr(self):\n        r = self.last_epoch / (self.num_iter - 1)\n        return [base_lr + r * (self.end_lr - base_lr) for base_lr in self.base_lrs]\n\n\nclass ExponentialLR(_LRSchedulerMONAI):\n    \"\"\"Exponentially increases the learning rate between two boundaries over a 
number of\n    iterations.\n    \"\"\"\n\n    def get_lr(self):\n        r = self.last_epoch / (self.num_iter - 1)\n        return [base_lr * (self.end_lr / base_lr) ** r for base_lr in self.base_lrs]\n\n\nclass WarmupCosineSchedule(LambdaLR):\n    \"\"\"Linear warmup and then cosine decay.\n    Based on https://huggingface.co/ implementation.\n    \"\"\"\n\n    def __init__(\n        self, optimizer: Optimizer, warmup_steps: int, t_total: int, cycles: float = 0.5, last_epoch: int = -1\n    ) -> None:\n        \"\"\"\n        Args:\n            optimizer: wrapped optimizer.\n            warmup_steps: number of warmup iterations.\n            t_total: total number of training iterations.\n            cycles: cosine cycles parameter.\n            last_epoch: the index of last epoch.\n        Returns:\n            None\n        \"\"\"\n        self.warmup_steps = warmup_steps\n        self.t_total = t_total\n        self.cycles = cycles\n        super(WarmupCosineSchedule, self).__init__(optimizer, self.lr_lambda, last_epoch)\n\n    def lr_lambda(self, step):\n        if step < self.warmup_steps:\n            return float(step) / float(max(1.0, self.warmup_steps))\n        progress = float(step - self.warmup_steps) / float(max(1, self.t_total - self.warmup_steps))\n        return max(0.0, 0.5 * (1.0 + math.cos(math.pi * float(self.cycles) * 2.0 * progress)))\n\n\nclass LinearWarmupCosineAnnealingLR(_LRScheduler):\n    def __init__(\n        self,\n        optimizer: Optimizer,\n        warmup_epochs: int,\n        max_epochs: int,\n        warmup_start_lr: float = 0.0,\n        eta_min: float = 0.0,\n        last_epoch: int = -1,\n    ) -> None:\n        \"\"\"\n        Args:\n            optimizer (Optimizer): Wrapped optimizer.\n            warmup_epochs (int): Maximum number of iterations for linear warmup\n            max_epochs (int): Maximum number of iterations\n            warmup_start_lr (float): Learning rate to start the linear warmup. 
Default: 0.\n            eta_min (float): Minimum learning rate. Default: 0.\n            last_epoch (int): The index of last epoch. Default: -1.\n        \"\"\"\n        self.warmup_epochs = warmup_epochs\n        self.max_epochs = max_epochs\n        self.warmup_start_lr = warmup_start_lr\n        self.eta_min = eta_min\n\n        super(LinearWarmupCosineAnnealingLR, self).__init__(optimizer, last_epoch)\n\n    def get_lr(self) -> List[float]:\n        \"\"\"\n        Compute learning rate using chainable form of the scheduler\n        \"\"\"\n        if not self._get_lr_called_within_step:\n            warnings.warn(\n                \"To get the last learning rate computed by the scheduler, \" \"please use `get_last_lr()`.\", UserWarning\n            )\n\n        if self.last_epoch == 0:\n            return [self.warmup_start_lr] * len(self.base_lrs)\n        elif self.last_epoch < self.warmup_epochs:\n            return [\n                group[\"lr\"] + (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)\n                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)\n            ]\n        elif self.last_epoch == self.warmup_epochs:\n            return self.base_lrs\n        elif (self.last_epoch - 1 - self.max_epochs) % (2 * (self.max_epochs - self.warmup_epochs)) == 0:\n            return [\n                group[\"lr\"]\n                + (base_lr - self.eta_min) * (1 - math.cos(math.pi / (self.max_epochs - self.warmup_epochs))) / 2\n                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)\n            ]\n\n        return [\n            (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))\n            / (\n                1\n                + math.cos(\n                    math.pi * (self.last_epoch - self.warmup_epochs - 1) / (self.max_epochs - self.warmup_epochs)\n                )\n            )\n            * (group[\"lr\"] - 
self.eta_min)\n            + self.eta_min\n            for group in self.optimizer.param_groups\n        ]\n\n    def _get_closed_form_lr(self) -> List[float]:\n        \"\"\"\n        Called when epoch is passed as a param to the `step` function of the scheduler.\n        \"\"\"\n        if self.last_epoch < self.warmup_epochs:\n            return [\n                self.warmup_start_lr + self.last_epoch * (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)\n                for base_lr in self.base_lrs\n            ]\n\n        return [\n            self.eta_min\n            + 0.5\n            * (base_lr - self.eta_min)\n            * (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))\n            for base_lr in self.base_lrs\n        ]\n"
  },
  {
    "path": "Finetune/CC-CCII/train.sh",
    "content": "now=$(date +\"%Y%m%d_%H%M%S\")\nlogdir=runs/logs\nmkdir -p $logdir\n\ntorchrun --master_port=25584 main.py \\\n    --logdir $logdir | tee $logdir/$now.txt"
  },
  {
    "path": "Finetune/CC-CCII/trainer.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport os\nimport shutil\nimport time\n\nimport numpy as np\nimport torch\nimport torch.nn.parallel\nimport torch.utils.data.distributed\nfrom tensorboardX import SummaryWriter\nfrom torch.cuda.amp import GradScaler, autocast\nfrom utils.utils import AverageMeter, distributed_all_gather\nimport torch.nn.functional as F\nfrom monai.data import decollate_batch\n\n\ndef resize(img):\n    size = 256\n    b, _, c, h, w = img.size()\n    new_img = []\n    for i in range(b):\n        im = img[i, :, :, :, :]\n        im = F.interpolate(im, size=[size, size], mode='bilinear', align_corners=True)\n        new_img.append(im.unsqueeze(0))\n    new_img = torch.cat(new_img, dim=0)\n    return new_img\n\n\ndef train_epoch(model, loader, optimizer, scheduler, scaler, epoch, args):\n    model.train()\n    start_time = time.time()\n    run_loss = AverageMeter()\n\n    loss_func = torch.nn.CrossEntropyLoss()\n\n    for idx, batch_data in enumerate(loader):\n        if isinstance(batch_data, list):\n            data, target = batch_data\n        else:\n            data, target = batch_data[\"image\"], batch_data[\"label\"]\n\n        data = resize(data)\n        data, target = data.cuda(args.rank), target.cuda(args.rank)\n\n        for param in model.parameters():\n            param.grad = None\n\n        logits = model(data)\n        loss = loss_func(logits, target)\n        # 
print(logits.argmax(1)[0].item(), target[0].item())\n\n        loss.backward()\n        optimizer.step()\n        run_loss.update(loss.item(), n=args.batch_size)\n\n        lr = optimizer.param_groups[0][\"lr\"]\n        if scheduler is not None:\n            scheduler.step()\n\n        length = len(loader) // 4\n        if args.rank == 0 and (idx + 1) % length == 0:\n            print(\n                \"Epoch {}/{} {}/{}\".format(epoch, args.max_epochs, idx, len(loader)),\n                \"loss: {:.4f}\".format(run_loss.avg),\n                \"lr: {:.8f}\".format(lr),\n                \"time {:.2f}s\".format(time.time() - start_time),\n            )\n        start_time = time.time()\n    for param in model.parameters():\n        param.grad = None\n    return run_loss.avg\n\n\ndef val_epoch(model, loader, epoch, args):\n    model.eval()\n    start_time = time.time()\n    with torch.no_grad():\n        num_correct = 0.0\n        metric_count = 0\n        for idx, batch_data in enumerate(loader):\n            if isinstance(batch_data, list):\n                data, target = batch_data\n            else:\n                data, target = batch_data[\"image\"], batch_data[\"label\"]\n\n            data = resize(data)\n            data, target = data.cuda(args.rank), target.cuda(args.rank)\n\n            with autocast(enabled=args.amp):\n                logits = model(data)\n\n            value = torch.eq(logits.argmax(dim=1), target)\n\n            metric_count += len(value)\n            num_correct += value.sum().item()\n\n            metric = num_correct / metric_count\n\n            if args.rank == 0:\n                print(\n                    \"Val {}/{} {}/{}\".format(epoch, args.max_epochs, idx, len(loader)),\n                    \"acc\",\n                    metric,\n                    \"time {:.2f}s\".format(time.time() - start_time),\n            )\n    return metric\n\n\ndef save_checkpoint(model, epoch, args, filename=\"model.pt\", best_acc=0, 
optimizer=None, scheduler=None):\n    state_dict = model.state_dict() if not args.distributed else model.module.state_dict()\n    save_dict = {\"epoch\": epoch, \"best_acc\": best_acc, \"state_dict\": state_dict}\n    if optimizer is not None:\n        save_dict[\"optimizer\"] = optimizer.state_dict()\n    if scheduler is not None:\n        save_dict[\"scheduler\"] = scheduler.state_dict()\n    filename = os.path.join(args.logdir, filename)\n    torch.save(save_dict, filename)\n    print(\"Saving checkpoint\", filename)\n\n\ndef run_training(\n    model,\n    train_loader,\n    val_loader,\n    optimizer,\n    args,\n    scheduler=None,\n    start_epoch=0,\n):\n    writer = None\n    if args.logdir is not None and args.rank == 0:\n        writer = SummaryWriter(log_dir=args.logdir)\n        if args.rank == 0:\n            print(\"Writing Tensorboard logs to \", args.logdir)\n    scaler = None\n    if args.amp:\n        scaler = GradScaler()\n    val_acc_max = 0.0\n    for epoch in range(start_epoch, args.max_epochs):\n        if args.distributed:\n            train_loader.sampler.set_epoch(epoch)\n            torch.distributed.barrier()\n        print(args.rank, time.ctime(), \"Epoch:\", epoch)\n        epoch_time = time.time()\n        train_loss = train_epoch(\n            model, train_loader, optimizer, scheduler, scaler=scaler, epoch=epoch, args=args\n        )\n        if args.rank == 0:\n            print(\n                \"Final training  {}/{}\".format(epoch, args.max_epochs - 1),\n                \"loss: {:.4f}\".format(train_loss),\n                \"time {:.2f}s\".format(time.time() - epoch_time),\n            )\n        if args.rank == 0 and writer is not None:\n            writer.add_scalar(\"train_loss\", train_loss, epoch)\n        b_new_best = False\n        if (epoch + 1) % args.val_every == 0:\n            if args.distributed:\n                torch.distributed.barrier()\n            epoch_time = time.time()\n            val_avg_acc = 
val_epoch(\n                model,\n                val_loader,\n                epoch=epoch,\n                args=args,\n            )\n\n            val_avg_acc = np.mean(val_avg_acc)\n\n            if args.rank == 0:\n                print(\n                    \"Final validation  {}/{}\".format(epoch, args.max_epochs - 1),\n                    \"acc\",\n                    val_avg_acc,\n                    \"time {:.2f}s\".format(time.time() - epoch_time),\n                )\n                if writer is not None:\n                    writer.add_scalar(\"val_acc\", val_avg_acc, epoch)\n                if val_avg_acc > val_acc_max:\n                    print(\"new best ({:.6f} --> {:.6f}). \".format(val_acc_max, val_avg_acc))\n                    val_acc_max = val_avg_acc\n                    b_new_best = True\n                    if args.rank == 0 and args.logdir is not None and args.save_checkpoint:\n                        save_checkpoint(\n                            model, epoch, args, best_acc=val_acc_max, optimizer=optimizer, scheduler=scheduler\n                        )\n            if args.rank == 0 and args.logdir is not None and args.save_checkpoint:\n                save_checkpoint(model, epoch, args, best_acc=val_acc_max, filename=\"model_final.pt\")\n                if b_new_best:\n                    print(\"Copying to model.pt new best model!!!!\")\n                    shutil.copyfile(os.path.join(args.logdir, \"model_final.pt\"), os.path.join(args.logdir, \"model.pt\"))\n\n        if scheduler is not None:\n            scheduler.step()\n\n    print(\"Training Finished !, Best Accuracy: \", val_acc_max)\n\n    return val_acc_max\n"
  },
  {
    "path": "Finetune/CC-CCII/utils/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/CC-CCII/utils/data_utils.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nimport os\nimport pickle\nimport numpy as np\nimport torch\n\nfrom monai import data, transforms\nfrom monai.data import *\nimport pandas as pd\nimport random\n\n\ndef get_loader(args):\n    '''Get the dataloader for the CCII dataset.'''\n    # Transforms\n    def __transforms__(augmentation=True, npy=None, args=None):\n        RANDOM_BRIGHTNESS = 7\n        RANDOM_CONTRAST = 5\n        pre_size = 420\n        final_size = 384\n        spatial_limit = int((pre_size-final_size)/2.0)\n        # pre_top_left = int((512-pre_size)/2.0)\n        final_top_left = int((512-final_size)/2.0)\n\n        npy_normalized = npy.astype(np.float32) / 255.0 # cast to float\n        if augmentation:\n            # random flip\n            if random.uniform(0, 1) < 0.5: #horizontal flip\n                npy_normalized = np.flipud(npy_normalized)\n            # color jitter\n            br = random.randint(-RANDOM_BRIGHTNESS, RANDOM_BRIGHTNESS) / 100.\n            npy_normalized = npy_normalized + br\n            # Random contrast\n            cr = 1.0 + random.randint(-RANDOM_CONTRAST, RANDOM_CONTRAST) / 100.\n            npy_normalized = npy_normalized * cr\n            # clip values to 0-1 range\n            npy_normalized = np.clip(npy_normalized, 0, 1.0)\n            # random crop\n            offset_x = random.randint(-spatial_limit, spatial_limit)\n            
offset_y = random.randint(-spatial_limit, spatial_limit)\n            npy_normalized = npy_normalized[\n                :,\n                final_top_left+offset_x : final_top_left+final_size+offset_x,\n                final_top_left+offset_y : final_top_left+final_size+offset_y\n                ]\n        else:\n            npy_normalized = npy_normalized[\n                :,\n                final_top_left : final_top_left+final_size,\n                final_top_left : final_top_left+final_size\n                ]\n        return npy_normalized\n\n    train_files_name = os.path.join(args.csv_list, f'CC_CCII_fold{args.fold}_train.csv')\n    val_files_name = os.path.join(args.csv_list, f'CC_CCII_fold{args.fold}_valid.csv')\n    train_files = pd.read_csv(train_files_name)\n    val_files = pd.read_csv(val_files_name)\n\n    train_ds = CC_CCII(data=train_files, transforms=__transforms__, augmentation=True, args=args)\n    print(f'=>Train len {len(train_ds)}')\n    train_loader = torch.utils.data.DataLoader(\n        train_ds, batch_size=args.batch_size, shuffle=True,\n        num_workers=8, pin_memory=True, persistent_workers=True,\n    )\n\n    val_ds = CC_CCII(data=val_files, transforms=__transforms__, augmentation=False,args=args)\n    print(f'=>Val len {len(val_ds)}')\n    val_loader = torch.utils.data.DataLoader(\n        val_ds, batch_size=1, shuffle=False, num_workers=1, pin_memory=True, persistent_workers=True)\n    return train_loader, val_loader\n\n\nclass CC_CCII(torch.utils.data.Dataset):\n    '''CC_CCII Covid-19 classification dataset.\n    This dataset is used for Covid-19 classification.\n    It loads the data from the given directory and csv file.\n    The data is preprocessed and augmented using various techniques.\n    http://ncov-ai.big.ac.cn/download?lang=en\n    '''\n    def __init__(self, data=None, transforms=None, augmentation=True, args=None):\n        super().__init__()\n        self.augmentation = augmentation\n        self.df_meta = 
pd.read_csv(os.path.join(args.csv_list, 'CC_CCII_metadata.csv'))\n\n        df = data\n        self.patients = df['patient_id']\n        self.scans = df['scan_id']\n        self.targets = df['target']\n        self.transforms = transforms\n        self.args = args\n\n    def __getitem__(self, index):\n        target = int(self.targets[index])\n        npy = np.load(\n            os.path.join(\n                self.args.data_dir,\n                'p'+str(self.patients[index])+'-s'+str(self.scans[index])+'.npy'\n                )\n            )\n\n        meta = self.df_meta[(self.df_meta['patient_id'] == self.patients[index])]\n        covariates = [\n            'Age',\n            'Sex(Male1/Female2)',\n            'Critical_illness',\n            'Liver_function',\n            'Lung_function',\n            'Progression (Days)'\n        ]\n        if meta.size == 0:\n            meta = np.array([47, 1.5, 0, 1, 2, 6.89],dtype='f8')\n        else:\n            meta = meta.sample(frac=1.0, replace=True, weights=None, random_state=0, axis=0)\n            meta = np.squeeze(meta[covariates].to_numpy(), axis=0)\n        meta[0] = np.clip(meta[0] / 100, 0.25, 0.95)\n        meta[1] = meta[1] - 1\n        meta[3] = meta[3] / 5\n        meta[4] = meta[4] / 5\n        meta[-1] = meta[-1] / 14\n\n        npy_normalized = self.transforms(self.augmentation, npy, self.args)\n        npy_normalized = npy_normalized[np.newaxis,]\n        return {\n            'image': npy_normalized,\n            'label': target\n        }\n\n    def __len__(self):\n        return len(self.targets)"
  },
  {
    "path": "Finetune/CC-CCII/utils/utils.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport numpy as np\nimport scipy.ndimage as ndimage\nimport torch\n\n\ndef resample_3d(img, target_size):\n    imx, imy, imz = img.shape\n    tx, ty, tz = target_size\n    zoom_ratio = (float(tx) / float(imx), float(ty) / float(imy), float(tz) / float(imz))\n    img_resampled = ndimage.zoom(img, zoom_ratio, order=0, prefilter=False)\n    return img_resampled\n\n\ndef dice(x, y):\n    intersect = np.sum(np.sum(np.sum(x * y)))\n    y_sum = np.sum(np.sum(np.sum(y)))\n    if y_sum == 0:\n        return 0.0\n    x_sum = np.sum(np.sum(np.sum(x)))\n    return 2 * intersect / (x_sum + y_sum)\n\n\nclass AverageMeter(object):\n    def __init__(self):\n        self.reset()\n\n    def reset(self):\n        self.val = 0\n        self.avg = 0\n        self.sum = 0\n        self.count = 0\n\n    def update(self, val, n=1):\n        self.val = val\n        self.sum += val * n\n        self.count += n\n        self.avg = np.where(self.count > 0, self.sum / self.count, self.sum)\n\n\ndef distributed_all_gather(\n    tensor_list, valid_batch_size=None, out_numpy=False, world_size=None, no_barrier=False, is_valid=None\n):\n    if world_size is None:\n        world_size = torch.distributed.get_world_size()\n    if valid_batch_size is not None:\n        valid_batch_size = min(valid_batch_size, world_size)\n    elif is_valid is not None:\n        is_valid = torch.tensor(bool(is_valid), 
dtype=torch.bool, device=tensor_list[0].device)\n    if not no_barrier:\n        torch.distributed.barrier()\n    tensor_list_out = []\n    with torch.no_grad():\n        if is_valid is not None:\n            is_valid_list = [torch.zeros_like(is_valid) for _ in range(world_size)]\n            torch.distributed.all_gather(is_valid_list, is_valid)\n            is_valid = [x.item() for x in is_valid_list]\n        for tensor in tensor_list:\n            gather_list = [torch.zeros_like(tensor) for _ in range(world_size)]\n            torch.distributed.all_gather(gather_list, tensor)\n            if valid_batch_size is not None:\n                gather_list = gather_list[:valid_batch_size]\n            elif is_valid is not None:\n                gather_list = [g for g, v in zip(gather_list, is_valid_list) if v]\n            if out_numpy:\n                gather_list = [t.cpu().numpy() for t in gather_list]\n            tensor_list_out.append(gather_list)\n    return tensor_list_out\n"
  },
  {
    "path": "Finetune/Flare22/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/Flare22/dataset/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/Flare22/dataset/dataset.json",
    "content": "{\r\n    \"description\": \"0\",\r\n    \"labels\": {\r\n        \"0\": \"background\",\r\n        \"1\": \"Liver\",\r\n        \"10\": \"Esophagus\",\r\n        \"11\": \"Stomach\",\r\n        \"12\": \"Duodenum\",\r\n        \"13\": \"Left Kidney\",\r\n        \"2\": \"Right kidney\",\r\n        \"3\": \"Spleen\",\r\n        \"4\": \"Pancreas\",\r\n        \"5\": \"Aorta\",\r\n        \"6\": \"Inferior vena cava\",\r\n        \"7\": \"Right adrenal gland\",\r\n        \"8\": \"Left adrenal gland\",\r\n        \"9\": \"Gallbladder\"\r\n    },\r\n    \"licence\": \"hands off!\",\r\n    \"modality\": {\r\n        \"0\": \"CT\"\r\n    },\r\n    \"name\": \"FLARE22\",\r\n    \"numTest\": 200,\r\n    \"numTraining\": 50,\r\n    \"reference\": \"0\",\r\n    \"release\": \"0.0\",\r\n    \"tensorImageSize\": \"4D\",\r\n    \"test\": [\r\n        \"./imagesTs/FLARETs_0001_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0002_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0003_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0004_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0005_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0006_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0007_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0008_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0009_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0010_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0011_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0012_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0013_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0014_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0015_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0016_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0017_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0018_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0019_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0020_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0021_0000.nii.gz\",\r\n        
\"./imagesTs/FLARETs_0022_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0023_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0024_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0025_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0026_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0027_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0028_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0029_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0030_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0031_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0032_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0033_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0034_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0035_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0036_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0037_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0038_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0039_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0040_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0041_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0042_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0043_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0044_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0045_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0046_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0047_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0048_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0049_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0050_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0051_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0052_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0053_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0054_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0055_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0056_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0057_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0058_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0059_0000.nii.gz\",\r\n        
\"./imagesTs/FLARETs_0060_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0061_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0062_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0063_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0064_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0065_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0066_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0067_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0068_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0069_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0070_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0071_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0072_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0073_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0074_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0075_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0076_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0077_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0078_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0079_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0080_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0081_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0082_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0083_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0084_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0085_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0086_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0087_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0088_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0089_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0090_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0091_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0092_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0093_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0094_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0095_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0096_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0097_0000.nii.gz\",\r\n        
\"./imagesTs/FLARETs_0098_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0099_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0100_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0101_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0102_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0103_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0104_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0105_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0106_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0107_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0108_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0109_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0110_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0111_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0112_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0113_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0114_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0115_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0116_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0117_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0118_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0119_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0120_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0121_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0122_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0123_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0124_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0125_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0126_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0127_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0128_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0129_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0130_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0131_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0132_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0133_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0134_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0135_0000.nii.gz\",\r\n        
\"./imagesTs/FLARETs_0136_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0137_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0138_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0139_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0140_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0141_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0142_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0143_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0144_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0145_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0146_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0147_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0148_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0149_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0150_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0151_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0152_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0153_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0154_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0155_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0156_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0157_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0158_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0159_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0160_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0161_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0162_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0163_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0164_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0165_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0166_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0167_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0168_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0169_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0170_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0171_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0172_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0173_0000.nii.gz\",\r\n        
\"./imagesTs/FLARETs_0174_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0175_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0176_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0177_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0178_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0179_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0180_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0181_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0182_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0183_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0184_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0185_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0186_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0187_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0188_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0189_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0190_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0191_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0192_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0193_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0194_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0195_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0196_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0197_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0198_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0199_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0200_0000.nii.gz\"\r\n    ],\r\n    \"validation\": [{\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0001_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0001.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0002_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0002.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0003_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0003.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0004_0000.nii.gz\",\r\n            
\"label\": \"./labelsTr/FLARE22_Tr_0004.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0005_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0005.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0006_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0006.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0007_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0007.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0008_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0008.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0009_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0009.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0010_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0010.nii.gz\"\r\n        }\r\n    ],\r\n    \"training\": [\r\n\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0011_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0011.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0012_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0012.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0013_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0013.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0014_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0014.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0015_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0015.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0016_0000.nii.gz\",\r\n            \"label\": 
\"./labelsTr/FLARE22_Tr_0016.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0017_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0017.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0018_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0018.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0019_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0019.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0020_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0020.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0021_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0021.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0022_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0022.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0023_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0023.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0024_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0024.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0025_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0025.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0026_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0026.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0027_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0027.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0028_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0028.nii.gz\"\r\n        },\r\n        
{\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0029_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0029.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0030_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0030.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0031_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0031.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0032_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0032.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0033_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0033.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0034_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0034.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0035_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0035.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0036_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0036.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0037_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0037.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0038_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0038.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0039_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0039.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0040_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0040.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": 
\"./imagesTr/FLARE22_Tr_0041_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0041.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0042_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0042.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0043_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0043.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0044_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0044.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0045_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0045.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0046_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0046.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0047_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0047.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0048_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0048.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0049_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0049.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0050_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0050.nii.gz\"\r\n        }\r\n    ]\r\n}"
  },
  {
    "path": "Finetune/Flare22/dataset/dataset_test50.json",
    "content": "{\r\n    \"description\": \"0\",\r\n    \"labels\": {\r\n        \"0\": \"background\",\r\n        \"1\": \"Liver\",\r\n        \"10\": \"Esophagus\",\r\n        \"11\": \"Stomach\",\r\n        \"12\": \"Duodenum\",\r\n        \"13\": \"Left Kidney\",\r\n        \"2\": \"Right kidney\",\r\n        \"3\": \"Spleen\",\r\n        \"4\": \"Pancreas\",\r\n        \"5\": \"Aorta\",\r\n        \"6\": \"Inferior vena cava\",\r\n        \"7\": \"Right adrenal gland\",\r\n        \"8\": \"Left adrenal gland\",\r\n        \"9\": \"Gallbladder\"\r\n    },\r\n    \"licence\": \"hands off!\",\r\n    \"modality\": {\r\n        \"0\": \"CT\"\r\n    },\r\n    \"name\": \"FLARE22\",\r\n    \"numTest\": 50,\r\n    \"numTraining\": 50,\r\n    \"reference\": \"0\",\r\n    \"release\": \"0.0\",\r\n    \"tensorImageSize\": \"4D\",\r\n    \"test\": [\r\n        \"./imagesTs/FLARETs_0001_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0002_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0003_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0004_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0005_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0006_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0007_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0008_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0009_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0010_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0011_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0012_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0013_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0014_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0015_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0016_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0017_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0018_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0019_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0020_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0021_0000.nii.gz\",\r\n        
\"./imagesTs/FLARETs_0022_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0023_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0024_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0025_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0026_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0027_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0028_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0029_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0030_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0031_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0032_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0033_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0034_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0035_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0036_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0037_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0038_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0039_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0040_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0041_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0042_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0043_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0044_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0045_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0046_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0047_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0048_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0049_0000.nii.gz\",\r\n        \"./imagesTs/FLARETs_0050_0000.nii.gz\"\r\n    ],\r\n    \"validation\": [{\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0001_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0001.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0002_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0002.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0003_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0003.nii.gz\"\r\n        
},\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0004_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0004.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0005_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0005.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0006_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0006.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0007_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0007.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0008_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0008.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0009_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0009.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0010_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0010.nii.gz\"\r\n        }\r\n    ],\r\n    \"training\": [\r\n\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0011_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0011.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0012_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0012.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0013_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0013.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0014_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0014.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0015_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0015.nii.gz\"\r\n        },\r\n        {\r\n            
\"image\": \"./imagesTr/FLARE22_Tr_0016_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0016.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0017_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0017.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0018_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0018.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0019_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0019.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0020_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0020.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0021_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0021.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0022_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0022.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0023_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0023.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0024_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0024.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0025_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0025.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0026_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0026.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0027_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0027.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0028_0000.nii.gz\",\r\n          
  \"label\": \"./labelsTr/FLARE22_Tr_0028.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0029_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0029.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0030_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0030.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0031_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0031.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0032_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0032.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0033_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0033.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0034_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0034.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0035_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0035.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0036_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0036.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0037_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0037.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0038_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0038.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0039_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0039.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0040_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0040.nii.gz\"\r\n        
},\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0041_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0041.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0042_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0042.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0043_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0043.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0044_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0044.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0045_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0045.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0046_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0046.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0047_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0047.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0048_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0048.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0049_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0049.nii.gz\"\r\n        },\r\n        {\r\n            \"image\": \"./imagesTr/FLARE22_Tr_0050_0000.nii.gz\",\r\n            \"label\": \"./labelsTr/FLARE22_Tr_0050.nii.gz\"\r\n        }\r\n    ]\r\n}"
  },
  {
    "path": "Finetune/Flare22/inferers.py",
    "content": "\"\"\"Multiview inferer.\"\"\"\r\n\r\nimport warnings\r\nfrom typing import Any, Callable, Dict, List, Mapping, Sequence, Tuple, Union\r\n\r\nimport torch\r\nimport torch.nn.functional as F\r\n\r\nfrom monai.data.utils import compute_importance_map, dense_patch_slices, get_valid_patch_size\r\nfrom monai.transforms import Resize\r\nfrom monai.utils import (\r\n    BlendMode,\r\n    PytorchPadMode,\r\n    convert_data_type,\r\n    ensure_tuple,\r\n    fall_back_tuple,\r\n    look_up_option,\r\n    optional_import,\r\n)\r\nfrom monai.inferers.utils import _get_scan_interval\r\n\r\n# from utils import view_ops\r\n# from utils import view_transforms\r\n\r\ntqdm, _ = optional_import(\"tqdm\", name=\"tqdm\")\r\n\r\n\r\ndef double_sliding_window_inference(\r\n    inputs: torch.Tensor,\r\n    view: int,\r\n    roi_size: Union[Sequence[int], int],\r\n    sw_batch_size: int,\r\n    predictor: Callable[..., Union[torch.Tensor, Sequence[torch.Tensor], Dict[Any, torch.Tensor]]],\r\n    overlap: float = 0.25,\r\n    mode: Union[BlendMode, str] = BlendMode.CONSTANT,\r\n    sigma_scale: Union[Sequence[float], float] = 0.125,\r\n    padding_mode: Union[PytorchPadMode, str] = PytorchPadMode.CONSTANT,\r\n    cval: float = 0.0,\r\n    sw_device: Union[torch.device, str, None] = None,\r\n    device: Union[torch.device, str, None] = None,\r\n    progress: bool = False,\r\n    roi_weight_map: Union[torch.Tensor, None] = None,\r\n    *args: Any,\r\n    **kwargs: Any,\r\n) -> Union[torch.Tensor, Tuple[torch.Tensor, ...], Dict[Any, torch.Tensor]]:\r\n    \"\"\"\r\n    Sliding window inference on two `inputs` with `predictor`.\r\n\r\n    The outputs of `predictor` could be a tensor, a tuple, or a dictionary of tensors.\r\n    Each output in the tuple or dict value is allowed to have different resolutions with respect to the input.\r\n    e.g., the input patch spatial size is [128,128,128], the output (a tuple of two patches) patch sizes\r\n    could be ([128,64,256], 
[64,32,128]).\r\n    In this case, the parameter `overlap` and `roi_size` need to be carefully chosen to ensure the output ROI is still\r\n    an integer. If the predictor's input and output spatial sizes are not equal, we recommend choosing the parameters\r\n    so that `overlap*roi_size*output_size/input_size` is an integer (for each spatial dimension).\r\n\r\n    When roi_size is larger than the inputs' spatial size, the input image are padded during inference.\r\n    To maintain the same spatial sizes, the output image will be cropped to the original input size.\r\n\r\n    Args:\r\n        inputs: input image to be processed (assuming NCHW[D])\r\n        roi_size: the spatial window size for inferences.\r\n            When its components have None or non-positives, the corresponding inputs dimension will be used.\r\n            if the components of the `roi_size` are non-positive values, the transform will use the\r\n            corresponding components of img size. For example, `roi_size=(32, -1)` will be adapted\r\n            to `(32, 64)` if the second spatial dimension size of img is `64`.\r\n        sw_batch_size: the batch size to run window slices.\r\n        predictor: given input tensor ``patch_data`` in shape NCHW[D],\r\n            The outputs of the function call ``predictor(patch_data)`` should be a tensor, a tuple, or a dictionary\r\n            with Tensor values. Each output in the tuple or dict value should have the same batch_size, i.e. 
NM'H'W'[D'];\r\n            where H'W'[D'] represents the output patch's spatial size, M is the number of output channels,\r\n            N is `sw_batch_size`, e.g., the input shape is (7, 1, 128,128,128),\r\n            the output could be a tuple of two tensors, with shapes: ((7, 5, 128, 64, 256), (7, 4, 64, 32, 128)).\r\n            In this case, the parameter `overlap` and `roi_size` need to be carefully chosen\r\n            to ensure the scaled output ROI sizes are still integers.\r\n            If the `predictor`'s input and output spatial sizes are different,\r\n            we recommend choosing the parameters so that ``overlap*roi_size*zoom_scale`` is an integer for each dimension.\r\n        overlap: Amount of overlap between scans.\r\n        mode: {``\"constant\"``, ``\"gaussian\"``}\r\n            How to blend output of overlapping windows. Defaults to ``\"constant\"``.\r\n\r\n            - ``\"constant``\": gives equal weight to all predictions.\r\n            - ``\"gaussian``\": gives less weight to predictions on edges of windows.\r\n\r\n        sigma_scale: the standard deviation coefficient of the Gaussian window when `mode` is ``\"gaussian\"``.\r\n            Default: 0.125. Actual window sigma is ``sigma_scale`` * ``dim_size``.\r\n            When sigma_scale is a sequence of floats, the values denote sigma_scale at the corresponding\r\n            spatial dimensions.\r\n        padding_mode: {``\"constant\"``, ``\"reflect\"``, ``\"replicate\"``, ``\"circular\"``}\r\n            Padding mode for ``inputs``, when ``roi_size`` is larger than inputs. Defaults to ``\"constant\"``\r\n            See also: https://pytorch.org/docs/stable/generated/torch.nn.functional.pad.html\r\n        cval: fill value for 'constant' padding mode. 
Default: 0\r\n        sw_device: device for the window data.\r\n            By default the device (and accordingly the memory) of the `inputs` is used.\r\n            Normally `sw_device` should be consistent with the device where `predictor` is defined.\r\n        device: device for the stitched output prediction.\r\n            By default the device (and accordingly the memory) of the `inputs` is used. If for example\r\n            set to device=torch.device('cpu') the gpu memory consumption is less and independent of the\r\n            `inputs` and `roi_size`. Output is on the `device`.\r\n        progress: whether to print a `tqdm` progress bar.\r\n        roi_weight_map: pre-computed (non-negative) weight map for each ROI.\r\n            If not given, and ``mode`` is not `constant`, this map will be computed on the fly.\r\n        args: optional args to be passed to ``predictor``.\r\n        kwargs: optional keyword args to be passed to ``predictor``.\r\n\r\n    Note:\r\n        - input must be channel-first and have a batch dim, supports N-D sliding window.\r\n\r\n    \"\"\"\r\n    compute_dtype = inputs.dtype\r\n    num_spatial_dims = len(inputs.shape) - 2\r\n    if overlap < 0 or overlap >= 1:\r\n        raise ValueError(\"overlap must be >= 0 and < 1.\")\r\n\r\n    # determine image spatial size and batch size\r\n    # Note: all input images must have the same image size and batch size\r\n    batch_size, _, *image_size_ = inputs.shape\r\n\r\n    if device is None:\r\n        device = inputs.device\r\n    if sw_device is None:\r\n        sw_device = inputs.device\r\n\r\n    roi_size = fall_back_tuple(roi_size, image_size_)\r\n    # in case that image size is smaller than roi size\r\n    image_size = tuple(max(image_size_[i], roi_size[i]) for i in range(num_spatial_dims))\r\n    pad_size = []\r\n    for k in range(len(inputs.shape) - 1, 1, -1):\r\n        diff = max(roi_size[k - 2] - inputs.shape[k], 0)\r\n        half = diff // 2\r\n        
pad_size.extend([half, diff - half])\r\n    inputs = F.pad(inputs, pad=pad_size, mode=look_up_option(padding_mode, PytorchPadMode).value, value=cval)\r\n    # inputs2 = F.pad(inputs2, pad=pad_size, mode=look_up_option(padding_mode, PytorchPadMode).value, value=cval)\r\n\r\n    scan_interval = _get_scan_interval(image_size, roi_size, num_spatial_dims, overlap)\r\n\r\n    # Store all slices in list\r\n    slices = dense_patch_slices(image_size, roi_size, scan_interval)\r\n    num_win = len(slices)  # number of windows per image\r\n    total_slices = num_win * batch_size  # total number of windows\r\n\r\n    # Create window-level importance map\r\n    valid_patch_size = get_valid_patch_size(image_size, roi_size)\r\n    if valid_patch_size == roi_size and (roi_weight_map is not None):\r\n        importance_map = roi_weight_map\r\n    else:\r\n        try:\r\n            importance_map = compute_importance_map(valid_patch_size, mode=mode, sigma_scale=sigma_scale, device=device)\r\n        except BaseException as e:\r\n            raise RuntimeError(\r\n                \"Seems to be OOM. 
Please try smaller patch size or mode='constant' instead of mode='gaussian'.\"\r\n            ) from e\r\n    importance_map = convert_data_type(importance_map, torch.Tensor, device, compute_dtype)[0]  # type: ignore\r\n    # handle non-positive weights\r\n    min_non_zero = max(importance_map[importance_map != 0].min().item(), 1e-3)\r\n    importance_map = torch.clamp(importance_map.to(torch.float32), min=min_non_zero).to(compute_dtype)\r\n\r\n    # Perform predictions\r\n    dict_key, output_image_list_1, output_image_list_2, count_map_list = None, [], [], []\r\n    _initialized_ss = -1\r\n    is_tensor_output = True  # whether the predictor's output is a tensor (instead of dict/tuple)\r\n\r\n    # for each patch\r\n    for slice_g in tqdm(range(0, total_slices, sw_batch_size)) if progress else range(0, total_slices, sw_batch_size):\r\n        slice_range = range(slice_g, min(slice_g + sw_batch_size, total_slices))\r\n        unravel_slice = [\r\n            [slice(int(idx / num_win), int(idx / num_win) + 1), slice(None)] + list(slices[idx % num_win])\r\n            for idx in slice_range\r\n        ]\r\n        window_data = torch.cat([inputs[win_slice] for win_slice in unravel_slice]).to(sw_device)\r\n        view_list = [view, (view + 1) % len(permutation_transforms)]\r\n        window_data_list = [get_permute_transform(0, dst)(window_data) for dst in view_list]\r\n        # window_data_2 = torch.cat([inputs2[win_slice] for win_slice in unravel_slice]).to(sw_device)\r\n        seg_prob_out_1, seg_prob_out_2 = predictor(window_data_list[0], window_data_list[1], view_list, *args, **kwargs)  # batched patch segmentation\r\n        seg_prob_out_1, seg_prob_out_2 = permute_inverse([seg_prob_out_1, seg_prob_out_2], view_list)\r\n\r\n        # convert seg_prob_out to tuple seg_prob_tuple, this does not allocate new memory.\r\n        seg_prob_tuple_1: Tuple[torch.Tensor, ...]\r\n        seg_prob_tuple_2: Tuple[torch.Tensor, ...]\r\n  
      if isinstance(seg_prob_out_1, torch.Tensor):\r\n            seg_prob_tuple_1 = (seg_prob_out_1,)\r\n            seg_prob_tuple_2 = (seg_prob_out_2,)\r\n        elif isinstance(seg_prob_out_1, Mapping):\r\n            if dict_key is None:\r\n                dict_key = sorted(seg_prob_out_1.keys())  # track predictor's output keys\r\n            seg_prob_tuple_1 = tuple(seg_prob_out_1[k] for k in dict_key)\r\n            seg_prob_tuple_2 = tuple(seg_prob_out_2[k] for k in dict_key)\r\n            is_tensor_output = False\r\n        else:\r\n            seg_prob_tuple_1 = ensure_tuple(seg_prob_out_1)\r\n            seg_prob_tuple_2 = ensure_tuple(seg_prob_out_2)\r\n            is_tensor_output = False\r\n\r\n        # for each output in multi-output list\r\n        for ss in range(len(seg_prob_tuple_1)):\r\n            seg_prob_1 = seg_prob_tuple_1[ss].to(device)  # BxCxMxNxP or BxCxMxN\r\n            seg_prob_2 = seg_prob_tuple_2[ss].to(device)\r\n\r\n            # compute zoom scale: out_roi_size/in_roi_size\r\n            zoom_scale = []\r\n            for axis, (img_s_i, out_w_i, in_w_i) in enumerate(\r\n                zip(image_size, seg_prob_1.shape[2:], window_data.shape[2:])\r\n            ):\r\n                _scale = out_w_i / float(in_w_i)\r\n                if not (img_s_i * _scale).is_integer():\r\n                    warnings.warn(\r\n                        f\"For spatial axis: {axis}, output[{ss}] will have non-integer shape. Spatial \"\r\n                        f\"zoom_scale between output[{ss}] and input is {_scale}. Please pad inputs.\"\r\n                    )\r\n                zoom_scale.append(_scale)\r\n\r\n            if _initialized_ss < ss:  # init. 
the ss-th buffer at the first iteration\r\n                # construct multi-resolution outputs\r\n                output_classes = seg_prob_1.shape[1]\r\n                output_shape = [batch_size, output_classes] + [\r\n                    int(image_size_d * zoom_scale_d) for image_size_d, zoom_scale_d in zip(image_size, zoom_scale)\r\n                ]\r\n                # allocate memory to store the full output and the count for overlapping parts\r\n                output_image_list_1.append(torch.zeros(output_shape, dtype=compute_dtype, device=device))\r\n                output_image_list_2.append(torch.zeros(output_shape, dtype=compute_dtype, device=device))\r\n                count_map_list.append(torch.zeros([1, 1] + output_shape[2:], dtype=compute_dtype, device=device))\r\n                _initialized_ss += 1\r\n\r\n            # resizing the importance_map\r\n            resizer = Resize(spatial_size=seg_prob_1.shape[2:], mode=\"nearest\", anti_aliasing=False)\r\n\r\n            # store the result in the proper location of the full output. Apply weights from importance map.\r\n            for idx, original_idx in zip(slice_range, unravel_slice):\r\n                # zoom roi\r\n                original_idx_zoom = list(original_idx)  # 4D for 2D image, 5D for 3D image\r\n                for axis in range(2, len(original_idx_zoom)):\r\n                    zoomed_start = original_idx[axis].start * zoom_scale[axis - 2]\r\n                    zoomed_end = original_idx[axis].stop * zoom_scale[axis - 2]\r\n                    if not zoomed_start.is_integer() or (not zoomed_end.is_integer()):\r\n                        warnings.warn(\r\n                            f\"For axis-{axis-2} of output[{ss}], the output roi range is not int. \"\r\n                            f\"Input roi range is ({original_idx[axis].start}, {original_idx[axis].stop}). \"\r\n                            f\"Spatial zoom_scale between output[{ss}] and input is {zoom_scale[axis - 2]}. 
\"\r\n                            f\"Corresponding output roi range is ({zoomed_start}, {zoomed_end}).\\n\"\r\n                            f\"Please change overlap ({overlap}) or roi_size ({roi_size[axis-2]}) for axis-{axis-2}. \"\r\n                            \"Tips: if overlap*roi_size*zoom_scale is an integer, it usually works.\"\r\n                        )\r\n                    original_idx_zoom[axis] = slice(int(zoomed_start), int(zoomed_end), None)\r\n                importance_map_zoom = resizer(importance_map.unsqueeze(0))[0].to(compute_dtype)\r\n                # store results and weights\r\n                output_image_list_1[ss][original_idx_zoom] += importance_map_zoom * seg_prob_1[idx - slice_g]\r\n                output_image_list_2[ss][original_idx_zoom] += importance_map_zoom * seg_prob_2[idx - slice_g]\r\n                count_map_list[ss][original_idx_zoom] += (\r\n                    importance_map_zoom.unsqueeze(0).unsqueeze(0).expand(count_map_list[ss][original_idx_zoom].shape)\r\n                )\r\n\r\n    # account for any overlapping sections\r\n    for ss in range(len(output_image_list_1)):\r\n        count_map_pop = count_map_list.pop(0)\r\n        output_image_list_1[ss] = (output_image_list_1[ss] / count_map_pop).to(compute_dtype)\r\n        output_image_list_2[ss] = (output_image_list_2[ss] / count_map_pop).to(compute_dtype)\r\n\r\n    # remove padding if image_size smaller than roi_size\r\n    for ss in range(len(output_image_list_1)):\r\n        output_i_1, output_i_2 = output_image_list_1[ss], output_image_list_2[ss]\r\n        if torch.isnan(output_i_1).any() or torch.isinf(output_i_1).any():\r\n            warnings.warn(\"Sliding window inference results contain NaN or Inf.\")\r\n        if torch.isnan(output_i_2).any() or torch.isinf(output_i_2).any():\r\n            warnings.warn(\"Sliding window inference results contain NaN or Inf.\")\r\n\r\n        zoom_scale = [\r\n            seg_prob_map_shape_d / roi_size_d for 
seg_prob_map_shape_d, roi_size_d in zip(output_i_1.shape[2:], roi_size)\r\n        ]\r\n\r\n        final_slicing: List[slice] = []\r\n        for sp in range(num_spatial_dims):\r\n            slice_dim = slice(pad_size[sp * 2], image_size_[num_spatial_dims - sp - 1] + pad_size[sp * 2])\r\n            slice_dim = slice(\r\n                int(round(slice_dim.start * zoom_scale[num_spatial_dims - sp - 1])),\r\n                int(round(slice_dim.stop * zoom_scale[num_spatial_dims - sp - 1])),\r\n            )\r\n            final_slicing.insert(0, slice_dim)\r\n        while len(final_slicing) < len(output_i_1.shape):\r\n            final_slicing.insert(0, slice(None))\r\n        output_image_list_1[ss] = output_i_1[final_slicing]\r\n        output_image_list_2[ss] = output_i_2[final_slicing]\r\n\r\n    if dict_key is not None:  # if output of predictor is a dict\r\n        final_output_1 = dict(zip(dict_key, output_image_list_1))\r\n        final_output_2 = dict(zip(dict_key, output_image_list_2))\r\n    else:\r\n        final_output_1 = tuple(output_image_list_1)  # type: ignore\r\n        final_output_2 = tuple(output_image_list_2)  # type: ignore\r\n    final_output_1 = final_output_1[0] if is_tensor_output else final_output_1  # type: ignore\r\n    final_output_2 = final_output_2[0] if is_tensor_output else final_output_2  # type: ignore\r\n    return final_output_1, final_output_2\r\n\r\n\r\ndef one_hot(labels: torch.Tensor, num_classes: int, dtype: torch.dtype = torch.float, dim: int = 1) -> torch.Tensor:\r\n    \"\"\"\r\n    For every value v in `labels`, the value in the output will be either 1 or 0. 
Each vector along the `dim`-th\r\n    dimension has the \"one-hot\" format, i.e., it has a total length of `num_classes`,\r\n    with a one and `num_class-1` zeros.\r\n    Note that this will include the background label, thus a binary mask should be treated as having two classes.\r\n\r\n    Args:\r\n        labels: input tensor of integers to be converted into the 'one-hot' format. Internally `labels` will be\r\n            converted into integers `labels.long()`.\r\n        num_classes: number of output channels, the corresponding length of `labels[dim]` will be converted to\r\n            `num_classes` from `1`.\r\n        dtype: the data type of the output one_hot label.\r\n        dim: the dimension to be converted to `num_classes` channels from `1` channel, should be non-negative number.\r\n\r\n    Example:\r\n\r\n    For a tensor `labels` of dimensions [B]1[spatial_dims], return a tensor of dimensions `[B]N[spatial_dims]`\r\n    when `num_classes=N` number of classes and `dim=1`.\r\n\r\n    .. 
code-block:: python\r\n\r\n        from monai.networks.utils import one_hot\r\n        import torch\r\n\r\n        a = torch.randint(0, 2, size=(1, 2, 2, 2))\r\n        out = one_hot(a, num_classes=2, dim=0)\r\n        print(out.shape)  # torch.Size([2, 2, 2, 2])\r\n\r\n        a = torch.randint(0, 2, size=(2, 1, 2, 2, 2))\r\n        out = one_hot(a, num_classes=2, dim=1)\r\n        print(out.shape)  # torch.Size([2, 2, 2, 2, 2])\r\n\r\n    \"\"\"\r\n\r\n    # if `dim` is bigger, add singleton dim at the end\r\n    if labels.ndim < dim + 1:\r\n        shape = list(labels.shape) + [1] * (dim + 1 - len(labels.shape))\r\n        labels = torch.reshape(labels, shape)\r\n\r\n    sh = list(labels.shape)\r\n\r\n    if sh[dim] != 1:\r\n        raise AssertionError(\"labels should have a channel with length equal to one.\")\r\n\r\n    sh[dim] = num_classes\r\n\r\n    o = torch.zeros(size=sh, dtype=dtype, device=labels.device)\r\n    labels = o.scatter_(dim=dim, index=labels.long(), value=1)\r\n\r\n    return labels\r\n\r\n\r\n\"\"\"View operations.\"\"\"\r\n\r\nfrom typing import Sequence, Tuple\r\n\r\n\r\n\"\"\"View operations.\r\n\r\nInput format: [B, C, X, Y, Z, ...]\r\n\r\nNOTE(meijieru): 0 is reserved for identify transform.\r\n\"\"\"\r\n\r\nfrom typing import Callable, Sequence, Union\r\n\r\nimport enum\r\n\r\nimport torch\r\n\r\nRotateType = int\r\nPermuteType = int\r\nTransformFuncType = Callable[[torch.Tensor], torch.Tensor]\r\n# A composition of multiple view transoforms.\r\nTransformsType = Sequence[Union[PermuteType, RotateType]]\r\n\r\n\r\nclass GroupName(enum.Enum):\r\n\r\n    ROTATE = 1\r\n    PERMUTE = 2\r\n\r\n\r\nDEFAULT_ORDER = (GroupName.ROTATE, GroupName.PERMUTE)\r\n\r\nrotation_transforms = {\r\n    0: lambda x: x,\r\n    1: lambda x: x.rot90(1, (3, 4)),\r\n    2: lambda x: x.rot90(2, (3, 4)),\r\n    3: lambda x: x.rot90(3, (3, 4)),\r\n}\r\nrotation_inverse_transforms = {\r\n    0: lambda x: x,\r\n    1: lambda x: x.rot90(3, (3, 4)),\r\n    2: lambda 
x: x.rot90(2, (3, 4)),\r\n    3: lambda x: x.rot90(1, (3, 4)),\r\n}\r\npermutation_transforms = {\r\n    0: lambda x: x,\r\n    1: lambda x: x.permute(0, 1, 3, 2, 4),\r\n    2: lambda x: x.permute(0, 1, 4, 3, 2),\r\n}\r\npermutation_inverse_transforms = {\r\n    0: lambda x: x,\r\n    1: lambda x: x.permute(0, 1, 3, 2, 4),\r\n    2: lambda x: x.permute(0, 1, 4, 3, 2),\r\n}\r\n\r\nall_forward_transforms = {\r\n    GroupName.ROTATE: rotation_transforms,\r\n    GroupName.PERMUTE: permutation_transforms,\r\n}\r\nall_backward_transforms = {\r\n    GroupName.ROTATE: rotation_inverse_transforms,\r\n    GroupName.PERMUTE: permutation_inverse_transforms,\r\n}\r\n\r\n\r\ndef get_transforms_func(views: TransformsType,\r\n                        orders: Sequence[GroupName] = DEFAULT_ORDER,\r\n                        inverse: bool = False) -> TransformFuncType:\r\n    \"\"\"Gets sequential transform functions.\"\"\"\r\n    if len(views) != len(orders):\r\n        raise ValueError()\r\n\r\n    all_transforms = (all_forward_transforms\r\n                      if not inverse else all_backward_transforms)\r\n    funcs = [\r\n        all_transforms[group_name][view]\r\n        for view, group_name in zip(views, orders)\r\n    ]\r\n    funcs = funcs if not inverse else funcs[::-1]\r\n\r\n    def aux(val):\r\n        for func in funcs:\r\n            val = func(val)\r\n        return val\r\n\r\n    return aux\r\n\r\n\r\nimport torch\r\nimport numpy as np\r\n\r\n\r\ndef get_permute_transform(view_src: PermuteType,\r\n                          view_dst: PermuteType) -> TransformFuncType:\r\n    \"\"\"Gets transform function from view src to view dst.\"\"\"\r\n\r\n    def transform(x: torch.Tensor) -> torch.Tensor:\r\n        x_view_0 = permutation_inverse_transforms[view_src](x)\r\n        return permutation_transforms[view_dst](\r\n            x_view_0).contiguous()\r\n\r\n    return transform\r\n\r\n\r\ndef permute_inverse(xs: 
Sequence[torch.Tensor],\r\n                    views: Sequence[PermuteType]) -> Sequence[torch.Tensor]:\r\n    \"\"\"Transforms data back to origin view.\"\"\"\r\n    return [get_permute_transform(view, 0)(x) for x, view in zip(xs, views)]\r\n\r\n\r\ndef permute_rand(\r\n    x: torch.Tensor,\r\n    num_samples: int = 2\r\n) -> Tuple[Sequence[torch.Tensor], Sequence[PermuteType]]:\r\n    \"\"\"Samples different transforms of data.\"\"\"\r\n    num_permutes = len(view_transforms.permutation_transforms)\r\n    if num_samples > num_permutes:\r\n        raise ValueError('Duplicate samples.')\r\n    view_dsts = np.random.permutation(num_permutes)[:num_samples].tolist()\r\n    return [get_permute_transform(0, view)(x) for view in view_dsts], view_dsts"
  },
  {
    "path": "Finetune/Flare22/main.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport argparse\nimport os\nfrom functools import partial\nimport logging\nimport numpy as np\nimport torch\nimport torch.distributed as dist\nimport torch.multiprocessing as mp\nimport torch.nn.parallel\nimport torch.utils.data.distributed\nfrom optimizers.lr_scheduler import LinearWarmupCosineAnnealingLR\nfrom trainer import run_training\nfrom utils.data_utils import get_loader\nimport torch.nn as nn\nfrom monai.inferers import sliding_window_inference\nfrom monai.losses import DiceCELoss\nfrom monai.metrics import DiceMetric\nfrom monai.networks.nets import SwinUNETR\nfrom monai.transforms import Activations, AsDiscrete, Compose\nfrom monai.utils.enums import MetricReduction\nfrom monai.networks.blocks import PatchEmbed, UnetOutBlock, UnetrBasicBlock, UnetrUpBlock\nfrom monai.networks.nets.swin_unetr import SwinTransformer as SwinViT\nfrom monai.utils import ensure_tuple_rep\n\nos.environ['CUDA_VISIBLE_DEVICES'] = \"4\"\nos.environ['MASTER_ADDR'] = 'localhost'\nos.environ['MASTER_PORT'] = '28890'\n\nimport resource\n\nrlimit = resource.getrlimit(resource.RLIMIT_NOFILE)\nresource.setrlimit(resource.RLIMIT_NOFILE, (8192, rlimit[1]))\nprint('Setting resource limit:', str(resource.getrlimit(resource.RLIMIT_NOFILE)))\n\nparser = argparse.ArgumentParser(description=\"Swin UNETR segmentation pipeline\")\nparser.add_argument(\"--checkpoint\", default=None, help=\"start 
training from saved checkpoint\")\nparser.add_argument(\"--logdir\", default=\"logs\", type=str, help=\"directory to save the tensorboard logs\")\nparser.add_argument(\n    \"--pretrained_dir\", default=\"./pretrained_models/\", type=str, help=\"pretrained checkpoint directory\"\n)\nparser.add_argument(\"--data_dir\", default=\"/data/linshan/CTs/Flare22/\", type=str, help=\"dataset directory\")\nparser.add_argument(\"--json_list\", default=\"dataset.json\", type=str, help=\"dataset json file\")\nparser.add_argument(\n    \"--pretrained_checkpoint\",default=\"VoCo_10k.pt\", type=str, help=\"VoCo_10k pretrained model\")\nparser.add_argument(\n    \"--pretrained_model_name\",\n    default=\"model_bestVal.pt\",\n    type=str,\n    help=\"pretrained model name\",\n)\nroi = 96\nparser.add_argument(\"--save_checkpoint\", default=True, help=\"save checkpoint during training\")\nparser.add_argument(\"--max_epochs\", default=3000, type=int, help=\"max number of training epochs\")\nparser.add_argument(\"--batch_size\", default=1, type=int, help=\"number of batch size\")\nparser.add_argument(\"--sw_batch_size\", default=16, type=int, help=\"number of sliding window batch size\")\nparser.add_argument(\"--optim_lr\", default=3e-4, type=float, help=\"optimization learning rate\")\nparser.add_argument(\"--optim_name\", default=\"adamw\", type=str, help=\"optimization algorithm\")\nparser.add_argument(\"--reg_weight\", default=0.005, type=float, help=\"regularization weight\")\nparser.add_argument(\"--momentum\", default=0.99, type=float, help=\"momentum\")\nparser.add_argument(\"--noamp\", default=False, help=\"do NOT use amp for training\")\nparser.add_argument(\"--val_every\", default=50, type=int, help=\"validation frequency\")\nparser.add_argument(\"--distributed\", action=\"store_true\", help=\"start distributed training\")\nparser.add_argument(\"--world_size\", default=1, type=int, help=\"number of nodes for distributed training\")\nparser.add_argument(\"--rank\", default=0, 
type=int, help=\"node rank for distributed training\")\nparser.add_argument(\"--dist-url\", default=\"tcp://127.0.0.1:23456\", type=str, help=\"distributed url\")\nparser.add_argument(\"--dist-backend\", default=\"nccl\", type=str, help=\"distributed backend\")\nparser.add_argument(\"--norm_name\", default=\"instance\", type=str, help=\"normalization name\")\nparser.add_argument(\"--workers\", default=8, type=int, help=\"number of workers\")\nparser.add_argument(\"--feature_size\", default=48, type=int, help=\"feature size\")\nparser.add_argument(\"--in_channels\", default=1, type=int, help=\"number of input channels\")\nparser.add_argument(\"--out_channels\", default=14, type=int, help=\"number of output channels\")\nparser.add_argument(\"--use_normal_dataset\", default=True, help=\"use monai Dataset class\")\nparser.add_argument(\"--a_min\", default=-175.0, type=float, help=\"a_min in ScaleIntensityRanged\")\nparser.add_argument(\"--a_max\", default=250.0, type=float, help=\"a_max in ScaleIntensityRanged\")\nparser.add_argument(\"--b_min\", default=0.0, type=float, help=\"b_min in ScaleIntensityRanged\")\nparser.add_argument(\"--b_max\", default=1.0, type=float, help=\"b_max in ScaleIntensityRanged\")\nparser.add_argument(\"--space_x\", default=1.5, type=float, help=\"spacing in x direction\")\nparser.add_argument(\"--space_y\", default=1.5, type=float, help=\"spacing in y direction\")\nparser.add_argument(\"--space_z\", default=1.5, type=float, help=\"spacing in z direction\")\nparser.add_argument(\"--roi_x\", default=roi, type=int, help=\"roi size in x direction\")\nparser.add_argument(\"--roi_y\", default=roi, type=int, help=\"roi size in y direction\")\nparser.add_argument(\"--roi_z\", default=roi, type=int, help=\"roi size in z direction\")\nparser.add_argument(\"--dropout_rate\", default=0.0, type=float, help=\"dropout rate\")\nparser.add_argument(\"--dropout_path_rate\", default=0.0, type=float, help=\"drop path 
rate\")\nparser.add_argument(\"--RandFlipd_prob\", default=0.2, type=float, help=\"RandFlipd aug probability\")\nparser.add_argument(\"--RandRotate90d_prob\", default=0.2, type=float, help=\"RandRotate90d aug probability\")\nparser.add_argument(\"--RandScaleIntensityd_prob\", default=0.1, type=float, help=\"RandScaleIntensityd aug probability\")\nparser.add_argument(\"--RandShiftIntensityd_prob\", default=0.1, type=float, help=\"RandShiftIntensityd aug probability\")\nparser.add_argument(\"--infer_overlap\", default=0.75, type=float, help=\"sliding window inference overlap\")\nparser.add_argument(\"--lrschedule\", default=\"warmup_cosine\", type=str, help=\"type of learning rate scheduler\")\nparser.add_argument(\"--warmup_epochs\", default=100, type=int, help=\"number of warmup epochs\")\nparser.add_argument(\"--resume_ckpt\", action=\"store_true\", help=\"resume training from pretrained checkpoint\")\nparser.add_argument(\"--smooth_dr\", default=1e-6, type=float, help=\"constant added to dice denominator to avoid nan\")\nparser.add_argument(\"--smooth_nr\", default=0.0, type=float, help=\"constant added to dice numerator to avoid zero\")\nparser.add_argument(\"--use_checkpoint\", default=True, help=\"use gradient checkpointing to save memory\")\nparser.add_argument(\"--use_ssl_pretrained\", default=True, help=\"use self-supervised pretrained weights\")\nparser.add_argument(\"--spatial_dims\", default=3, type=int, help=\"spatial dimension of input data\")\nparser.add_argument(\"--squared_dice\", action=\"store_true\", help=\"use squared Dice\")\n\n\ndef main():\n    args = parser.parse_args()\n    args.amp = not args.noamp\n    if args.distributed:\n        args.ngpus_per_node = torch.cuda.device_count()\n        print(\"Found total gpus\", args.ngpus_per_node)\n        args.world_size = args.ngpus_per_node * args.world_size\n        mp.spawn(main_worker, nprocs=args.ngpus_per_node, args=(args,))\n    else:\n        main_worker(gpu=0, args=args)\n\n\ndef 
main_worker(gpu, args):\n    if args.distributed:\n        torch.multiprocessing.set_start_method(\"fork\", force=True)\n    np.set_printoptions(formatter={\"float\": \"{: 0.3f}\".format}, suppress=True)\n    args.gpu = gpu\n    if args.distributed:\n        args.rank = args.rank * args.ngpus_per_node + gpu\n        dist.init_process_group(\n            backend=args.dist_backend, init_method=args.dist_url, world_size=args.world_size, rank=args.rank\n        )\n    torch.cuda.set_device(0)\n\n    torch.backends.cudnn.enabled = True\n    torch.backends.cudnn.benchmark = True\n    args.test_mode = False\n    loader = get_loader(args)\n    print(args.rank, \" gpu\", args.gpu)\n    if args.rank == 0:\n        print(\"Batch size is:\", args.batch_size, \"epochs\", args.max_epochs)\n    inf_size = [args.roi_x, args.roi_y, args.roi_z]\n\n    if args.rank == 0:\n        os.makedirs(args.logdir, exist_ok=True)\n    logger = init_log('global', logging.INFO)\n    logger.propagate = 0\n\n    pretrained_dir = args.pretrained_dir\n    model = SwinUNETR(\n        img_size=(args.roi_x, args.roi_y, args.roi_z),\n        in_channels=args.in_channels,\n        out_channels=args.out_channels,\n        feature_size=args.feature_size,\n        drop_rate=0.0,\n        attn_drop_rate=0.0,\n        dropout_path_rate=args.dropout_path_rate,\n        use_checkpoint=args.use_checkpoint,\n        use_v2=True\n    )\n\n    if args.resume_ckpt:\n        model_dict = torch.load(os.path.join(pretrained_dir, args.pretrained_model_name))[\"state_dict\"]\n        model.load_state_dict(model_dict)\n        print(\"Use pretrained weights\")\n\n    if args.use_ssl_pretrained:\n        try:\n            # model_VoCoEMA.pt\n            # model_dict = torch.load(\"./pretrained_models/supervised_suprem_swinunetr_2100.pth\", map_location=torch.device('cpu'))\n            # model_dict = torch.load(\"./pretrained_models/model_VoCoEMA.pt\", map_location=torch.device('cpu'))\n            model_dict = 
torch.load(args.pretrained_checkpoint,\n                                    map_location=torch.device('cpu'))\n\n            state_dict = model_dict\n            # state_dict = model_dict['net']\n            # fix potential differences in state dict keys from pre-training to\n            # fine-tuning\n            if \"module.\" in list(state_dict.keys())[0]:\n                print(\"Tag 'module.' found in state dict - fixing!\")\n                for key in list(state_dict.keys()):\n                    state_dict[key.replace(\"module.\", \"\")] = state_dict.pop(key)\n            if \"swin_vit\" in list(state_dict.keys())[0]:\n                print(\"Tag 'swin_vit' found in state dict - fixing!\")\n                for key in list(state_dict.keys()):\n                    state_dict[key.replace(\"swin_vit\", \"swinViT\")] = state_dict.pop(key)\n            # We now load model weights, setting param `strict` to False, i.e.:\n            # this load the encoder weights (Swin-ViT, SSL pre-trained), but leaves\n            # the decoder weights untouched (CNN UNet decoder).\n            model.load_state_dict(state_dict, strict=False)\n            print(\"Using pretrained voco ema self-supervised Swin UNETR backbone weights !\")\n        except ValueError:\n            raise ValueError(\"Self-supervised pre-trained weights not available for\" + str(args.model_name))\n\n    if args.squared_dice:\n        dice_loss = DiceCELoss(\n            to_onehot_y=True, softmax=True, squared_pred=True, smooth_nr=args.smooth_nr, smooth_dr=args.smooth_dr\n        )\n    else:\n        dice_loss = DiceCELoss(include_background=False, to_onehot_y=True, softmax=True)\n\n    post_label = AsDiscrete(to_onehot=args.out_channels)\n    post_pred = AsDiscrete(argmax=True, to_onehot=args.out_channels)\n    dice_acc = DiceMetric(include_background=False, reduction=MetricReduction.MEAN, get_not_nans=True)\n    model_inferer = partial(\n        sliding_window_inference,\n        roi_size=inf_size,\n  
      sw_batch_size=args.sw_batch_size,\n        predictor=model,\n        overlap=args.infer_overlap,\n    )\n\n    pytorch_total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)\n    print(\"Total parameters count\", pytorch_total_params)\n\n    best_acc = 0\n    start_epoch = 0\n\n    if args.checkpoint is not None:\n        checkpoint = torch.load(args.checkpoint, map_location=\"cpu\")\n        from collections import OrderedDict\n\n        new_state_dict = OrderedDict()\n        for k, v in checkpoint[\"state_dict\"].items():\n            new_state_dict[k.replace(\"backbone.\", \"\")] = v\n        model.load_state_dict(new_state_dict, strict=False)\n        if \"epoch\" in checkpoint:\n            start_epoch = checkpoint[\"epoch\"]\n        if \"best_acc\" in checkpoint:\n            best_acc = checkpoint[\"best_acc\"]\n        print(\"=> loaded checkpoint '{}' (epoch {}) (bestacc {})\".format(args.checkpoint, start_epoch, best_acc))\n\n    model.cuda(args.gpu)\n\n    if args.distributed:\n        torch.cuda.set_device(args.gpu)\n        if args.norm_name == \"batch\":\n            model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model)\n        model.cuda(args.gpu)\n        model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu], output_device=args.gpu)\n    if args.optim_name == \"adam\":\n        optimizer = torch.optim.Adam(model.parameters(), lr=args.optim_lr, weight_decay=args.reg_weight)\n\n    elif args.optim_name == \"adamw\":\n        optimizer = torch.optim.AdamW(model.parameters(), lr=args.optim_lr, weight_decay=args.reg_weight)\n        # optimizer = torch.optim.AdamW(model.parameters(), lr=args.optim_lr, amsgrad=True)\n\n    elif args.optim_name == \"sgd\":\n        optimizer = torch.optim.SGD(\n            model.parameters(), lr=args.optim_lr, momentum=args.momentum, nesterov=True, weight_decay=args.reg_weight\n        )\n    else:\n        raise ValueError(\"Unsupported Optimization Procedure: 
\" + str(args.optim_name))\n\n    if args.lrschedule == \"warmup_cosine\":\n        scheduler = LinearWarmupCosineAnnealingLR(\n            optimizer, warmup_epochs=args.warmup_epochs, max_epochs=args.max_epochs\n        )\n    elif args.lrschedule == \"cosine_anneal\":\n        scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=args.max_epochs)\n        if args.checkpoint is not None:\n            scheduler.step(epoch=start_epoch)\n    else:\n        scheduler = None\n    accuracy = run_training(\n        model=model,\n        train_loader=loader[0],\n        val_loader=loader[1],\n        optimizer=optimizer,\n        loss_func=dice_loss,\n        acc_func=dice_acc,\n        args=args,\n        model_inferer=model_inferer,\n        scheduler=scheduler,\n        start_epoch=start_epoch,\n        post_label=post_label,\n        post_pred=post_pred,\n    )\n    return accuracy\n\n\nlogs = set()\n\n\ndef init_log(name, level=logging.INFO):\n    if (name, level) in logs:\n        return\n    logs.add((name, level))\n    logger = logging.getLogger(name)\n    logger.setLevel(level)\n    ch = logging.StreamHandler()\n    ch.setLevel(level)\n    if \"SLURM_PROCID\" in os.environ:\n        rank = int(os.environ[\"SLURM_PROCID\"])\n        logger.addFilter(lambda record: rank == 0)\n    else:\n        rank = 0\n    format_str = \"[%(asctime)s][%(levelname)8s] %(message)s\"\n    formatter = logging.Formatter(format_str)\n    ch.setFormatter(formatter)\n    logger.addHandler(ch)\n    return logger\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },
  {
    "path": "Finetune/Flare22/optimizers/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/Flare22/optimizers/lr_scheduler.py",
    "content": "# Copyright 2020 - 2021 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nimport warnings\nfrom typing import List\n\nfrom torch import nn as nn\nfrom torch.optim import Adam, Optimizer\nfrom torch.optim.lr_scheduler import LambdaLR, _LRScheduler\n\n__all__ = [\"LinearLR\", \"ExponentialLR\"]\n\n\nclass _LRSchedulerMONAI(_LRScheduler):\n    \"\"\"Base class for increasing the learning rate between two boundaries over a number\n    of iterations\"\"\"\n\n    def __init__(self, optimizer: Optimizer, end_lr: float, num_iter: int, last_epoch: int = -1) -> None:\n        \"\"\"\n        Args:\n            optimizer: wrapped optimizer.\n            end_lr: the final learning rate.\n            num_iter: the number of iterations over which the test occurs.\n            last_epoch: the index of last epoch.\n        Returns:\n            None\n        \"\"\"\n        self.end_lr = end_lr\n        self.num_iter = num_iter\n        super(_LRSchedulerMONAI, self).__init__(optimizer, last_epoch)\n\n\nclass LinearLR(_LRSchedulerMONAI):\n    \"\"\"Linearly increases the learning rate between two boundaries over a number of\n    iterations.\n    \"\"\"\n\n    def get_lr(self):\n        r = self.last_epoch / (self.num_iter - 1)\n        return [base_lr + r * (self.end_lr - base_lr) for base_lr in self.base_lrs]\n\n\nclass ExponentialLR(_LRSchedulerMONAI):\n    \"\"\"Exponentially increases the learning rate between two boundaries over a 
number of\n    iterations.\n    \"\"\"\n\n    def get_lr(self):\n        r = self.last_epoch / (self.num_iter - 1)\n        return [base_lr * (self.end_lr / base_lr) ** r for base_lr in self.base_lrs]\n\n\nclass WarmupCosineSchedule(LambdaLR):\n    \"\"\"Linear warmup and then cosine decay.\n    Based on https://huggingface.co/ implementation.\n    \"\"\"\n\n    def __init__(\n        self, optimizer: Optimizer, warmup_steps: int, t_total: int, cycles: float = 0.5, last_epoch: int = -1\n    ) -> None:\n        \"\"\"\n        Args:\n            optimizer: wrapped optimizer.\n            warmup_steps: number of warmup iterations.\n            t_total: total number of training iterations.\n            cycles: cosine cycles parameter.\n            last_epoch: the index of last epoch.\n        Returns:\n            None\n        \"\"\"\n        self.warmup_steps = warmup_steps\n        self.t_total = t_total\n        self.cycles = cycles\n        super(WarmupCosineSchedule, self).__init__(optimizer, self.lr_lambda, last_epoch)\n\n    def lr_lambda(self, step):\n        if step < self.warmup_steps:\n            return float(step) / float(max(1.0, self.warmup_steps))\n        progress = float(step - self.warmup_steps) / float(max(1, self.t_total - self.warmup_steps))\n        return max(0.0, 0.5 * (1.0 + math.cos(math.pi * float(self.cycles) * 2.0 * progress)))\n\n\nclass LinearWarmupCosineAnnealingLR(_LRScheduler):\n    def __init__(\n        self,\n        optimizer: Optimizer,\n        warmup_epochs: int,\n        max_epochs: int,\n        warmup_start_lr: float = 0.0,\n        eta_min: float = 0.0,\n        last_epoch: int = -1,\n    ) -> None:\n        \"\"\"\n        Args:\n            optimizer (Optimizer): Wrapped optimizer.\n            warmup_epochs (int): Maximum number of iterations for linear warmup\n            max_epochs (int): Maximum number of iterations\n            warmup_start_lr (float): Learning rate to start the linear warmup. 
Default: 0.\n            eta_min (float): Minimum learning rate. Default: 0.\n            last_epoch (int): The index of last epoch. Default: -1.\n        \"\"\"\n        self.warmup_epochs = warmup_epochs\n        self.max_epochs = max_epochs\n        self.warmup_start_lr = warmup_start_lr\n        self.eta_min = eta_min\n\n        super(LinearWarmupCosineAnnealingLR, self).__init__(optimizer, last_epoch)\n\n    def get_lr(self) -> List[float]:\n        \"\"\"\n        Compute learning rate using chainable form of the scheduler\n        \"\"\"\n        if not self._get_lr_called_within_step:\n            warnings.warn(\n                \"To get the last learning rate computed by the scheduler, \" \"please use `get_last_lr()`.\", UserWarning\n            )\n\n        if self.last_epoch == 0:\n            return [self.warmup_start_lr] * len(self.base_lrs)\n        elif self.last_epoch < self.warmup_epochs:\n            return [\n                group[\"lr\"] + (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)\n                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)\n            ]\n        elif self.last_epoch == self.warmup_epochs:\n            return self.base_lrs\n        elif (self.last_epoch - 1 - self.max_epochs) % (2 * (self.max_epochs - self.warmup_epochs)) == 0:\n            return [\n                group[\"lr\"]\n                + (base_lr - self.eta_min) * (1 - math.cos(math.pi / (self.max_epochs - self.warmup_epochs))) / 2\n                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)\n            ]\n\n        return [\n            (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))\n            / (\n                1\n                + math.cos(\n                    math.pi * (self.last_epoch - self.warmup_epochs - 1) / (self.max_epochs - self.warmup_epochs)\n                )\n            )\n            * (group[\"lr\"] - 
self.eta_min)\n            + self.eta_min\n            for group in self.optimizer.param_groups\n        ]\n\n    def _get_closed_form_lr(self) -> List[float]:\n        \"\"\"\n        Called when epoch is passed as a param to the `step` function of the scheduler.\n        \"\"\"\n        if self.last_epoch < self.warmup_epochs:\n            return [\n                self.warmup_start_lr + self.last_epoch * (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)\n                for base_lr in self.base_lrs\n            ]\n\n        return [\n            self.eta_min\n            + 0.5\n            * (base_lr - self.eta_min)\n            * (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))\n            for base_lr in self.base_lrs\n        ]\n"
  },
  {
    "path": "Finetune/Flare22/train.sh",
    "content": "now=$(date +\"%Y%m%d_%H%M%S\")\nlogdir=runs/logs\nmkdir -p $logdir\n\ntorchrun --master_port=21198 main.py \\\n    --logdir $logdir | tee $logdir/$now.txt"
  },
  {
    "path": "Finetune/Flare22/trainer.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport os\nimport shutil\nimport time\n\nimport numpy as np\nimport torch\nimport torch.nn.parallel\nimport torch.utils.data.distributed\nfrom tensorboardX import SummaryWriter\nfrom torch.cuda.amp import GradScaler, autocast\nfrom utils.utils import AverageMeter, distributed_all_gather\n\nfrom monai.data import decollate_batch\n\n\ndef train_epoch(model, loader, optimizer, scaler, epoch, loss_func, args):\n    model.train()\n    start_time = time.time()\n    run_loss = AverageMeter()\n    for idx, batch_data in enumerate(loader):\n        if isinstance(batch_data, list):\n            data, target = batch_data\n        else:\n            data, target = batch_data[\"image\"], batch_data[\"label\"]\n        data, target = data.cuda(), target.cuda()\n        for param in model.parameters():\n            param.grad = None\n        with autocast(enabled=args.amp):\n            logits = model(data)\n            loss = loss_func(logits, target)\n            #\n        if args.amp:\n            scaler.scale(loss).backward()\n            scaler.step(optimizer)\n            scaler.update()\n        else:\n            loss.backward()\n            optimizer.step()\n        if args.distributed:\n            loss_list = distributed_all_gather([loss], out_numpy=True, is_valid=idx < loader.sampler.valid_length)\n            run_loss.update(\n                
np.mean(np.mean(np.stack(loss_list, axis=0), axis=0), axis=0), n=args.batch_size * args.world_size\n            )\n        else:\n            run_loss.update(loss.item(), n=args.batch_size)\n\n        lr = optimizer.param_groups[0][\"lr\"]\n        if args.rank == 0:\n            print(\n                \"Epoch {}/{} {}/{}\".format(epoch, args.max_epochs, idx, len(loader)),\n                \"loss: {:.4f}\".format(run_loss.avg),\n                \"lr: {:.8f}\".format(lr),\n                \"time {:.2f}s\".format(time.time() - start_time),\n            )\n        start_time = time.time()\n    for param in model.parameters():\n        param.grad = None\n    return run_loss.avg\n\n\ndef val_epoch(model, loader, epoch, acc_func, args, model_inferer=None, post_label=None, post_pred=None):\n    model.eval()\n    run_acc = AverageMeter()\n    start_time = time.time()\n    with torch.no_grad():\n        for idx, batch_data in enumerate(loader):\n            if isinstance(batch_data, list):\n                data, target = batch_data\n            else:\n                data, target = batch_data[\"image\"], batch_data[\"label\"]\n            data, target = data.cuda(), target.cuda()\n            with autocast(enabled=args.amp):\n                if model_inferer is not None:\n                    logits = model_inferer(data)\n                else:\n                    logits = model(data)\n            if not logits.is_cuda:\n                target = target.cpu()\n            val_labels_list = decollate_batch(target)\n            val_labels_convert = [post_label(val_label_tensor) for val_label_tensor in val_labels_list]\n            val_outputs_list = decollate_batch(logits)\n            val_output_convert = [post_pred(val_pred_tensor) for val_pred_tensor in val_outputs_list]\n            acc_func.reset()\n            acc_func(y_pred=val_output_convert, y=val_labels_convert)\n            acc, not_nans = acc_func.aggregate()\n            acc = acc.cuda(args.rank)\n\n            
if args.distributed:\n                acc_list, not_nans_list = distributed_all_gather(\n                    [acc, not_nans], out_numpy=True, is_valid=idx < loader.sampler.valid_length\n                )\n                for al, nl in zip(acc_list, not_nans_list):\n                    run_acc.update(al, n=nl)\n\n            else:\n                run_acc.update(acc.cpu().numpy(), n=not_nans.cpu().numpy())\n\n            if args.rank == 0:\n                avg_acc = np.mean(run_acc.avg)\n                print(\n                    \"Val {}/{} {}/{}\".format(epoch, args.max_epochs, idx, len(loader)),\n                    \"acc\",\n                    avg_acc,\n                    \"time {:.2f}s\".format(time.time() - start_time),\n                )\n            start_time = time.time()\n    return run_acc.avg\n\n\ndef save_checkpoint(model, epoch, args, filename=\"model.pt\", best_acc=0, optimizer=None, scheduler=None):\n    state_dict = model.state_dict() if not args.distributed else model.module.state_dict()\n    save_dict = {\"epoch\": epoch, \"best_acc\": best_acc, \"state_dict\": state_dict}\n    if optimizer is not None:\n        save_dict[\"optimizer\"] = optimizer.state_dict()\n    if scheduler is not None:\n        save_dict[\"scheduler\"] = scheduler.state_dict()\n    filename = os.path.join(args.logdir, filename)\n    torch.save(save_dict, filename)\n    print(\"Saving checkpoint\", filename)\n\n\ndef run_training(\n        model,\n        train_loader,\n        val_loader,\n        optimizer,\n        loss_func,\n        acc_func,\n        args,\n        model_inferer=None,\n        scheduler=None,\n        start_epoch=0,\n        post_label=None,\n        post_pred=None,\n):\n    writer = None\n    if args.logdir is not None and args.rank == 0:\n        writer = SummaryWriter(log_dir=args.logdir)\n        if args.rank == 0:\n            print(\"Writing Tensorboard logs to \", args.logdir)\n    scaler = None\n    if args.amp:\n        scaler = 
GradScaler()\n    val_acc_max = 0.0\n    for epoch in range(start_epoch, args.max_epochs):\n        if args.distributed:\n            train_loader.sampler.set_epoch(epoch)\n            torch.distributed.barrier()\n        print(args.rank, time.ctime(), \"Epoch:\", epoch)\n        epoch_time = time.time()\n        train_loss = train_epoch(\n            model, train_loader, optimizer, scaler=scaler, epoch=epoch, loss_func=loss_func, args=args\n        )\n        if args.rank == 0:\n            print(\n                \"Final training  {}/{}\".format(epoch, args.max_epochs - 1),\n                \"loss: {:.4f}\".format(train_loss),\n                \"time {:.2f}s\".format(time.time() - epoch_time),\n            )\n        if args.rank == 0 and writer is not None:\n            writer.add_scalar(\"train_loss\", train_loss, epoch)\n        b_new_best = False\n        if (epoch + 1) % args.val_every == 0:\n            if args.distributed:\n                torch.distributed.barrier()\n            epoch_time = time.time()\n            val_avg_acc = val_epoch(\n                model,\n                val_loader,\n                epoch=epoch,\n                acc_func=acc_func,\n                model_inferer=model_inferer,\n                args=args,\n                post_label=post_label,\n                post_pred=post_pred,\n            )\n\n            val_avg_acc = np.mean(val_avg_acc)\n\n            if args.rank == 0:\n                print(\n                    \"Final validation  {}/{}\".format(epoch, args.max_epochs - 1),\n                    \"acc\",\n                    val_avg_acc,\n                    \"time {:.2f}s\".format(time.time() - epoch_time),\n                )\n                if writer is not None:\n                    writer.add_scalar(\"val_acc\", val_avg_acc, epoch)\n                if val_avg_acc > val_acc_max:\n                    print(\"new best ({:.6f} --> {:.6f}). 
\".format(val_acc_max, val_avg_acc))\n                    val_acc_max = val_avg_acc\n                    b_new_best = True\n                    if args.rank == 0 and args.logdir is not None and args.save_checkpoint:\n                        save_checkpoint(\n                            model, epoch, args, best_acc=val_acc_max, optimizer=optimizer, scheduler=scheduler\n                        )\n            if args.rank == 0 and args.logdir is not None and args.save_checkpoint:\n                save_checkpoint(model, epoch, args, best_acc=val_acc_max, filename=\"model_final.pt\")\n                if b_new_best:\n                    print(\"Copying to model.pt new best model!!!!\")\n                    shutil.copyfile(os.path.join(args.logdir, \"model_final.pt\"), os.path.join(args.logdir, \"model.pt\"))\n\n        if scheduler is not None:\n            scheduler.step()\n\n    print(\"Training Finished !, Best Accuracy: \", val_acc_max)\n\n    return val_acc_max\n"
  },
  {
    "path": "Finetune/Flare22/utils/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/Flare22/utils/data_test.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nimport os\nimport pickle\nimport numpy as np\nimport torch\nimport itertools as it\nfrom monai import data, transforms\nfrom monai.data import *\n\n\nclass Sampler(torch.utils.data.Sampler):\n    def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, make_even=True):\n        if num_replicas is None:\n            if not torch.distributed.is_available():\n                raise RuntimeError(\"Requires distributed package to be available\")\n            num_replicas = torch.distributed.get_world_size()\n        if rank is None:\n            if not torch.distributed.is_available():\n                raise RuntimeError(\"Requires distributed package to be available\")\n            rank = torch.distributed.get_rank()\n        self.shuffle = shuffle\n        self.make_even = make_even\n        self.dataset = dataset\n        self.num_replicas = num_replicas\n        self.rank = rank\n        self.epoch = 0\n        self.num_samples = int(math.ceil(len(self.dataset) * 1.0 / self.num_replicas))\n        self.total_size = self.num_samples * self.num_replicas\n        indices = list(range(len(self.dataset)))\n        self.valid_length = len(indices[self.rank : self.total_size : self.num_replicas])\n\n    def __iter__(self):\n        if self.shuffle:\n            g = torch.Generator()\n            g.manual_seed(self.epoch)\n            indices = 
torch.randperm(len(self.dataset), generator=g).tolist()\n        else:\n            indices = list(range(len(self.dataset)))\n        if self.make_even:\n            if len(indices) < self.total_size:\n                if self.total_size - len(indices) < len(indices):\n                    indices += indices[: (self.total_size - len(indices))]\n                else:\n                    extra_ids = np.random.randint(low=0, high=len(indices), size=self.total_size - len(indices))\n                    indices += [indices[ids] for ids in extra_ids]\n            assert len(indices) == self.total_size\n        indices = indices[self.rank : self.total_size : self.num_replicas]\n        self.num_samples = len(indices)\n        return iter(indices)\n\n    def __len__(self):\n        return self.num_samples\n\n    def set_epoch(self, epoch):\n        self.epoch = epoch\n\n\ndef get_loader(args):\n    data_dir = args.data_dir\n    datalist_json = os.path.join(data_dir, args.json_list)\n    transform = transforms.Compose(\n        [\n            transforms.LoadImaged(keys=[\"image\"]),\n            transforms.EnsureChannelFirstd(keys=[\"image\"]),\n            transforms.Orientationd(keys=[\"image\"], axcodes=\"RAS\"),\n            transforms.Spacingd(\n                keys=[\"image\"], pixdim=(args.space_x, args.space_y, args.space_z), mode=(\"bilinear\")\n            ),\n            transforms.ScaleIntensityRanged(\n                keys=[\"image\"], a_min=args.a_min, a_max=args.a_max, b_min=args.b_min, b_max=args.b_max, clip=True\n            ),\n            transforms.CropForegroundd(keys=[\"image\"], source_key=\"image\"),\n        ]\n    )\n\n    datalist = load_decathlon_datalist(datalist_json, True, \"test\", base_dir=data_dir)\n\n    print('use persistent')\n    ds = PersistentDataset(data=datalist,\n                             transform=transform,\n                             pickle_protocol=pickle.HIGHEST_PROTOCOL,\n                             
cache_dir='/data/linshan/cache/flare22_test')\n    # /data/linshan/cache/flare22_test\n\n    sampler = Sampler(ds) if args.distributed else None\n    loader = data.DataLoader(\n        ds,\n        batch_size=args.batch_size,\n        shuffle=(sampler is None),\n        num_workers=args.workers,\n        sampler=sampler,\n        pin_memory=True,\n    )\n\n    return loader, transform\n"
  },
  {
    "path": "Finetune/Flare22/utils/data_utils.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nimport os\nimport pickle\nimport numpy as np\nimport torch\nimport itertools as it\nfrom monai import data, transforms\nfrom monai.data import *\n\n\nclass Sampler(torch.utils.data.Sampler):\n    def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, make_even=True):\n        if num_replicas is None:\n            if not torch.distributed.is_available():\n                raise RuntimeError(\"Requires distributed package to be available\")\n            num_replicas = torch.distributed.get_world_size()\n        if rank is None:\n            if not torch.distributed.is_available():\n                raise RuntimeError(\"Requires distributed package to be available\")\n            rank = torch.distributed.get_rank()\n        self.shuffle = shuffle\n        self.make_even = make_even\n        self.dataset = dataset\n        self.num_replicas = num_replicas\n        self.rank = rank\n        self.epoch = 0\n        self.num_samples = int(math.ceil(len(self.dataset) * 1.0 / self.num_replicas))\n        self.total_size = self.num_samples * self.num_replicas\n        indices = list(range(len(self.dataset)))\n        self.valid_length = len(indices[self.rank : self.total_size : self.num_replicas])\n\n    def __iter__(self):\n        if self.shuffle:\n            g = torch.Generator()\n            g.manual_seed(self.epoch)\n            indices = 
torch.randperm(len(self.dataset), generator=g).tolist()\n        else:\n            indices = list(range(len(self.dataset)))\n        if self.make_even:\n            if len(indices) < self.total_size:\n                if self.total_size - len(indices) < len(indices):\n                    indices += indices[: (self.total_size - len(indices))]\n                else:\n                    extra_ids = np.random.randint(low=0, high=len(indices), size=self.total_size - len(indices))\n                    indices += [indices[ids] for ids in extra_ids]\n            assert len(indices) == self.total_size\n        indices = indices[self.rank : self.total_size : self.num_replicas]\n        self.num_samples = len(indices)\n        return iter(indices)\n\n    def __len__(self):\n        return self.num_samples\n\n    def set_epoch(self, epoch):\n        self.epoch = epoch\n\n\ndef get_loader(args):\n    data_dir = args.data_dir\n    datalist_json = os.path.join(data_dir, args.json_list)\n    train_transform = transforms.Compose(\n        [\n            transforms.LoadImaged(keys=[\"image\", \"label\"]),\n            transforms.EnsureChannelFirstd(keys=[\"image\", \"label\"]),\n            transforms.Orientationd(keys=[\"image\", \"label\"], axcodes=\"RAS\"),\n            transforms.Spacingd(\n                keys=[\"image\", \"label\"], pixdim=(args.space_x, args.space_y, args.space_z), mode=(\"bilinear\", \"nearest\")\n            ),\n            transforms.ScaleIntensityRanged(\n                keys=[\"image\"], a_min=args.a_min, a_max=args.a_max, b_min=args.b_min, b_max=args.b_max, clip=True\n            ),\n            transforms.CropForegroundd(keys=[\"image\", \"label\"], source_key=\"image\"),\n            transforms.RandCropByPosNegLabeld(\n                keys=[\"image\", \"label\"],\n                label_key=\"label\",\n                spatial_size=(args.roi_x, args.roi_y, args.roi_z),\n                pos=9,\n                neg=1,\n                
num_samples=args.sw_batch_size,\n                image_key=\"image\",\n                image_threshold=0,\n            ),\n            # transforms.RandCropByLabelClassesd(\n            #     keys=[\"image\", \"label\"],\n            #     image_key=\"image\",\n            #     label_key=\"label\",\n            #     spatial_size=(args.roi_x, args.roi_y, args.roi_z),\n            #     num_classes=args.out_channels,\n            #     ratios=[0, *it.repeat(1, args.out_channels-1)],\n            #     num_samples=args.sw_batch_size,\n            #     image_threshold=0,\n            #     warn=False,\n            # ),\n            transforms.RandFlipd(keys=[\"image\", \"label\"], prob=args.RandFlipd_prob, spatial_axis=0),\n            transforms.RandFlipd(keys=[\"image\", \"label\"], prob=args.RandFlipd_prob, spatial_axis=1),\n            transforms.RandFlipd(keys=[\"image\", \"label\"], prob=args.RandFlipd_prob, spatial_axis=2),\n            transforms.RandRotate90d(keys=[\"image\", \"label\"], prob=args.RandRotate90d_prob, max_k=3),\n            #transforms.RandShiftIntensityd(keys=\"image\", offsets=0.1, prob=args.RandShiftIntensityd_prob),\n            transforms.ToTensord(keys=[\"image\", \"label\"]),\n        ]\n    )\n    val_transform = transforms.Compose(\n        [\n            transforms.LoadImaged(keys=[\"image\", \"label\"]),\n            transforms.EnsureChannelFirstd(keys=[\"image\", \"label\"]),\n            transforms.Orientationd(keys=[\"image\", \"label\"], axcodes=\"RAS\"),\n            transforms.Spacingd(\n                keys=[\"image\", \"label\"], pixdim=(args.space_x, args.space_y, args.space_z), mode=(\"bilinear\", \"nearest\")\n            ),\n            transforms.ScaleIntensityRanged(\n                keys=[\"image\"], a_min=args.a_min, a_max=args.a_max, b_min=args.b_min, b_max=args.b_max, clip=True\n            ),\n            transforms.CropForegroundd(keys=[\"image\", \"label\"], source_key=\"image\"),\n            
transforms.ToTensord(keys=[\"image\", \"label\"]),\n        ]\n    )\n\n    if args.test_mode:\n        test_files = load_decathlon_datalist(datalist_json, True, \"validation\", base_dir=data_dir)\n        test_ds = PersistentDataset(data=test_files,\n                                     transform=val_transform,\n                                     pickle_protocol=pickle.HIGHEST_PROTOCOL,\n                                     cache_dir='/data/linshan/cache/flare22')\n        test_sampler = Sampler(test_ds, shuffle=False) if args.distributed else None\n        test_loader = data.DataLoader(\n            test_ds,\n            batch_size=1,\n            shuffle=False,\n            num_workers=args.workers,\n            sampler=test_sampler,\n            pin_memory=True,\n            persistent_workers=True,\n        )\n        loader = test_loader\n    else:\n        datalist = load_decathlon_datalist(datalist_json, True, \"training\", base_dir=data_dir)\n        if args.use_normal_dataset:\n            print('use persistent')\n            train_ds = PersistentDataset(data=datalist,\n                                     transform=train_transform,\n                                     pickle_protocol=pickle.HIGHEST_PROTOCOL,\n                                     cache_dir='/data/linshan/cache/flare22')\n            # train_ds = data.Dataset(data=datalist, transform=train_transform)\n        else:\n            train_ds = data.CacheDataset(\n                data=datalist, transform=train_transform, cache_num=24, cache_rate=1.0, num_workers=args.workers\n            )\n        train_sampler = Sampler(train_ds) if args.distributed else None\n        train_loader = data.DataLoader(\n            train_ds,\n            batch_size=args.batch_size,\n            shuffle=(train_sampler is None),\n            num_workers=args.workers,\n            sampler=train_sampler,\n            pin_memory=True,\n        )\n        val_files = load_decathlon_datalist(datalist_json, True, 
\"validation\", base_dir=data_dir)\n        # val_ds = data.Dataset(data=val_files, transform=val_transform)\n        val_ds = PersistentDataset(data=val_files,\n                                     transform=val_transform,\n                                     pickle_protocol=pickle.HIGHEST_PROTOCOL,\n                                     cache_dir='/data/linshan/cache/flare22')\n        val_sampler = Sampler(val_ds, shuffle=False) if args.distributed else None\n        val_loader = data.DataLoader(\n            val_ds, batch_size=1, shuffle=False, num_workers=args.workers, sampler=val_sampler, pin_memory=False\n        )\n        loader = [train_loader, val_loader]\n\n    return loader\n"
  },
  {
    "path": "Finetune/Flare22/utils/utils.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport numpy as np\nimport scipy.ndimage as ndimage\nimport torch\nimport os\n\ndef resample_3d(img, target_size):\n    imx, imy, imz = img.shape\n    tx, ty, tz = target_size\n    zoom_ratio = (float(tx) / float(imx), float(ty) / float(imy), float(tz) / float(imz))\n    img_resampled = ndimage.zoom(img, zoom_ratio, order=0, prefilter=False)\n    return img_resampled\n\n\ndef dice(x, y):\n    intersect = np.sum(np.sum(np.sum(x * y)))\n    y_sum = np.sum(np.sum(np.sum(y)))\n    if y_sum == 0:\n        return 0.0\n    x_sum = np.sum(np.sum(np.sum(x)))\n    return 2 * intersect / (x_sum + y_sum)\n\n\nclass AverageMeter(object):\n    def __init__(self):\n        self.reset()\n\n    def reset(self):\n        self.val = 0\n        self.avg = 0\n        self.sum = 0\n        self.count = 0\n\n    def update(self, val, n=1):\n        self.val = val\n        self.sum += val * n\n        self.count += n\n        self.avg = np.where(self.count > 0, self.sum / self.count, self.sum)\n\n\ndef distributed_all_gather(\n    tensor_list, valid_batch_size=None, out_numpy=False, world_size=None, no_barrier=False, is_valid=None\n):\n    if world_size is None:\n        world_size = torch.distributed.get_world_size()\n    if valid_batch_size is not None:\n        valid_batch_size = min(valid_batch_size, world_size)\n    elif is_valid is not None:\n        is_valid = 
torch.tensor(bool(is_valid), dtype=torch.bool, device=tensor_list[0].device)\n    if not no_barrier:\n        torch.distributed.barrier()\n    tensor_list_out = []\n    with torch.no_grad():\n        if is_valid is not None:\n            is_valid_list = [torch.zeros_like(is_valid) for _ in range(world_size)]\n            torch.distributed.all_gather(is_valid_list, is_valid)\n            is_valid = [x.item() for x in is_valid_list]\n        for tensor in tensor_list:\n            gather_list = [torch.zeros_like(tensor) for _ in range(world_size)]\n            torch.distributed.all_gather(gather_list, tensor)\n            if valid_batch_size is not None:\n                gather_list = gather_list[:valid_batch_size]\n            elif is_valid is not None:\n                gather_list = [g for g, v in zip(gather_list, is_valid_list) if v]\n            if out_numpy:\n                gather_list = [t.cpu().numpy() for t in gather_list]\n            tensor_list_out.append(gather_list)\n    return tensor_list_out\n\n\ndef color_map(dataset='pascal'):\n    cmap = np.zeros((256, 3), dtype='uint8')\n\n    if dataset == 'pascal' or dataset == 'coco':\n        def bitget(byteval, idx):\n            return (byteval & (1 << idx)) != 0\n\n        for i in range(256):\n            r = g = b = 0\n            c = i\n            for j in range(8):\n                r = r | (bitget(c, 0) << 7-j)\n                g = g | (bitget(c, 1) << 7-j)\n                b = b | (bitget(c, 2) << 7-j)\n                c = c >> 3\n\n            cmap[i] = np.array([r, g, b])\n\n    elif dataset == 'cityscapes':\n        cmap[0] = np.array([128, 64, 128])\n        cmap[1] = np.array([244, 35, 232])\n        cmap[2] = np.array([70, 70, 70])\n        cmap[3] = np.array([102, 102, 156])\n        cmap[4] = np.array([190, 153, 153])\n        cmap[5] = np.array([153, 153, 153])\n        cmap[6] = np.array([250, 170, 30])\n        cmap[7] = np.array([220, 220, 0])\n        cmap[8] = np.array([107, 142, 35])\n  
      cmap[9] = np.array([152, 251, 152])\n        cmap[10] = np.array([70, 130, 180])\n        cmap[11] = np.array([220, 20, 60])\n        cmap[12] = np.array([255,  0,  0])\n        cmap[13] = np.array([0,  0, 142])\n        cmap[14] = np.array([0,  0, 70])\n        cmap[15] = np.array([0, 60, 100])\n        cmap[16] = np.array([0, 80, 100])\n        cmap[17] = np.array([0,  0, 230])\n        cmap[18] = np.array([119, 11, 32])\n\n        cmap[19] = np.array([0, 0, 0])\n        cmap[255] = np.array([0, 0, 0])\n\n    return cmap\n\n\ndef check_dir(dir):\n    if not os.path.exists(dir):\n        os.makedirs(dir)"
  },
  {
    "path": "Finetune/Flare22/val.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport argparse\nimport os\nfrom functools import partial\nimport nibabel as nib\nimport numpy as np\nimport torch\nimport torch.nn.functional as F\nfrom torch.cuda.amp import GradScaler, autocast\nfrom utils.data_utils import get_loader\nfrom utils.utils import dice, resample_3d\nfrom utils.utils import AverageMeter, distributed_all_gather\n\nfrom monai.inferers import sliding_window_inference\nfrom monai.data import decollate_batch\nfrom monai.losses import DiceCELoss\nfrom monai.metrics import DiceMetric\nfrom monai.networks.nets import SwinUNETR\nfrom monai.transforms import Activations, AsDiscrete, Compose\nfrom monai.utils.enums import MetricReduction\n\nos.environ['CUDA_VISIBLE_DEVICES'] = \"0\"\nos.environ['MASTER_ADDR'] = 'localhost'\nos.environ['MASTER_PORT'] = '28890'\n\nparser = argparse.ArgumentParser(description=\"Swin UNETR segmentation pipeline\")\nparser.add_argument(\n    \"--pretrained_dir\", default=\"./runs/logs_scratch_v2/\", type=str, help=\"pretrained checkpoint directory\"\n)\nparser.add_argument(\"--data_dir\", default=\"/data/linshan/CTs/BTCV/\", type=str, help=\"dataset directory\")\nparser.add_argument(\"--exp_name\", default=\"BTCV_0.8451\", type=str, help=\"experiment name\")\nparser.add_argument(\"--json_list\", default=\"dataset_0.json\", type=str, help=\"dataset json file\")\nparser.add_argument(\n    
\"--pretrained_model_name\",\n    default=\"model_0.8451.pt\",\n    type=str,\n    help=\"pretrained model name\",\n)\nroi=96\nparser.add_argument(\"--use_normal_dataset\", default=True, help=\"use monai Dataset class\")\nparser.add_argument(\"--feature_size\", default=48, type=int, help=\"feature size\")\nparser.add_argument(\"--batch_size\", default=1, type=int, help=\"number of batch size\")\nparser.add_argument(\"--sw_batch_size\", default=4, type=int, help=\"number of sliding window batch size\")\nparser.add_argument(\"--infer_overlap\", default=0.75, type=float, help=\"sliding window inference overlap\")\nparser.add_argument(\"--in_channels\", default=1, type=int, help=\"number of input channels\")\nparser.add_argument(\"--out_channels\", default=14, type=int, help=\"number of output channels\")\nparser.add_argument(\"--a_min\", default=-175.0, type=float, help=\"a_min in ScaleIntensityRanged\")\nparser.add_argument(\"--a_max\", default=250.0, type=float, help=\"a_max in ScaleIntensityRanged\")\nparser.add_argument(\"--b_min\", default=0.0, type=float, help=\"b_min in ScaleIntensityRanged\")\nparser.add_argument(\"--b_max\", default=1.0, type=float, help=\"b_max in ScaleIntensityRanged\")\nparser.add_argument(\"--space_x\", default=1.5, type=float, help=\"spacing in x direction\")\nparser.add_argument(\"--space_y\", default=1.5, type=float, help=\"spacing in y direction\")\nparser.add_argument(\"--space_z\", default=1.5, type=float, help=\"spacing in z direction\")\nparser.add_argument(\"--roi_x\", default=roi, type=int, help=\"roi size in x direction\")\nparser.add_argument(\"--roi_y\", default=roi, type=int, help=\"roi size in y direction\")\nparser.add_argument(\"--roi_z\", default=roi, type=int, help=\"roi size in z direction\")\nparser.add_argument(\"--dropout_rate\", default=0.0, type=float, help=\"dropout rate\")\nparser.add_argument(\"--distributed\", action=\"store_true\", help=\"start distributed training\")\nparser.add_argument(\"--workers\", 
default=8, type=int, help=\"number of workers\")\nparser.add_argument(\"--RandFlipd_prob\", default=0.2, type=float, help=\"RandFlipd aug probability\")\nparser.add_argument(\"--RandRotate90d_prob\", default=0.2, type=float, help=\"RandRotate90d aug probability\")\nparser.add_argument(\"--RandScaleIntensityd_prob\", default=0.1, type=float, help=\"RandScaleIntensityd aug probability\")\nparser.add_argument(\"--RandShiftIntensityd_prob\", default=0.1, type=float, help=\"RandShiftIntensityd aug probability\")\nparser.add_argument(\"--spatial_dims\", default=3, type=int, help=\"spatial dimension of input data\")\nparser.add_argument(\"--use_checkpoint\", default=True, help=\"use gradient checkpointing to save memory\")\n\n\ndef main():\n    args = parser.parse_args()\n    args.test_mode = True\n    output_directory = \"./outputs/\" + args.exp_name\n    if not os.path.exists(output_directory):\n        os.makedirs(output_directory)\n    val_loader = get_loader(args)\n    pretrained_dir = args.pretrained_dir\n    model_name = args.pretrained_model_name\n    device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n    pretrained_pth = os.path.join(pretrained_dir, model_name)\n    model = SwinUNETR(\n        img_size=(args.roi_x, args.roi_y, args.roi_z),\n        in_channels=args.in_channels,\n        out_channels=args.out_channels,\n        feature_size=args.feature_size,\n        drop_rate=0.0,\n        attn_drop_rate=0.0,\n        dropout_path_rate=0.0,\n        use_checkpoint=args.use_checkpoint,\n        use_v2=True\n    )\n    inf_size = [args.roi_x, args.roi_y, args.roi_z]\n    model_inferer = partial(\n        sliding_window_inference,\n        roi_size=inf_size,\n        sw_batch_size=args.sw_batch_size,\n        predictor=model,\n        overlap=args.infer_overlap,\n    )\n\n    model_dict = torch.load(pretrained_pth)[\"state_dict\"]\n    model.load_state_dict(model_dict, strict=True)\n    model.eval()\n    model.to(device)\n\n    acc_func = 
DiceMetric(include_background=False, reduction=MetricReduction.MEAN, get_not_nans=True)\n    run_acc = AverageMeter()\n    post_label = AsDiscrete(to_onehot=args.out_channels)\n    post_pred = AsDiscrete(argmax=True, to_onehot=args.out_channels)\n\n    with torch.no_grad():\n        all_dice = None\n        num = np.zeros(13)\n        dice_list_case = []\n        for idx, batch_data in enumerate(val_loader):\n            img_name = batch_data[\"image_meta_dict\"][\"filename_or_obj\"][0].split(\"/\")[-1]\n\n            if isinstance(batch_data, list):\n                data, target = batch_data\n            else:\n                data, target = batch_data[\"image\"], batch_data[\"label\"]\n            data, target = data.cuda(), target.cuda()\n\n            print(data.shape, target.shape)\n            z = data.shape[-1]\n            data = F.interpolate(data, size=(263, 218, z), mode='trilinear')\n            target = F.interpolate(target, size=(263, 218, z), mode='nearest')\n            print(data.shape, target.shape)\n\n            with autocast(enabled=True):\n                if model_inferer is not None:\n                    logits = model_inferer(data)\n                else:\n                    logits = model(data)\n            if not logits.is_cuda:\n                target = target.cpu()\n\n            val_labels_list = decollate_batch(target)\n            val_labels_convert = [post_label(val_label_tensor) for val_label_tensor in val_labels_list]\n            val_outputs_list = decollate_batch(logits)\n            val_output_convert = [post_pred(val_pred_tensor) for val_pred_tensor in val_outputs_list]\n            acc_func.reset()\n            acc_func(y_pred=val_output_convert, y=val_labels_convert)\n            acc, not_nans = acc_func.aggregate()\n            run_acc.update(acc.cpu().numpy(), n=not_nans.cpu().numpy())\n            print(np.mean(run_acc.avg))\n\n            # # save predict\n            # print(logits.shape)\n            # val_outputs = 
torch.argmax(logits, 1).cpu().numpy()\n            # np.save(os.path.join(output_directory, 'pre'+ img_name[3:-7]+'.npy'), val_outputs.astype(np.uint8)[0])\n            # # save label\n            # val_labels = target.cpu().numpy()\n            # np.save(os.path.join(output_directory, 'label' + img_name[3:-7] + '.npy'), val_labels.astype(np.uint8)[0][0])\n            #\n            # # save input\n            # img = data.cpu().numpy()\n            # img = img * 255\n            # print(np.max(img))\n            # np.save(os.path.join(output_directory, 'img' + img_name[3:-7] + '.npy'), img.astype(np.uint8)[0][0])\n\n\nif __name__ == \"__main__\":\n    main()\n\n    # outputs = torch.argmax(logits, 1).cpu().numpy()\n    # outputs = outputs.astype(np.uint8)[0]\n    # val_labels = target.cpu().numpy()[0, 0, :, :, :]\n    #\n    # len_class = len(list(np.unique(val_labels))) - 1\n    # dice_list_sub = []\n    # for i in range(1, 14):\n    #     # judge this class exist or not, ignore background\n    #     num[i - 1] += (np.sum(val_labels == i) > 0).astype(np.uint8)\n    #     organ_Dice = dice(outputs == i, val_labels == i)\n    #     dice_list_sub.append(organ_Dice)\n    #\n    # mean_dice = np.sum(dice_list_sub) / len_class\n    # print(\"Mean Organ Dice: {}\".format(mean_dice))\n    #\n    # # acc of each organ\n    # print(\"Organ Dice:\", dice_list_sub)\n    #\n    # if all_dice is None:\n    #     all_dice = (np.asarray(dice_list_sub)).copy()\n    # else:\n    #     all_dice = all_dice + np.asarray(dice_list_sub)\n    # print(\"Organ Dice accumulate:\", all_dice*100 / num)\n    #\n    # dice_list_case.append(mean_dice)\n    # print(\"Overall Mean Dice: {}\".format(100*np.mean(dice_list_case)))\n"
  },
  {
    "path": "Finetune/MM-WHS/dataset.json",
    "content": "{\n    \"description\": \"0\",\n    \"labels\": {\n        \"0\": \"background\",\n        \"1\": \"Left Ventricle\",\n        \"2\": \"whole aorta\",\n        \"3\": \"Right Ventricle\",\n        \"4\": \"Left Atrium\",\n        \"5\": \"myocardium of Left Ventricle\",\n        \"6\": \"Right Atrium\",\n        \"7\": \"Pulmonary Artery\"\n    },\n    \"licence\": \"hands off!\",\n    \"modality\": {\n        \"0\": \"CT\"\n    },\n    \"name\": \"MM-WHS\",\n    \"numTest\": 0,\n    \"numTraining\": 20,\n    \"reference\": \"0\",\n    \"release\": \"0.0\",\n    \"tensorImageSize\": \"4D\",\n    \"test\": [],\n    \"validation\": [{\n            \"image\": \"./images/ct_train_1001_image.nii.gz\",\n            \"label\": \"./labels/ct_train_1001_label.nii.gz\"\n        },\n        {\n            \"image\": \"./images/ct_train_1002_image.nii.gz\",\n            \"label\": \"./labels/ct_train_1002_label.nii.gz\"\n        },\n        {\n            \"image\": \"./images/ct_train_1003_image.nii.gz\",\n            \"label\": \"./labels/ct_train_1003_label.nii.gz\"\n        },\n        {\n            \"image\": \"./images/ct_train_1004_image.nii.gz\",\n            \"label\": \"./labels/ct_train_1004_label.nii.gz\"\n        }\n    ],\n    \"training\": [\n        {\n            \"image\": \"./images/ct_train_1005_image.nii.gz\",\n            \"label\": \"./labels/ct_train_1005_label.nii.gz\"\n        },\n        {\n            \"image\": \"./images/ct_train_1006_image.nii.gz\",\n            \"label\": \"./labels/ct_train_1006_label.nii.gz\"\n        },\n        {\n            \"image\": \"./images/ct_train_1007_image.nii.gz\",\n            \"label\": \"./labels/ct_train_1007_label.nii.gz\"\n        },\n        {\n            \"image\": \"./images/ct_train_1008_image.nii.gz\",\n            \"label\": \"./labels/ct_train_1008_label.nii.gz\"\n        },\n        {\n            \"image\": \"./images/ct_train_1009_image.nii.gz\",\n            \"label\": 
\"./labels/ct_train_1009_label.nii.gz\"\n        },\n        {\n            \"image\": \"./images/ct_train_1010_image.nii.gz\",\n            \"label\": \"./labels/ct_train_1010_label.nii.gz\"\n        },\n        {\n            \"image\": \"./images/ct_train_1011_image.nii.gz\",\n            \"label\": \"./labels/ct_train_1011_label.nii.gz\"\n        },\n        {\n            \"image\": \"./images/ct_train_1012_image.nii.gz\",\n            \"label\": \"./labels/ct_train_1012_label.nii.gz\"\n        },\n        {\n            \"image\": \"./images/ct_train_1013_image.nii.gz\",\n            \"label\": \"./labels/ct_train_1013_label.nii.gz\"\n        },\n        {\n            \"image\": \"./images/ct_train_1014_image.nii.gz\",\n            \"label\": \"./labels/ct_train_1014_label.nii.gz\"\n        },\n        {\n            \"image\": \"./images/ct_train_1015_image.nii.gz\",\n            \"label\": \"./labels/ct_train_1015_label.nii.gz\"\n        },\n        {\n            \"image\": \"./images/ct_train_1016_image.nii.gz\",\n            \"label\": \"./labels/ct_train_1016_label.nii.gz\"\n        },\n        {\n            \"image\": \"./images/ct_train_1017_image.nii.gz\",\n            \"label\": \"./labels/ct_train_1017_label.nii.gz\"\n        },\n        {\n            \"image\": \"./images/ct_train_1018_image.nii.gz\",\n            \"label\": \"./labels/ct_train_1018_label.nii.gz\"\n        },\n        {\n            \"image\": \"./images/ct_train_1019_image.nii.gz\",\n            \"label\": \"./labels/ct_train_1019_label.nii.gz\"\n        },\n        {\n            \"image\": \"./images/ct_train_1020_image.nii.gz\",\n            \"label\": \"./labels/ct_train_1020_label.nii.gz\"\n        }\n    ]\n}"
  },
  {
    "path": "Finetune/MM-WHS/inferers.py",
    "content": "\"\"\"Multiview inferer.\"\"\"\r\n\r\nimport warnings\r\nfrom typing import Any, Callable, Dict, List, Mapping, Sequence, Tuple, Union\r\n\r\nimport torch\r\nimport torch.nn.functional as F\r\n\r\nfrom monai.data.utils import compute_importance_map, dense_patch_slices, get_valid_patch_size\r\nfrom monai.transforms import Resize\r\nfrom monai.utils import (\r\n    BlendMode,\r\n    PytorchPadMode,\r\n    convert_data_type,\r\n    ensure_tuple,\r\n    fall_back_tuple,\r\n    look_up_option,\r\n    optional_import,\r\n)\r\nfrom monai.inferers.utils import _get_scan_interval\r\n\r\n# from utils import view_ops\r\n# from utils import view_transforms\r\n\r\ntqdm, _ = optional_import(\"tqdm\", name=\"tqdm\")\r\n\r\n\r\ndef double_sliding_window_inference(\r\n    inputs: torch.Tensor,\r\n    view: int,\r\n    roi_size: Union[Sequence[int], int],\r\n    sw_batch_size: int,\r\n    predictor: Callable[..., Union[torch.Tensor, Sequence[torch.Tensor], Dict[Any, torch.Tensor]]],\r\n    overlap: float = 0.25,\r\n    mode: Union[BlendMode, str] = BlendMode.CONSTANT,\r\n    sigma_scale: Union[Sequence[float], float] = 0.125,\r\n    padding_mode: Union[PytorchPadMode, str] = PytorchPadMode.CONSTANT,\r\n    cval: float = 0.0,\r\n    sw_device: Union[torch.device, str, None] = None,\r\n    device: Union[torch.device, str, None] = None,\r\n    progress: bool = False,\r\n    roi_weight_map: Union[torch.Tensor, None] = None,\r\n    *args: Any,\r\n    **kwargs: Any,\r\n) -> Union[torch.Tensor, Tuple[torch.Tensor, ...], Dict[Any, torch.Tensor]]:\r\n    \"\"\"\r\n    Sliding window inference on two `inputs` with `predictor`.\r\n\r\n    The outputs of `predictor` could be a tensor, a tuple, or a dictionary of tensors.\r\n    Each output in the tuple or dict value is allowed to have different resolutions with respect to the input.\r\n    e.g., the input patch spatial size is [128,128,128], the output (a tuple of two patches) patch sizes\r\n    could be ([128,64,256], 
[64,32,128]).\r\n    In this case, the parameter `overlap` and `roi_size` need to be carefully chosen to ensure the output ROI is still\r\n    an integer. If the predictor's input and output spatial sizes are not equal, we recommend choosing the parameters\r\n    so that `overlap*roi_size*output_size/input_size` is an integer (for each spatial dimension).\r\n\r\n    When roi_size is larger than the inputs' spatial size, the input image are padded during inference.\r\n    To maintain the same spatial sizes, the output image will be cropped to the original input size.\r\n\r\n    Args:\r\n        inputs: input image to be processed (assuming NCHW[D])\r\n        roi_size: the spatial window size for inferences.\r\n            When its components have None or non-positives, the corresponding inputs dimension will be used.\r\n            if the components of the `roi_size` are non-positive values, the transform will use the\r\n            corresponding components of img size. For example, `roi_size=(32, -1)` will be adapted\r\n            to `(32, 64)` if the second spatial dimension size of img is `64`.\r\n        sw_batch_size: the batch size to run window slices.\r\n        predictor: given input tensor ``patch_data`` in shape NCHW[D],\r\n            The outputs of the function call ``predictor(patch_data)`` should be a tensor, a tuple, or a dictionary\r\n            with Tensor values. Each output in the tuple or dict value should have the same batch_size, i.e. 
NM'H'W'[D'];\r\n            where H'W'[D'] represents the output patch's spatial size, M is the number of output channels,\r\n            N is `sw_batch_size`, e.g., the input shape is (7, 1, 128,128,128),\r\n            the output could be a tuple of two tensors, with shapes: ((7, 5, 128, 64, 256), (7, 4, 64, 32, 128)).\r\n            In this case, the parameter `overlap` and `roi_size` need to be carefully chosen\r\n            to ensure the scaled output ROI sizes are still integers.\r\n            If the `predictor`'s input and output spatial sizes are different,\r\n            we recommend choosing the parameters so that ``overlap*roi_size*zoom_scale`` is an integer for each dimension.\r\n        overlap: Amount of overlap between scans.\r\n        mode: {``\"constant\"``, ``\"gaussian\"``}\r\n            How to blend output of overlapping windows. Defaults to ``\"constant\"``.\r\n\r\n            - ``\"constant``\": gives equal weight to all predictions.\r\n            - ``\"gaussian``\": gives less weight to predictions on edges of windows.\r\n\r\n        sigma_scale: the standard deviation coefficient of the Gaussian window when `mode` is ``\"gaussian\"``.\r\n            Default: 0.125. Actual window sigma is ``sigma_scale`` * ``dim_size``.\r\n            When sigma_scale is a sequence of floats, the values denote sigma_scale at the corresponding\r\n            spatial dimensions.\r\n        padding_mode: {``\"constant\"``, ``\"reflect\"``, ``\"replicate\"``, ``\"circular\"``}\r\n            Padding mode for ``inputs``, when ``roi_size`` is larger than inputs. Defaults to ``\"constant\"``\r\n            See also: https://pytorch.org/docs/stable/generated/torch.nn.functional.pad.html\r\n        cval: fill value for 'constant' padding mode. 
Default: 0\r\n        sw_device: device for the window data.\r\n            By default the device (and accordingly the memory) of the `inputs` is used.\r\n            Normally `sw_device` should be consistent with the device where `predictor` is defined.\r\n        device: device for the stitched output prediction.\r\n            By default the device (and accordingly the memory) of the `inputs` is used. If for example\r\n            set to device=torch.device('cpu') the gpu memory consumption is less and independent of the\r\n            `inputs` and `roi_size`. Output is on the `device`.\r\n        progress: whether to print a `tqdm` progress bar.\r\n        roi_weight_map: pre-computed (non-negative) weight map for each ROI.\r\n            If not given, and ``mode`` is not `constant`, this map will be computed on the fly.\r\n        args: optional args to be passed to ``predictor``.\r\n        kwargs: optional keyword args to be passed to ``predictor``.\r\n\r\n    Note:\r\n        - input must be channel-first and have a batch dim, supports N-D sliding window.\r\n\r\n    \"\"\"\r\n    compute_dtype = inputs.dtype\r\n    num_spatial_dims = len(inputs.shape) - 2\r\n    if overlap < 0 or overlap >= 1:\r\n        raise ValueError(\"overlap must be >= 0 and < 1.\")\r\n\r\n    # determine image spatial size and batch size\r\n    # Note: all input images must have the same image size and batch size\r\n    batch_size, _, *image_size_ = inputs.shape\r\n\r\n    if device is None:\r\n        device = inputs.device\r\n    if sw_device is None:\r\n        sw_device = inputs.device\r\n\r\n    roi_size = fall_back_tuple(roi_size, image_size_)\r\n    # in case that image size is smaller than roi size\r\n    image_size = tuple(max(image_size_[i], roi_size[i]) for i in range(num_spatial_dims))\r\n    pad_size = []\r\n    for k in range(len(inputs.shape) - 1, 1, -1):\r\n        diff = max(roi_size[k - 2] - inputs.shape[k], 0)\r\n        half = diff // 2\r\n        
pad_size.extend([half, diff - half])\r\n    inputs = F.pad(inputs, pad=pad_size, mode=look_up_option(padding_mode, PytorchPadMode).value, value=cval)\r\n    # inputs2 = F.pad(inputs2, pad=pad_size, mode=look_up_option(padding_mode, PytorchPadMode).value, value=cval)\r\n\r\n    scan_interval = _get_scan_interval(image_size, roi_size, num_spatial_dims, overlap)\r\n\r\n    # Store all slices in list\r\n    slices = dense_patch_slices(image_size, roi_size, scan_interval)\r\n    num_win = len(slices)  # number of windows per image\r\n    total_slices = num_win * batch_size  # total number of windows\r\n\r\n    # Create window-level importance map\r\n    valid_patch_size = get_valid_patch_size(image_size, roi_size)\r\n    if valid_patch_size == roi_size and (roi_weight_map is not None):\r\n        importance_map = roi_weight_map\r\n    else:\r\n        try:\r\n            importance_map = compute_importance_map(valid_patch_size, mode=mode, sigma_scale=sigma_scale, device=device)\r\n        except BaseException as e:\r\n            raise RuntimeError(\r\n                \"Seems to be OOM. 
Please try smaller patch size or mode='constant' instead of mode='gaussian'.\"\r\n            ) from e\r\n    importance_map = convert_data_type(importance_map, torch.Tensor, device, compute_dtype)[0]  # type: ignore\r\n    # handle non-positive weights\r\n    min_non_zero = max(importance_map[importance_map != 0].min().item(), 1e-3)\r\n    importance_map = torch.clamp(importance_map.to(torch.float32), min=min_non_zero).to(compute_dtype)\r\n\r\n    # Perform predictions\r\n    dict_key, output_image_list_1, output_image_list_2, count_map_list = None, [], [], []\r\n    _initialized_ss = -1\r\n    is_tensor_output = True  # whether the predictor's output is a tensor (instead of dict/tuple)\r\n\r\n    # for each patch\r\n    for slice_g in tqdm(range(0, total_slices, sw_batch_size)) if progress else range(0, total_slices, sw_batch_size):\r\n        slice_range = range(slice_g, min(slice_g + sw_batch_size, total_slices))\r\n        unravel_slice = [\r\n            [slice(int(idx / num_win), int(idx / num_win) + 1), slice(None)] + list(slices[idx % num_win])\r\n            for idx in slice_range\r\n        ]\r\n        window_data = torch.cat([inputs[win_slice] for win_slice in unravel_slice]).to(sw_device)\r\n        view_list = [view, (view + 1) % len(view_transforms.permutation_transforms)]\r\n        window_data_list = [view_ops.get_permute_transform(0, dst)(window_data) for dst in view_list]\r\n        # window_data_2 = torch.cat([inputs2[win_slice] for win_slice in unravel_slice]).to(sw_device)\r\n        seg_prob_out_1, seg_prob_out_2 = predictor(window_data_list[0], window_data_list[1], view_list, *args, **kwargs)  # batched patch segmentation\r\n        seg_prob_out_1, seg_prob_out_2 = view_ops.permute_inverse([seg_prob_out_1, seg_prob_out_2], view_list)\r\n\r\n        # convert seg_prob_out to tuple seg_prob_tuple, this does not allocate new memory.\r\n        seg_prob_tuple_1: Tuple[torch.Tensor, ...]\r\n        seg_prob_tuple_2: Tuple[torch.Tensor, ...]\r\n  
      if isinstance(seg_prob_out_1, torch.Tensor):\r\n            seg_prob_tuple_1 = (seg_prob_out_1,)\r\n            seg_prob_tuple_2 = (seg_prob_out_2,)\r\n        elif isinstance(seg_prob_out_1, Mapping):\r\n            if dict_key is None:\r\n                dict_key = sorted(seg_prob_out_1.keys())  # track predictor's output keys\r\n            seg_prob_tuple_1 = tuple(seg_prob_out_1[k] for k in dict_key)\r\n            seg_prob_tuple_2 = tuple(seg_prob_out_2[k] for k in dict_key)\r\n            is_tensor_output = False\r\n        else:\r\n            seg_prob_tuple_1 = ensure_tuple(seg_prob_out_1)\r\n            seg_prob_tuple_2 = ensure_tuple(seg_prob_out_2)\r\n            is_tensor_output = False\r\n\r\n        # for each output in multi-output list\r\n        for ss in range(len(seg_prob_tuple_1)):\r\n            seg_prob_1 = seg_prob_tuple_1[ss].to(device)  # BxCxMxNxP or BxCxMxN\r\n            seg_prob_2 = seg_prob_tuple_2[ss].to(device)\r\n\r\n            # compute zoom scale: out_roi_size/in_roi_size\r\n            zoom_scale = []\r\n            for axis, (img_s_i, out_w_i, in_w_i) in enumerate(\r\n                zip(image_size, seg_prob_1.shape[2:], window_data.shape[2:])\r\n            ):\r\n                _scale = out_w_i / float(in_w_i)\r\n                if not (img_s_i * _scale).is_integer():\r\n                    warnings.warn(\r\n                        f\"For spatial axis: {axis}, output[{ss}] will have non-integer shape. Spatial \"\r\n                        f\"zoom_scale between output[{ss}] and input is {_scale}. Please pad inputs.\"\r\n                    )\r\n                zoom_scale.append(_scale)\r\n\r\n            if _initialized_ss < ss:  # init. 
the ss-th buffer at the first iteration\r\n                # construct multi-resolution outputs\r\n                output_classes = seg_prob_1.shape[1]\r\n                output_shape = [batch_size, output_classes] + [\r\n                    int(image_size_d * zoom_scale_d) for image_size_d, zoom_scale_d in zip(image_size, zoom_scale)\r\n                ]\r\n                # allocate memory to store the full output and the count for overlapping parts\r\n                output_image_list_1.append(torch.zeros(output_shape, dtype=compute_dtype, device=device))\r\n                output_image_list_2.append(torch.zeros(output_shape, dtype=compute_dtype, device=device))\r\n                count_map_list.append(torch.zeros([1, 1] + output_shape[2:], dtype=compute_dtype, device=device))\r\n                _initialized_ss += 1\r\n\r\n            # resizing the importance_map\r\n            resizer = Resize(spatial_size=seg_prob_1.shape[2:], mode=\"nearest\", anti_aliasing=False)\r\n\r\n            # store the result in the proper location of the full output. Apply weights from importance map.\r\n            for idx, original_idx in zip(slice_range, unravel_slice):\r\n                # zoom roi\r\n                original_idx_zoom = list(original_idx)  # 4D for 2D image, 5D for 3D image\r\n                for axis in range(2, len(original_idx_zoom)):\r\n                    zoomed_start = original_idx[axis].start * zoom_scale[axis - 2]\r\n                    zoomed_end = original_idx[axis].stop * zoom_scale[axis - 2]\r\n                    if not zoomed_start.is_integer() or (not zoomed_end.is_integer()):\r\n                        warnings.warn(\r\n                            f\"For axis-{axis-2} of output[{ss}], the output roi range is not int. \"\r\n                            f\"Input roi range is ({original_idx[axis].start}, {original_idx[axis].stop}). \"\r\n                            f\"Spatial zoom_scale between output[{ss}] and input is {zoom_scale[axis - 2]}. 
\"\r\n                            f\"Corresponding output roi range is ({zoomed_start}, {zoomed_end}).\\n\"\r\n                            f\"Please change overlap ({overlap}) or roi_size ({roi_size[axis-2]}) for axis-{axis-2}. \"\r\n                            \"Tips: if overlap*roi_size*zoom_scale is an integer, it usually works.\"\r\n                        )\r\n                    original_idx_zoom[axis] = slice(int(zoomed_start), int(zoomed_end), None)\r\n                importance_map_zoom = resizer(importance_map.unsqueeze(0))[0].to(compute_dtype)\r\n                # store results and weights\r\n                output_image_list_1[ss][original_idx_zoom] += importance_map_zoom * seg_prob_1[idx - slice_g]\r\n                output_image_list_2[ss][original_idx_zoom] += importance_map_zoom * seg_prob_2[idx - slice_g]\r\n                count_map_list[ss][original_idx_zoom] += (\r\n                    importance_map_zoom.unsqueeze(0).unsqueeze(0).expand(count_map_list[ss][original_idx_zoom].shape)\r\n                )\r\n\r\n    # account for any overlapping sections\r\n    for ss in range(len(output_image_list_1)):\r\n        count_map_pop = count_map_list.pop(0)\r\n        output_image_list_1[ss] = (output_image_list_1[ss] / count_map_pop).to(compute_dtype)\r\n        output_image_list_2[ss] = (output_image_list_2[ss] / count_map_pop).to(compute_dtype)\r\n\r\n    # remove padding if image_size smaller than roi_size\r\n    for ss in range(len(output_image_list_1)):\r\n        output_i_1, output_i_2 = output_image_list_1[ss], output_image_list_2[ss]\r\n        if torch.isnan(output_i_1).any() or torch.isinf(output_i_1).any():\r\n            warnings.warn(\"Sliding window inference results contain NaN or Inf.\")\r\n        if torch.isnan(output_i_2).any() or torch.isinf(output_i_2).any():\r\n            warnings.warn(\"Sliding window inference results contain NaN or Inf.\")\r\n\r\n        zoom_scale = [\r\n            seg_prob_map_shape_d / roi_size_d for 
seg_prob_map_shape_d, roi_size_d in zip(output_i_1.shape[2:], roi_size)\r\n        ]\r\n\r\n        final_slicing: List[slice] = []\r\n        for sp in range(num_spatial_dims):\r\n            slice_dim = slice(pad_size[sp * 2], image_size_[num_spatial_dims - sp - 1] + pad_size[sp * 2])\r\n            slice_dim = slice(\r\n                int(round(slice_dim.start * zoom_scale[num_spatial_dims - sp - 1])),\r\n                int(round(slice_dim.stop * zoom_scale[num_spatial_dims - sp - 1])),\r\n            )\r\n            final_slicing.insert(0, slice_dim)\r\n        while len(final_slicing) < len(output_i_1.shape):\r\n            final_slicing.insert(0, slice(None))\r\n        output_image_list_1[ss] = output_i_1[final_slicing]\r\n        output_image_list_2[ss] = output_i_2[final_slicing]\r\n\r\n    if dict_key is not None:  # if output of predictor is a dict\r\n        final_output_1 = dict(zip(dict_key, output_image_list_1))\r\n        final_output_2 = dict(zip(dict_key, output_image_list_2))\r\n    else:\r\n        final_output_1 = tuple(output_image_list_1)  # type: ignore\r\n        final_output_2 = tuple(output_image_list_2)  # type: ignore\r\n    final_output_1 = final_output_1[0] if is_tensor_output else final_output_1  # type: ignore\r\n    final_output_2 = final_output_2[0] if is_tensor_output else final_output_2  # type: ignore\r\n    return final_output_1, final_output_2\r\n\r\n\r\ndef one_hot(labels: torch.Tensor, num_classes: int, dtype: torch.dtype = torch.float, dim: int = 1) -> torch.Tensor:\r\n    \"\"\"\r\n    For every value v in `labels`, the value in the output will be either 1 or 0. 
Each vector along the `dim`-th\r\n    dimension has the \"one-hot\" format, i.e., it has a total length of `num_classes`,\r\n    with a one and `num_class-1` zeros.\r\n    Note that this will include the background label, thus a binary mask should be treated as having two classes.\r\n\r\n    Args:\r\n        labels: input tensor of integers to be converted into the 'one-hot' format. Internally `labels` will be\r\n            converted into integers `labels.long()`.\r\n        num_classes: number of output channels, the corresponding length of `labels[dim]` will be converted to\r\n            `num_classes` from `1`.\r\n        dtype: the data type of the output one_hot label.\r\n        dim: the dimension to be converted to `num_classes` channels from `1` channel, should be non-negative number.\r\n\r\n    Example:\r\n\r\n    For a tensor `labels` of dimensions [B]1[spatial_dims], return a tensor of dimensions `[B]N[spatial_dims]`\r\n    when `num_classes=N` number of classes and `dim=1`.\r\n\r\n    .. 
code-block:: python\r\n\r\n        from monai.networks.utils import one_hot\r\n        import torch\r\n\r\n        a = torch.randint(0, 2, size=(1, 2, 2, 2))\r\n        out = one_hot(a, num_classes=2, dim=0)\r\n        print(out.shape)  # torch.Size([2, 2, 2, 2])\r\n\r\n        a = torch.randint(0, 2, size=(2, 1, 2, 2, 2))\r\n        out = one_hot(a, num_classes=2, dim=1)\r\n        print(out.shape)  # torch.Size([2, 2, 2, 2, 2])\r\n\r\n    \"\"\"\r\n\r\n    # if `dim` is bigger, add singleton dim at the end\r\n    if labels.ndim < dim + 1:\r\n        shape = list(labels.shape) + [1] * (dim + 1 - len(labels.shape))\r\n        labels = torch.reshape(labels, shape)\r\n\r\n    sh = list(labels.shape)\r\n\r\n    if sh[dim] != 1:\r\n        raise AssertionError(\"labels should have a channel with length equal to one.\")\r\n\r\n    sh[dim] = num_classes\r\n\r\n    o = torch.zeros(size=sh, dtype=dtype, device=labels.device)\r\n    labels = o.scatter_(dim=dim, index=labels.long(), value=1)\r\n\r\n    return labels\r\n\r\n\r\n\"\"\"View operations.\"\"\"\r\n\r\nfrom typing import Sequence, Tuple\r\n\r\n\r\n\"\"\"View operations.\r\n\r\nInput format: [B, C, X, Y, Z, ...]\r\n\r\nNOTE(meijieru): 0 is reserved for identify transform.\r\n\"\"\"\r\n\r\nfrom typing import Callable, Sequence, Union\r\n\r\nimport enum\r\n\r\nimport torch\r\n\r\nRotateType = int\r\nPermuteType = int\r\nTransformFuncType = Callable[[torch.Tensor], torch.Tensor]\r\n# A composition of multiple view transoforms.\r\nTransformsType = Sequence[Union[PermuteType, RotateType]]\r\n\r\n\r\nclass GroupName(enum.Enum):\r\n\r\n    ROTATE = 1\r\n    PERMUTE = 2\r\n\r\n\r\nDEFAULT_ORDER = (GroupName.ROTATE, GroupName.PERMUTE)\r\n\r\nrotation_transforms = {\r\n    0: lambda x: x,\r\n    1: lambda x: x.rot90(1, (3, 4)),\r\n    2: lambda x: x.rot90(2, (3, 4)),\r\n    3: lambda x: x.rot90(3, (3, 4)),\r\n}\r\nrotation_inverse_transforms = {\r\n    0: lambda x: x,\r\n    1: lambda x: x.rot90(3, (3, 4)),\r\n    2: lambda 
x: x.rot90(2, (3, 4)),\r\n    3: lambda x: x.rot90(1, (3, 4)),\r\n}\r\npermutation_transforms = {\r\n    0: lambda x: x,\r\n    1: lambda x: x.permute(0, 1, 3, 2, 4),\r\n    2: lambda x: x.permute(0, 1, 4, 3, 2),\r\n}\r\npermutation_inverse_transforms = {\r\n    0: lambda x: x,\r\n    1: lambda x: x.permute(0, 1, 3, 2, 4),\r\n    2: lambda x: x.permute(0, 1, 4, 3, 2),\r\n}\r\n\r\nall_forward_transforms = {\r\n    GroupName.ROTATE: rotation_transforms,\r\n    GroupName.PERMUTE: permutation_transforms,\r\n}\r\nall_backward_transforms = {\r\n    GroupName.ROTATE: rotation_inverse_transforms,\r\n    GroupName.PERMUTE: permutation_inverse_transforms,\r\n}\r\n\r\n\r\ndef get_transforms_func(views: TransformsType,\r\n                        orders: Sequence[GroupName] = DEFAULT_ORDER,\r\n                        inverse: bool = False) -> TransformFuncType:\r\n    \"\"\"Gets sequential transform functions.\"\"\"\r\n    if len(views) != len(orders):\r\n        raise ValueError()\r\n\r\n    all_transforms = (all_forward_transforms\r\n                      if not inverse else all_backward_transforms)\r\n    funcs = [\r\n        all_transforms[group_name][view]\r\n        for view, group_name in zip(views, orders)\r\n    ]\r\n    funcs = funcs if not inverse else funcs[::-1]\r\n\r\n    def aux(val):\r\n        for func in funcs:\r\n            val = func(val)\r\n        return val\r\n\r\n    return aux\r\n\r\n\r\nimport torch\r\nimport numpy as np\r\n\r\n\r\ndef get_permute_transform(view_src: PermuteType,\r\n                          view_dst: PermuteType) -> TransformFuncType:\r\n    \"\"\"Gets transform function from view src to view dst.\"\"\"\r\n\r\n    def transform(x: torch.Tensor) -> torch.Tensor:\r\n        x_view_0 = view_transforms.permutation_inverse_transforms[view_src](x)\r\n        return view_transforms.permutation_transforms[view_dst](\r\n            x_view_0).contiguous()\r\n\r\n    return transform\r\n\r\n\r\ndef permute_inverse(xs: 
Sequence[torch.Tensor],\r\n                    views: Sequence[PermuteType]) -> Sequence[torch.Tensor]:\r\n    \"\"\"Transforms data back to origin view.\"\"\"\r\n    return [get_permute_transform(view, 0)(x) for x, view in zip(xs, views)]\r\n\r\n\r\ndef permute_rand(\r\n    x: torch.Tensor,\r\n    num_samples: int = 2\r\n) -> Tuple[Sequence[torch.Tensor], Sequence[PermuteType]]:\r\n    \"\"\"Samples different transforms of data.\"\"\"\r\n    num_permutes = len(view_transforms.permutation_transforms)\r\n    if num_samples > num_permutes:\r\n        raise ValueError('Duplicate samples.')\r\n    view_dsts = np.random.permutation(num_permutes)[:num_samples].tolist()\r\n    return [get_permute_transform(0, view)(x) for view in view_dsts], view_dsts"
  },
  {
    "path": "Finetune/MM-WHS/main.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport argparse\nimport os\nfrom functools import partial\nimport logging\nimport numpy as np\nimport torch\nimport torch.distributed as dist\nimport torch.multiprocessing as mp\nimport torch.nn.parallel\nimport torch.utils.data.distributed\nfrom optimizers.lr_scheduler import LinearWarmupCosineAnnealingLR\nfrom trainer import run_training\nfrom utils.data_utils import get_loader\n\nfrom monai.inferers import sliding_window_inference\nfrom monai.losses import DiceCELoss\nfrom monai.metrics import DiceMetric\nfrom monai.networks.nets import SwinUNETR\nfrom monai.transforms import Activations, AsDiscrete, Compose\nfrom monai.utils.enums import MetricReduction\n\nos.environ['CUDA_VISIBLE_DEVICES'] = \"1\"\nos.environ['MASTER_ADDR'] = 'localhost'\nos.environ['MASTER_PORT'] = '28890'\n\nimport resource\n\nrlimit = resource.getrlimit(resource.RLIMIT_NOFILE)\nresource.setrlimit(resource.RLIMIT_NOFILE, (8192, rlimit[1]))\nprint('Setting resource limit:', str(resource.getrlimit(resource.RLIMIT_NOFILE)))\n\nparser = argparse.ArgumentParser(description=\"Swin UNETR segmentation pipeline\")\nparser.add_argument(\"--checkpoint\", default=None, help=\"start training from saved checkpoint\")\nparser.add_argument(\"--logdir\", default=\"logs\", type=str, help=\"directory to save the tensorboard logs\")\nparser.add_argument(\n    \"--pretrained_dir\", 
default=\"./pretrained_models/\", type=str, help=\"pretrained checkpoint directory\"\n)\nparser.add_argument(\"--data_dir\", default=\"/data/jiaxin/data/MM-WHS/ct_train/\", type=str, help=\"dataset directory\")\nparser.add_argument(\"--json_list\", default=\"./dataset.json\", type=str, help=\"dataset json file\")\nparser.add_argument(\n    \"--pretrained_checkpoint\",default=\"VoCo_10k.pt\", type=str, help=\"VoCo_10k pretrained model\")\nparser.add_argument(\n    \"--pretrained_model_name\",\n    default=\"model_bestVal.pt\",\n    type=str,\n    help=\"pretrained model name\",\n)\nroi = 64\nparser.add_argument(\"--save_checkpoint\", default=True, help=\"save checkpoint during training\")\nparser.add_argument(\"--max_epochs\", default=1000, type=int, help=\"max number of training epochs\")\nparser.add_argument(\"--batch_size\", default=1, type=int, help=\"number of batch size\")\nparser.add_argument(\"--sw_batch_size\", default=4, type=int, help=\"number of sliding window batch size\")\nparser.add_argument(\"--optim_lr\", default=1e-3, type=float, help=\"optimization learning rate\")\nparser.add_argument(\"--optim_name\", default=\"adamw\", type=str, help=\"optimization algorithm\")\nparser.add_argument(\"--reg_weight\", default=1e-5, type=float, help=\"regularization weight\")\nparser.add_argument(\"--momentum\", default=0.99, type=float, help=\"momentum\")\nparser.add_argument(\"--noamp\", default=True, help=\"do NOT use amp for training\")\nparser.add_argument(\"--val_every\", default=50, type=int, help=\"validation frequency\")\nparser.add_argument(\"--distributed\", action=\"store_true\", help=\"start distributed training\")\nparser.add_argument(\"--world_size\", default=1, type=int, help=\"number of nodes for distributed training\")\nparser.add_argument(\"--rank\", default=0, type=int, help=\"node rank for distributed training\")\nparser.add_argument(\"--dist-url\", default=\"tcp://127.0.0.1:23456\", type=str, help=\"distributed 
url\")\nparser.add_argument(\"--dist-backend\", default=\"nccl\", type=str, help=\"distributed backend\")\nparser.add_argument(\"--norm_name\", default=\"instance\", type=str, help=\"normalization name\")\nparser.add_argument(\"--workers\", default=4, type=int, help=\"number of workers\")\nparser.add_argument(\"--feature_size\", default=48, type=int, help=\"feature size\")\nparser.add_argument(\"--in_channels\", default=1, type=int, help=\"number of input channels\")\nparser.add_argument(\"--out_channels\", default=8, type=int, help=\"number of output channels\")\nparser.add_argument(\"--use_normal_dataset\", default=True, help=\"use monai Dataset class\")\nparser.add_argument(\"--a_min\", default=0.0, type=float, help=\"a_min in ScaleIntensityRanged\")\nparser.add_argument(\"--a_max\", default=1700.0, type=float, help=\"a_max in ScaleIntensityRanged\")\nparser.add_argument(\"--b_min\", default=0.0, type=float, help=\"b_min in ScaleIntensityRanged\")\nparser.add_argument(\"--b_max\", default=1.0, type=float, help=\"b_max in ScaleIntensityRanged\")\nparser.add_argument(\"--space_x\", default=1.5, type=float, help=\"spacing in x direction\")\nparser.add_argument(\"--space_y\", default=1.5, type=float, help=\"spacing in y direction\")\nparser.add_argument(\"--space_z\", default=1.5, type=float, help=\"spacing in z direction\")\nparser.add_argument(\"--roi_x\", default=roi, type=int, help=\"roi size in x direction\")\nparser.add_argument(\"--roi_y\", default=roi, type=int, help=\"roi size in y direction\")\nparser.add_argument(\"--roi_z\", default=roi, type=int, help=\"roi size in z direction\")\nparser.add_argument(\"--dropout_rate\", default=0.0, type=float, help=\"dropout rate\")\nparser.add_argument(\"--dropout_path_rate\", default=0.0, type=float, help=\"drop path rate\")\nparser.add_argument(\"--RandFlipd_prob\", default=0.2, type=float, help=\"RandFlipd aug probability\")\nparser.add_argument(\"--RandRotate90d_prob\", default=0.2, type=float, 
help=\"RandRotate90d aug probability\")\nparser.add_argument(\"--RandScaleIntensityd_prob\", default=0.1, type=float, help=\"RandScaleIntensityd aug probability\")\nparser.add_argument(\"--RandShiftIntensityd_prob\", default=0.1, type=float, help=\"RandShiftIntensityd aug probability\")\nparser.add_argument(\"--infer_overlap\", default=0.75, type=float, help=\"sliding window inference overlap\")\nparser.add_argument(\"--lrschedule\", default=\"warmup_cosine\", type=str, help=\"type of learning rate scheduler\")\nparser.add_argument(\"--warmup_epochs\", default=50, type=int, help=\"number of warmup epochs\")\nparser.add_argument(\"--resume_ckpt\", action=\"store_true\", help=\"resume training from pretrained checkpoint\")\nparser.add_argument(\"--smooth_dr\", default=1e-6, type=float, help=\"constant added to dice denominator to avoid nan\")\nparser.add_argument(\"--smooth_nr\", default=0.0, type=float, help=\"constant added to dice numerator to avoid zero\")\nparser.add_argument(\"--use_checkpoint\", default=True, help=\"use gradient checkpointing to save memory\")\nparser.add_argument(\"--use_ssl_pretrained\", default=True, help=\"use self-supervised pretrained weights\")\nparser.add_argument(\"--spatial_dims\", default=3, type=int, help=\"spatial dimension of input data\")\nparser.add_argument(\"--squared_dice\", action=\"store_true\", help=\"use squared Dice\")\n\n\ndef main():\n    args = parser.parse_args()\n    args.amp = not args.noamp\n    if args.distributed:\n        args.ngpus_per_node = torch.cuda.device_count()\n        print(\"Found total gpus\", args.ngpus_per_node)\n        args.world_size = args.ngpus_per_node * args.world_size\n        mp.spawn(main_worker, nprocs=args.ngpus_per_node, args=(args,))\n    else:\n        main_worker(gpu=0, args=args)\n\n\ndef main_worker(gpu, args):\n    if args.distributed:\n        torch.multiprocessing.set_start_method(\"fork\", force=True)\n    np.set_printoptions(formatter={\"float\": \"{: 0.3f}\".format}, 
suppress=True)\n    args.gpu = gpu\n    if args.distributed:\n        args.rank = args.rank * args.ngpus_per_node + gpu\n        dist.init_process_group(\n            backend=args.dist_backend, init_method=args.dist_url, world_size=args.world_size, rank=args.rank\n        )\n    torch.cuda.set_device(args.gpu)\n\n    torch.backends.cudnn.enabled = True\n    torch.backends.cudnn.benchmark = True\n    args.test_mode = False\n    loader = get_loader(args)\n    print(args.rank, \" gpu\", args.gpu)\n    if args.rank == 0:\n        print(\"Batch size is:\", args.batch_size, \"epochs\", args.max_epochs)\n    inf_size = [args.roi_x, args.roi_y, args.roi_z]\n\n    if args.rank == 0:\n        os.makedirs(args.logdir, exist_ok=True)\n    logger = init_log('global', logging.INFO)\n    logger.propagate = 0\n\n    pretrained_dir = args.pretrained_dir\n    model = SwinUNETR(\n        img_size=(args.roi_x, args.roi_y, args.roi_z),\n        in_channels=args.in_channels,\n        out_channels=args.out_channels,\n        feature_size=args.feature_size,\n        drop_rate=0.0,\n        attn_drop_rate=0.0,\n        dropout_path_rate=args.dropout_path_rate,\n        use_checkpoint=args.use_checkpoint,\n    )\n\n    if args.resume_ckpt:\n        model_dict = torch.load(os.path.join(pretrained_dir, args.pretrained_model_name))[\"state_dict\"]\n        model.load_state_dict(model_dict)\n        print(\"Use pretrained weights\")\n\n    if args.use_ssl_pretrained:\n        try:\n            # model_VoCoEMA.pt\n            # model_dict = torch.load(\"./pretrained_models/supervised_suprem_swinunetr_2100.pth\", map_location=torch.device('cpu'))\n            model_dict = torch.load(args.pretrained_checkpoint,\n                                    map_location=torch.device('cpu'))\n\n            state_dict = model_dict\n            # state_dict = model_dict['net']\n            # fix potential differences in state dict keys from pre-training to\n            # fine-tuning\n            if \"module.\" 
in list(state_dict.keys())[0]:\n                print(\"Tag 'module.' found in state dict - fixing!\")\n                for key in list(state_dict.keys()):\n                    state_dict[key.replace(\"module.\", \"\")] = state_dict.pop(key)\n            if \"swin_vit\" in list(state_dict.keys())[0]:\n                print(\"Tag 'swin_vit' found in state dict - fixing!\")\n                for key in list(state_dict.keys()):\n                    state_dict[key.replace(\"swin_vit\", \"swinViT\")] = state_dict.pop(key)\n            # We now load model weights, setting param `strict` to False, i.e.:\n            # this load the encoder weights (Swin-ViT, SSL pre-trained), but leaves\n            # the decoder weights untouched (CNN UNet decoder).\n            model.load_state_dict(state_dict, strict=False)\n            print(\"Using pretrained voco ema self-supervised Swin UNETR backbone weights !\")\n        except ValueError:\n            raise ValueError(\"Self-supervised pre-trained weights not available for\" + str(args.model_name))\n\n    if args.squared_dice:\n        dice_loss = DiceCELoss(\n            to_onehot_y=True, softmax=True, squared_pred=True, smooth_nr=args.smooth_nr, smooth_dr=args.smooth_dr\n        )\n    else:\n        dice_loss = DiceCELoss(include_background=False, to_onehot_y=True, softmax=True)\n\n    post_label = AsDiscrete(to_onehot=args.out_channels)\n    post_pred = AsDiscrete(argmax=True, to_onehot=args.out_channels)\n    dice_acc = DiceMetric(include_background=False, reduction=MetricReduction.MEAN, get_not_nans=True)\n    model_inferer = partial(\n        sliding_window_inference,\n        roi_size=inf_size,\n        sw_batch_size=args.sw_batch_size,\n        predictor=model,\n        overlap=args.infer_overlap,\n    )\n\n    pytorch_total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)\n    print(\"Total parameters count\", pytorch_total_params)\n\n    best_acc = 0\n    start_epoch = 0\n\n    if args.checkpoint 
is not None:\n        checkpoint = torch.load(args.checkpoint, map_location=\"cpu\")\n        from collections import OrderedDict\n\n        new_state_dict = OrderedDict()\n        for k, v in checkpoint[\"state_dict\"].items():\n            new_state_dict[k.replace(\"backbone.\", \"\")] = v\n        model.load_state_dict(new_state_dict, strict=False)\n        if \"epoch\" in checkpoint:\n            start_epoch = checkpoint[\"epoch\"]\n        if \"best_acc\" in checkpoint:\n            best_acc = checkpoint[\"best_acc\"]\n        print(\"=> loaded checkpoint '{}' (epoch {}) (bestacc {})\".format(args.checkpoint, start_epoch, best_acc))\n\n    model.cuda(args.gpu)\n\n    if args.distributed:\n        torch.cuda.set_device(args.gpu)\n        if args.norm_name == \"batch\":\n            model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model)\n        model.cuda(args.gpu)\n        model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu], output_device=args.gpu)\n    if args.optim_name == \"adam\":\n        optimizer = torch.optim.Adam(model.parameters(), lr=args.optim_lr, weight_decay=args.reg_weight)\n    elif args.optim_name == \"adamw\":\n        optimizer = torch.optim.AdamW(model.parameters(), lr=args.optim_lr, amsgrad=True)\n    elif args.optim_name == \"sgd\":\n        optimizer = torch.optim.SGD(\n            model.parameters(), lr=args.optim_lr, momentum=args.momentum, nesterov=True, weight_decay=args.reg_weight\n        )\n    else:\n        raise ValueError(\"Unsupported Optimization Procedure: \" + str(args.optim_name))\n\n    if args.lrschedule == \"warmup_cosine\":\n        scheduler = LinearWarmupCosineAnnealingLR(\n            optimizer, warmup_epochs=args.warmup_epochs, max_epochs=args.max_epochs\n        )\n    elif args.lrschedule == \"cosine_anneal\":\n        scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=args.max_epochs)\n        if args.checkpoint is not None:\n            
scheduler.step(epoch=start_epoch)\n    else:\n        scheduler = None\n    accuracy = run_training(\n        model=model,\n        train_loader=loader[0],\n        val_loader=loader[1],\n        optimizer=optimizer,\n        loss_func=dice_loss,\n        acc_func=dice_acc,\n        args=args,\n        model_inferer=model_inferer,\n        scheduler=scheduler,\n        start_epoch=start_epoch,\n        post_label=post_label,\n        post_pred=post_pred,\n    )\n    return accuracy\n\n\nlogs = set()\n\n\ndef init_log(name, level=logging.INFO):\n    if (name, level) in logs:\n        return\n    logs.add((name, level))\n    logger = logging.getLogger(name)\n    logger.setLevel(level)\n    ch = logging.StreamHandler()\n    ch.setLevel(level)\n    if \"SLURM_PROCID\" in os.environ:\n        rank = int(os.environ[\"SLURM_PROCID\"])\n        logger.addFilter(lambda record: rank == 0)\n    else:\n        rank = 0\n    format_str = \"[%(asctime)s][%(levelname)8s] %(message)s\"\n    formatter = logging.Formatter(format_str)\n    ch.setFormatter(formatter)\n    logger.addHandler(ch)\n    return logger\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },
  {
    "path": "Finetune/MM-WHS/optimizers/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/MM-WHS/optimizers/lr_scheduler.py",
    "content": "# Copyright 2020 - 2021 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nimport warnings\nfrom typing import List\n\nfrom torch import nn as nn\nfrom torch.optim import Adam, Optimizer\nfrom torch.optim.lr_scheduler import LambdaLR, _LRScheduler\n\n__all__ = [\"LinearLR\", \"ExponentialLR\"]\n\n\nclass _LRSchedulerMONAI(_LRScheduler):\n    \"\"\"Base class for increasing the learning rate between two boundaries over a number\n    of iterations\"\"\"\n\n    def __init__(self, optimizer: Optimizer, end_lr: float, num_iter: int, last_epoch: int = -1) -> None:\n        \"\"\"\n        Args:\n            optimizer: wrapped optimizer.\n            end_lr: the final learning rate.\n            num_iter: the number of iterations over which the test occurs.\n            last_epoch: the index of last epoch.\n        Returns:\n            None\n        \"\"\"\n        self.end_lr = end_lr\n        self.num_iter = num_iter\n        super(_LRSchedulerMONAI, self).__init__(optimizer, last_epoch)\n\n\nclass LinearLR(_LRSchedulerMONAI):\n    \"\"\"Linearly increases the learning rate between two boundaries over a number of\n    iterations.\n    \"\"\"\n\n    def get_lr(self):\n        r = self.last_epoch / (self.num_iter - 1)\n        return [base_lr + r * (self.end_lr - base_lr) for base_lr in self.base_lrs]\n\n\nclass ExponentialLR(_LRSchedulerMONAI):\n    \"\"\"Exponentially increases the learning rate between two boundaries over a 
number of\n    iterations.\n    \"\"\"\n\n    def get_lr(self):\n        r = self.last_epoch / (self.num_iter - 1)\n        return [base_lr * (self.end_lr / base_lr) ** r for base_lr in self.base_lrs]\n\n\nclass WarmupCosineSchedule(LambdaLR):\n    \"\"\"Linear warmup and then cosine decay.\n    Based on https://huggingface.co/ implementation.\n    \"\"\"\n\n    def __init__(\n        self, optimizer: Optimizer, warmup_steps: int, t_total: int, cycles: float = 0.5, last_epoch: int = -1\n    ) -> None:\n        \"\"\"\n        Args:\n            optimizer: wrapped optimizer.\n            warmup_steps: number of warmup iterations.\n            t_total: total number of training iterations.\n            cycles: cosine cycles parameter.\n            last_epoch: the index of last epoch.\n        Returns:\n            None\n        \"\"\"\n        self.warmup_steps = warmup_steps\n        self.t_total = t_total\n        self.cycles = cycles\n        super(WarmupCosineSchedule, self).__init__(optimizer, self.lr_lambda, last_epoch)\n\n    def lr_lambda(self, step):\n        if step < self.warmup_steps:\n            return float(step) / float(max(1.0, self.warmup_steps))\n        progress = float(step - self.warmup_steps) / float(max(1, self.t_total - self.warmup_steps))\n        return max(0.0, 0.5 * (1.0 + math.cos(math.pi * float(self.cycles) * 2.0 * progress)))\n\n\nclass LinearWarmupCosineAnnealingLR(_LRScheduler):\n    def __init__(\n        self,\n        optimizer: Optimizer,\n        warmup_epochs: int,\n        max_epochs: int,\n        warmup_start_lr: float = 0.0,\n        eta_min: float = 0.0,\n        last_epoch: int = -1,\n    ) -> None:\n        \"\"\"\n        Args:\n            optimizer (Optimizer): Wrapped optimizer.\n            warmup_epochs (int): Maximum number of iterations for linear warmup\n            max_epochs (int): Maximum number of iterations\n            warmup_start_lr (float): Learning rate to start the linear warmup. 
Default: 0.\n            eta_min (float): Minimum learning rate. Default: 0.\n            last_epoch (int): The index of last epoch. Default: -1.\n        \"\"\"\n        self.warmup_epochs = warmup_epochs\n        self.max_epochs = max_epochs\n        self.warmup_start_lr = warmup_start_lr\n        self.eta_min = eta_min\n\n        super(LinearWarmupCosineAnnealingLR, self).__init__(optimizer, last_epoch)\n\n    def get_lr(self) -> List[float]:\n        \"\"\"\n        Compute learning rate using chainable form of the scheduler\n        \"\"\"\n        if not self._get_lr_called_within_step:\n            warnings.warn(\n                \"To get the last learning rate computed by the scheduler, \" \"please use `get_last_lr()`.\", UserWarning\n            )\n\n        if self.last_epoch == 0:\n            return [self.warmup_start_lr] * len(self.base_lrs)\n        elif self.last_epoch < self.warmup_epochs:\n            return [\n                group[\"lr\"] + (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)\n                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)\n            ]\n        elif self.last_epoch == self.warmup_epochs:\n            return self.base_lrs\n        elif (self.last_epoch - 1 - self.max_epochs) % (2 * (self.max_epochs - self.warmup_epochs)) == 0:\n            return [\n                group[\"lr\"]\n                + (base_lr - self.eta_min) * (1 - math.cos(math.pi / (self.max_epochs - self.warmup_epochs))) / 2\n                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)\n            ]\n\n        return [\n            (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))\n            / (\n                1\n                + math.cos(\n                    math.pi * (self.last_epoch - self.warmup_epochs - 1) / (self.max_epochs - self.warmup_epochs)\n                )\n            )\n            * (group[\"lr\"] - 
self.eta_min)\n            + self.eta_min\n            for group in self.optimizer.param_groups\n        ]\n\n    def _get_closed_form_lr(self) -> List[float]:\n        \"\"\"\n        Called when epoch is passed as a param to the `step` function of the scheduler.\n        \"\"\"\n        if self.last_epoch < self.warmup_epochs:\n            return [\n                self.warmup_start_lr + self.last_epoch * (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)\n                for base_lr in self.base_lrs\n            ]\n\n        return [\n            self.eta_min\n            + 0.5\n            * (base_lr - self.eta_min)\n            * (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))\n            for base_lr in self.base_lrs\n        ]\n"
  },
  {
    "path": "Finetune/MM-WHS/pretrained_models/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/MM-WHS/test.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport argparse\nimport os\nfrom functools import partial\nimport nibabel as nib\nimport numpy as np\nimport torch\nfrom torch.cuda.amp import GradScaler, autocast\nfrom utils.data_utils import get_loader\nfrom utils.utils import dice, resample_3d\nfrom utils.utils import AverageMeter, distributed_all_gather\n\nfrom monai.inferers import sliding_window_inference\nfrom monai.data import decollate_batch\nfrom monai.losses import DiceCELoss\nfrom monai.metrics import DiceMetric\nfrom monai.networks.nets import SwinUNETR\nfrom monai.transforms import *\nfrom monai.utils.enums import MetricReduction\nfrom monai import data, transforms\nfrom monai.data import *\nfrom utils.utils import *\nimport resource\n\nrlimit = resource.getrlimit(resource.RLIMIT_NOFILE)\nresource.setrlimit(resource.RLIMIT_NOFILE, (8192, rlimit[1]))\nprint('Setting resource limit:', str(resource.getrlimit(resource.RLIMIT_NOFILE)))\n\nos.environ['CUDA_VISIBLE_DEVICES'] = \"2\"\nos.environ['MASTER_ADDR'] = 'localhost'\nos.environ['MASTER_PORT'] = '28890'\n\nparser = argparse.ArgumentParser(description=\"Swin UNETR segmentation pipeline\")\nparser.add_argument(\n    \"--pretrained_dir\", default=\"./runs/logs_0.9054/\", type=str, help=\"pretrained checkpoint directory\"\n)\nparser.add_argument(\"--data_dir\", default=\"/data/jiaxin/data/MM-WHS/ct_train/\", type=str, help=\"dataset 
directory\")\nparser.add_argument(\"--exp_name\", default=\"MMWHS\", type=str, help=\"experiment name\")\nparser.add_argument(\n    \"--trained_pth\", default=\"./runs/logs/model.pt\", type=str, help=\"your trained checkpoint directory\")\nparser.add_argument(\n    \"--save_prediction_path\", default=\"./pred/MM-WHS/\", type=str, help=\"test_prediction_path\")\n\nparser.add_argument(\n    \"--pretrained_model_name\",\n    default=\"model_0.9054.pt\",\n    type=str,\n    help=\"pretrained model name\",\n)\nroi=64\nparser.add_argument(\"--feature_size\", default=48, type=int, help=\"feature size\")\nparser.add_argument(\"--batch_size\", default=1, type=int, help=\"number of batch size\")\nparser.add_argument(\"--sw_batch_size\", default=2, type=int, help=\"number of sliding window batch size\")\nparser.add_argument(\"--infer_overlap\", default=0.7, type=float, help=\"sliding window inference overlap\")\nparser.add_argument(\"--in_channels\", default=1, type=int, help=\"number of input channels\")\nparser.add_argument(\"--out_channels\", default=8, type=int, help=\"number of output channels\")\nparser.add_argument(\"--a_min\", default=-1000.0, type=float, help=\"a_min in ScaleIntensityRanged\")\nparser.add_argument(\"--a_max\", default=1000.0, type=float, help=\"a_max in ScaleIntensityRanged\")\nparser.add_argument(\"--b_min\", default=0.0, type=float, help=\"b_min in ScaleIntensityRanged\")\nparser.add_argument(\"--b_max\", default=1.0, type=float, help=\"b_max in ScaleIntensityRanged\")\nparser.add_argument(\"--space_x\", default=1.5, type=float, help=\"spacing in x direction\")\nparser.add_argument(\"--space_y\", default=1.5, type=float, help=\"spacing in y direction\")\nparser.add_argument(\"--space_z\", default=1.5, type=float, help=\"spacing in z direction\")\nparser.add_argument(\"--roi_x\", default=roi, type=int, help=\"roi size in x direction\")\nparser.add_argument(\"--roi_y\", default=roi, type=int, help=\"roi size in y 
direction\")\nparser.add_argument(\"--roi_z\", default=roi, type=int, help=\"roi size in z direction\")\nparser.add_argument(\"--dropout_rate\", default=0.0, type=float, help=\"dropout rate\")\nparser.add_argument(\"--distributed\", action=\"store_true\", help=\"start distributed training\")\nparser.add_argument(\"--workers\", default=8, type=int, help=\"number of workers\")\nparser.add_argument(\"--RandFlipd_prob\", default=0.2, type=float, help=\"RandFlipd aug probability\")\nparser.add_argument(\"--RandRotate90d_prob\", default=0.2, type=float, help=\"RandRotate90d aug probability\")\nparser.add_argument(\"--RandScaleIntensityd_prob\", default=0.1, type=float, help=\"RandScaleIntensityd aug probability\")\nparser.add_argument(\"--RandShiftIntensityd_prob\", default=0.1, type=float, help=\"RandShiftIntensityd aug probability\")\nparser.add_argument(\"--spatial_dims\", default=3, type=int, help=\"spatial dimension of input data\")\nparser.add_argument(\"--use_checkpoint\", default=True, help=\"use gradient checkpointing to save memory\")\n\n\ndef get_test_loader(args):\n    \"\"\"\n    Creates training transforms, constructs a dataset, and returns a dataloader.\n\n    Args:\n        args: Command line arguments containing dataset paths and hyperparameters.\n    \"\"\"\n    test_transforms = transforms.Compose([\n        LoadImaged(keys=[\"image\"]),\n        EnsureChannelFirstd(keys=[\"image\"]),\n        Orientationd(keys=[\"image\"], axcodes=\"RAS\"),\n        Spacingd(keys=[\"image\"], pixdim=(args.space_x, args.space_y, args.space_z),\n                 mode=(\"bilinear\")),\n        ScaleIntensityRanged(\n            keys=[\"image\"],\n            a_min=args.a_min,\n            a_max=args.a_max,\n            b_min=0.0,\n            b_max=1.0,\n            clip=True,\n        ),\n        CropForegroundd(keys=[\"image\"], source_key=\"image\"),\n        SpatialPadd(keys=[\"image\"], spatial_size=(args.roi_x, args.roi_y, args.roi_z),\n                    
mode='constant'),\n    ])\n\n    # constructing training dataset\n    test_img = []\n    test_name = []\n\n    dataset_list = os.listdir(args.data_dir)\n    check_dir(args.save_prediction_path)\n    already_exist_list = os.listdir(args.save_prediction_path)\n\n    for item in dataset_list:\n        if item not in already_exist_list and item.endswith('_image.nii.gz'):\n            name = item\n            test_img_path = os.path.join(args.data_dir, name)\n            test_img.append(test_img_path)\n            test_name.append(name)\n\n    data_dicts_test = [{'image': image, 'name': name}\n                        for image, name in zip(test_img, test_name)]\n\n    print('test len {}'.format(len(data_dicts_test)))\n\n    test_ds = Dataset(data=data_dicts_test, transform=test_transforms)\n    test_loader = DataLoader(\n        test_ds, batch_size=1, shuffle=False, num_workers=args.workers, sampler=None, pin_memory=True\n    )\n    return test_loader, test_transforms\n\n\ndef main():\n    args = parser.parse_args()\n\n    test_loader, test_transforms = get_test_loader(args)\n\n    model = SwinUNETR(\n        img_size=(args.roi_x, args.roi_y, args.roi_z),\n        in_channels=args.in_channels,\n        out_channels=args.out_channels,\n        feature_size=args.feature_size,\n        drop_rate=0.0,\n        attn_drop_rate=0.0,\n        dropout_path_rate=0.0,\n        use_checkpoint=args.use_checkpoint,\n        use_v2=True\n    )\n    inf_size = [args.roi_x, args.roi_y, args.roi_z]\n    model_inferer = partial(\n        sliding_window_inference,\n        roi_size=inf_size,\n        sw_batch_size=args.sw_batch_size,\n        predictor=model,\n        overlap=args.infer_overlap,\n    )\n\n    device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n    model_dict = torch.load(args.trained_pth)[\"state_dict\"]\n    model.load_state_dict(model_dict, strict=True)\n    model.eval()\n    model.to(device)\n\n    # enable cuDNN benchmark\n    
torch.backends.cudnn.benchmark = True\n\n    post_transforms = Compose([EnsureTyped(keys=[\"pred\"]),\n                               Invertd(keys=[\"pred\"],\n                                       transform=test_transforms,\n                                       orig_keys=\"image\",\n                                       meta_keys=\"pred_meta_dict\",\n                                       orig_meta_keys=\"image_meta_dict\",\n                                       meta_key_postfix=\"meta_dict\",\n                                       nearest_interp=True,\n                                       to_tensor=True),\n                               AsDiscreted(keys=\"pred\", argmax=False, to_onehot=None),\n                               SaveImaged(keys=\"pred\", meta_keys=\"pred_meta_dict\", output_dir=args.save_prediction_path,\n                                          separate_folder=False, folder_layout=None,\n                                          resample=False),\n                               ])\n\n    with torch.no_grad():\n        for idx, batch_data in enumerate(test_loader):\n            torch.cuda.empty_cache()\n\n            data = batch_data[\"image\"]\n            data = data.cuda()\n\n            name = batch_data['name'][0]\n\n            with autocast(enabled=True):\n                logits = model_inferer(data)\n\n            logits = logits.argmax(1)\n            output = logits\n\n            print(torch.unique(output))\n\n            batch_data['pred'] = output.unsqueeze(1)\n            batch_data = [post_transforms(i) for i in\n                          decollate_batch(batch_data)]\n\n            os.rename(os.path.join(args.save_prediction_path, name[:-7]+'_trans.nii.gz'),\n                      os.path.join(args.save_prediction_path, name))\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },
  {
    "path": "Finetune/MM-WHS/train.sh",
    "content": "now=$(date +\"%Y%m%d_%H%M%S\")\nlogdir=runs/logs\nmkdir -p $logdir\n\ntorchrun --master_port=21120 --max-restart=10 main.py \\\n    --logdir $logdir | tee $logdir/$now.txt"
  },
  {
    "path": "Finetune/MM-WHS/trainer.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport os\nimport shutil\nimport time\n\nimport numpy as np\nimport torch\nimport torch.nn.parallel\nimport torch.utils.data.distributed\nfrom tensorboardX import SummaryWriter\nfrom torch.cuda.amp import GradScaler, autocast\nfrom utils.utils import AverageMeter, distributed_all_gather\n\nfrom monai.data import decollate_batch\n\n\ndef train_epoch(model, loader, optimizer, scaler, epoch, loss_func, args):\n    model.train()\n    start_time = time.time()\n    run_loss = AverageMeter()\n    for idx, batch_data in enumerate(loader):\n        if isinstance(batch_data, list):\n            data, target = batch_data\n        else:\n            data, target = batch_data[\"image\"], batch_data[\"label\"]\n        data, target = data.cuda(), target.cuda()\n        for param in model.parameters():\n            param.grad = None\n        with autocast(enabled=args.amp):\n            logits = model(data)\n            loss = loss_func(logits, target)\n            #\n        if args.amp:\n            scaler.scale(loss).backward()\n            scaler.step(optimizer)\n            scaler.update()\n        else:\n            loss.backward()\n            optimizer.step()\n        if args.distributed:\n            loss_list = distributed_all_gather([loss], out_numpy=True, is_valid=idx < loader.sampler.valid_length)\n            run_loss.update(\n                
np.mean(np.mean(np.stack(loss_list, axis=0), axis=0), axis=0), n=args.batch_size * args.world_size\n            )\n        else:\n            run_loss.update(loss.item(), n=args.batch_size)\n\n        lr = optimizer.param_groups[0][\"lr\"]\n        if args.rank == 0:\n            print(\n                \"Epoch {}/{} {}/{}\".format(epoch, args.max_epochs, idx, len(loader)),\n                \"loss: {:.4f}\".format(run_loss.avg),\n                \"lr: {:.8f}\".format(lr),\n                \"time {:.2f}s\".format(time.time() - start_time),\n            )\n        start_time = time.time()\n    for param in model.parameters():\n        param.grad = None\n    return run_loss.avg\n\n\ndef val_epoch(model, loader, epoch, acc_func, args, model_inferer=None, post_label=None, post_pred=None):\n    model.eval()\n    run_acc = AverageMeter()\n    start_time = time.time()\n    with torch.no_grad():\n        for idx, batch_data in enumerate(loader):\n            if isinstance(batch_data, list):\n                data, target = batch_data\n            else:\n                data, target = batch_data[\"image\"], batch_data[\"label\"]\n            data, target = data.cuda(), target.cuda()\n            with autocast(enabled=args.amp):\n                if model_inferer is not None:\n                    logits = model_inferer(data)\n                else:\n                    logits = model(data)\n            if not logits.is_cuda:\n                target = target.cpu()\n            val_labels_list = decollate_batch(target)\n            val_labels_convert = [post_label(val_label_tensor) for val_label_tensor in val_labels_list]\n            val_outputs_list = decollate_batch(logits)\n            val_output_convert = [post_pred(val_pred_tensor) for val_pred_tensor in val_outputs_list]\n            acc_func.reset()\n            acc_func(y_pred=val_output_convert, y=val_labels_convert)\n            acc, not_nans = acc_func.aggregate()\n            acc = acc.cuda(args.rank)\n\n            
if args.distributed:\n                acc_list, not_nans_list = distributed_all_gather(\n                    [acc, not_nans], out_numpy=True, is_valid=idx < loader.sampler.valid_length\n                )\n                for al, nl in zip(acc_list, not_nans_list):\n                    run_acc.update(al, n=nl)\n\n            else:\n                run_acc.update(acc.cpu().numpy(), n=not_nans.cpu().numpy())\n\n            if args.rank == 0:\n                avg_acc = np.mean(run_acc.avg)\n                print(\n                    \"Val {}/{} {}/{}\".format(epoch, args.max_epochs, idx, len(loader)),\n                    \"acc\",\n                    avg_acc,\n                    \"time {:.2f}s\".format(time.time() - start_time),\n                )\n            start_time = time.time()\n    return run_acc.avg\n\n\ndef save_checkpoint(model, epoch, args, filename=\"model.pt\", best_acc=0, optimizer=None, scheduler=None):\n    state_dict = model.state_dict() if not args.distributed else model.module.state_dict()\n    save_dict = {\"epoch\": epoch, \"best_acc\": best_acc, \"state_dict\": state_dict}\n    if optimizer is not None:\n        save_dict[\"optimizer\"] = optimizer.state_dict()\n    if scheduler is not None:\n        save_dict[\"scheduler\"] = scheduler.state_dict()\n    filename = os.path.join(args.logdir, filename)\n    torch.save(save_dict, filename)\n    print(\"Saving checkpoint\", filename)\n\n\ndef run_training(\n        model,\n        train_loader,\n        val_loader,\n        optimizer,\n        loss_func,\n        acc_func,\n        args,\n        model_inferer=None,\n        scheduler=None,\n        start_epoch=0,\n        post_label=None,\n        post_pred=None,\n):\n    writer = None\n    if args.logdir is not None and args.rank == 0:\n        writer = SummaryWriter(log_dir=args.logdir)\n        if args.rank == 0:\n            print(\"Writing Tensorboard logs to \", args.logdir)\n    scaler = None\n    if args.amp:\n        scaler = 
GradScaler()\n    val_acc_max = 0.0\n    for epoch in range(start_epoch, args.max_epochs):\n        if args.distributed:\n            train_loader.sampler.set_epoch(epoch)\n            torch.distributed.barrier()\n        print(args.rank, time.ctime(), \"Epoch:\", epoch)\n        epoch_time = time.time()\n        train_loss = train_epoch(\n            model, train_loader, optimizer, scaler=scaler, epoch=epoch, loss_func=loss_func, args=args\n        )\n        if args.rank == 0:\n            print(\n                \"Final training  {}/{}\".format(epoch, args.max_epochs - 1),\n                \"loss: {:.4f}\".format(train_loss),\n                \"time {:.2f}s\".format(time.time() - epoch_time),\n            )\n        if args.rank == 0 and writer is not None:\n            writer.add_scalar(\"train_loss\", train_loss, epoch)\n        b_new_best = False\n        if (epoch + 1) % args.val_every == 0:\n            if args.distributed:\n                torch.distributed.barrier()\n            epoch_time = time.time()\n            val_avg_acc = val_epoch(\n                model,\n                val_loader,\n                epoch=epoch,\n                acc_func=acc_func,\n                model_inferer=model_inferer,\n                args=args,\n                post_label=post_label,\n                post_pred=post_pred,\n            )\n\n            val_avg_acc = np.mean(val_avg_acc)\n\n            if args.rank == 0:\n                print(\n                    \"Final validation  {}/{}\".format(epoch, args.max_epochs - 1),\n                    \"acc\",\n                    val_avg_acc,\n                    \"time {:.2f}s\".format(time.time() - epoch_time),\n                )\n                if writer is not None:\n                    writer.add_scalar(\"val_acc\", val_avg_acc, epoch)\n                if val_avg_acc > val_acc_max:\n                    print(\"new best ({:.6f} --> {:.6f}). 
\".format(val_acc_max, val_avg_acc))\n                    val_acc_max = val_avg_acc\n                    b_new_best = True\n                    if args.rank == 0 and args.logdir is not None and args.save_checkpoint:\n                        save_checkpoint(\n                            model, epoch, args, best_acc=val_acc_max, optimizer=optimizer, scheduler=scheduler\n                        )\n            if args.rank == 0 and args.logdir is not None and args.save_checkpoint:\n                save_checkpoint(model, epoch, args, best_acc=val_acc_max, filename=\"model_final.pt\")\n                if b_new_best:\n                    print(\"Copying to model.pt new best model!!!!\")\n                    shutil.copyfile(os.path.join(args.logdir, \"model_final.pt\"), os.path.join(args.logdir, \"model.pt\"))\n\n        if scheduler is not None:\n            scheduler.step()\n\n    print(\"Training Finished !, Best Accuracy: \", val_acc_max)\n\n    return val_acc_max\n"
  },
  {
    "path": "Finetune/MM-WHS/utils/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/MM-WHS/utils/data_utils.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nimport os\nimport pickle\nimport numpy as np\nimport torch\nimport itertools as it\nfrom monai import data, transforms\nfrom monai.data import *\nfrom monai.transforms import *\n\n\nclass Sampler(torch.utils.data.Sampler):\n    def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, make_even=True):\n        if num_replicas is None:\n            if not torch.distributed.is_available():\n                raise RuntimeError(\"Requires distributed package to be available\")\n            num_replicas = torch.distributed.get_world_size()\n        if rank is None:\n            if not torch.distributed.is_available():\n                raise RuntimeError(\"Requires distributed package to be available\")\n            rank = torch.distributed.get_rank()\n        self.shuffle = shuffle\n        self.make_even = make_even\n        self.dataset = dataset\n        self.num_replicas = num_replicas\n        self.rank = rank\n        self.epoch = 0\n        self.num_samples = int(math.ceil(len(self.dataset) * 1.0 / self.num_replicas))\n        self.total_size = self.num_samples * self.num_replicas\n        indices = list(range(len(self.dataset)))\n        self.valid_length = len(indices[self.rank: self.total_size: self.num_replicas])\n\n    def __iter__(self):\n        if self.shuffle:\n            g = torch.Generator()\n            
g.manual_seed(self.epoch)\n            indices = torch.randperm(len(self.dataset), generator=g).tolist()\n        else:\n            indices = list(range(len(self.dataset)))\n        if self.make_even:\n            if len(indices) < self.total_size:\n                if self.total_size - len(indices) < len(indices):\n                    indices += indices[: (self.total_size - len(indices))]\n                else:\n                    extra_ids = np.random.randint(low=0, high=len(indices), size=self.total_size - len(indices))\n                    indices += [indices[ids] for ids in extra_ids]\n            assert len(indices) == self.total_size\n        indices = indices[self.rank: self.total_size: self.num_replicas]\n        self.num_samples = len(indices)\n        return iter(indices)\n\n    def __len__(self):\n        return self.num_samples\n\n    def set_epoch(self, epoch):\n        self.epoch = epoch\n\n\ndef get_loader(args):\n    data_dir = args.data_dir\n    datalist_json = args.json_list\n    train_transform = transforms.Compose(\n        [\n            transforms.LoadImaged(keys=[\"image\", \"label\"]),\n            transforms.EnsureChannelFirstd(keys=[\"image\", \"label\"]),\n            transforms.Orientationd(keys=[\"image\", \"label\"], axcodes=\"RAS\"),\n            transforms.Spacingd(\n                keys=[\"image\", \"label\"], pixdim=(args.space_x, args.space_y, args.space_z), mode=(\"bilinear\", \"nearest\")\n            ),\n            transforms.ScaleIntensityRanged(\n                keys=[\"image\"], a_min=args.a_min, a_max=args.a_max, b_min=args.b_min, b_max=args.b_max, clip=True\n            ),\n            transforms.CropForegroundd(keys=[\"image\", \"label\"], source_key=\"image\"),\n            Convert_WHS_label(keys=\"label\"),\n            transforms.RandCropByPosNegLabeld(\n                keys=[\"image\", \"label\"],\n                label_key=\"label\",\n                spatial_size=(args.roi_x, args.roi_y, args.roi_z),\n             
   pos=9,\n                neg=1,\n                num_samples=args.sw_batch_size,\n                image_key=\"image\",\n                image_threshold=0,\n            ),\n            transforms.RandFlipd(keys=[\"image\", \"label\"], prob=args.RandFlipd_prob, spatial_axis=0),\n            transforms.RandFlipd(keys=[\"image\", \"label\"], prob=args.RandFlipd_prob, spatial_axis=1),\n            transforms.RandFlipd(keys=[\"image\", \"label\"], prob=args.RandFlipd_prob, spatial_axis=2),\n            transforms.RandRotate90d(keys=[\"image\", \"label\"], prob=args.RandRotate90d_prob, max_k=3),\n            transforms.ToTensord(keys=[\"image\", \"label\"]),\n        ]\n    )\n    val_transform = transforms.Compose(\n        [\n            transforms.LoadImaged(keys=[\"image\", \"label\"]),\n            transforms.EnsureChannelFirstd(keys=[\"image\", \"label\"]),\n            transforms.Orientationd(keys=[\"image\", \"label\"], axcodes=\"RAS\"),\n            transforms.Spacingd(\n                keys=[\"image\", \"label\"], pixdim=(args.space_x, args.space_y, args.space_z), mode=(\"bilinear\", \"nearest\")\n            ),\n            transforms.ScaleIntensityRanged(\n                keys=[\"image\"], a_min=args.a_min, a_max=args.a_max, b_min=args.b_min, b_max=args.b_max, clip=True\n            ),\n            transforms.CropForegroundd(keys=[\"image\", \"label\"], source_key=\"image\"),\n            Convert_WHS_label(keys=\"label\"),\n            transforms.ToTensord(keys=[\"image\", \"label\"]),\n        ]\n    )\n\n    datalist = load_decathlon_datalist(datalist_json, True, \"training\", base_dir=data_dir)\n    print('use persistent')\n    train_ds = PersistentDataset(data=datalist,\n                                 transform=train_transform,\n                                 pickle_protocol=pickle.HIGHEST_PROTOCOL,\n                                 cache_dir='/data/linshan/cache/MM-WHS')\n    # train_ds = data.Dataset(data=datalist, transform=train_transform)\n\n    
train_sampler = Sampler(train_ds) if args.distributed else None\n    train_loader = data.DataLoader(\n        train_ds,\n        batch_size=args.batch_size,\n        shuffle=(train_sampler is None),\n        num_workers=args.workers,\n        sampler=train_sampler,\n        pin_memory=True,\n    )\n    val_files = load_decathlon_datalist(datalist_json, True, \"validation\", base_dir=data_dir)\n    # val_ds = data.Dataset(data=val_files, transform=val_transform)\n    val_ds = PersistentDataset(data=val_files,\n                               transform=val_transform,\n                               pickle_protocol=pickle.HIGHEST_PROTOCOL,\n                               cache_dir='/data/linshan/cache/MM-WHS')\n    val_sampler = Sampler(val_ds, shuffle=False) if args.distributed else None\n    val_loader = data.DataLoader(\n        val_ds, batch_size=1, shuffle=False, num_workers=args.workers, sampler=val_sampler, pin_memory=False\n    )\n    loader = [train_loader, val_loader]\n\n    return loader\n\n\nclass Convert_WHS_label(MapTransform):\n\n    def __call__(self, data):\n        d = dict(data)\n        for key in self.keys:\n            out = d[key].clone()\n            out[d[key] == 205] = 1\n            out[d[key] == 420] = 2\n            out[d[key] == 500] = 3\n            out[d[key] == 550] = 4\n            out[d[key] == 600] = 5\n            out[d[key] == 820] = 6\n            out[d[key] == 850] = 7\n\n            d[key] = out.float()\n        return d\n"
  },
  {
    "path": "Finetune/MM-WHS/utils/utils.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport numpy as np\nimport scipy.ndimage as ndimage\nimport torch\nimport os\n\ndef resample_3d(img, target_size):\n    imx, imy, imz = img.shape\n    tx, ty, tz = target_size\n    zoom_ratio = (float(tx) / float(imx), float(ty) / float(imy), float(tz) / float(imz))\n    img_resampled = ndimage.zoom(img, zoom_ratio, order=0, prefilter=False)\n    return img_resampled\n\n\ndef dice(x, y):\n    intersect = np.sum(np.sum(np.sum(x * y)))\n    y_sum = np.sum(np.sum(np.sum(y)))\n    if y_sum == 0:\n        return 0.0\n    x_sum = np.sum(np.sum(np.sum(x)))\n    return 2 * intersect / (x_sum + y_sum)\n\n\nclass AverageMeter(object):\n    def __init__(self):\n        self.reset()\n\n    def reset(self):\n        self.val = 0\n        self.avg = 0\n        self.sum = 0\n        self.count = 0\n\n    def update(self, val, n=1):\n        self.val = val\n        self.sum += val * n\n        self.count += n\n        self.avg = np.where(self.count > 0, self.sum / self.count, self.sum)\n\n\ndef distributed_all_gather(\n    tensor_list, valid_batch_size=None, out_numpy=False, world_size=None, no_barrier=False, is_valid=None\n):\n    if world_size is None:\n        world_size = torch.distributed.get_world_size()\n    if valid_batch_size is not None:\n        valid_batch_size = min(valid_batch_size, world_size)\n    elif is_valid is not None:\n        is_valid = 
torch.tensor(bool(is_valid), dtype=torch.bool, device=tensor_list[0].device)\n    if not no_barrier:\n        torch.distributed.barrier()\n    tensor_list_out = []\n    with torch.no_grad():\n        if is_valid is not None:\n            is_valid_list = [torch.zeros_like(is_valid) for _ in range(world_size)]\n            torch.distributed.all_gather(is_valid_list, is_valid)\n            is_valid = [x.item() for x in is_valid_list]\n        for tensor in tensor_list:\n            gather_list = [torch.zeros_like(tensor) for _ in range(world_size)]\n            torch.distributed.all_gather(gather_list, tensor)\n            if valid_batch_size is not None:\n                gather_list = gather_list[:valid_batch_size]\n            elif is_valid is not None:\n                gather_list = [g for g, v in zip(gather_list, is_valid_list) if v]\n            if out_numpy:\n                gather_list = [t.cpu().numpy() for t in gather_list]\n            tensor_list_out.append(gather_list)\n    return tensor_list_out\n\n\ndef color_map(dataset='pascal'):\n    cmap = np.zeros((256, 3), dtype='uint8')\n\n    if dataset == 'pascal' or dataset == 'coco':\n        def bitget(byteval, idx):\n            return (byteval & (1 << idx)) != 0\n\n        for i in range(256):\n            r = g = b = 0\n            c = i\n            for j in range(8):\n                r = r | (bitget(c, 0) << 7-j)\n                g = g | (bitget(c, 1) << 7-j)\n                b = b | (bitget(c, 2) << 7-j)\n                c = c >> 3\n\n            cmap[i] = np.array([r, g, b])\n\n    elif dataset == 'cityscapes':\n        cmap[0] = np.array([128, 64, 128])\n        cmap[1] = np.array([244, 35, 232])\n        cmap[2] = np.array([70, 70, 70])\n        cmap[3] = np.array([102, 102, 156])\n        cmap[4] = np.array([190, 153, 153])\n        cmap[5] = np.array([153, 153, 153])\n        cmap[6] = np.array([250, 170, 30])\n        cmap[7] = np.array([220, 220, 0])\n        cmap[8] = np.array([107, 142, 35])\n  
      cmap[9] = np.array([152, 251, 152])\n        cmap[10] = np.array([70, 130, 180])\n        cmap[11] = np.array([220, 20, 60])\n        cmap[12] = np.array([255,  0,  0])\n        cmap[13] = np.array([0,  0, 142])\n        cmap[14] = np.array([0,  0, 70])\n        cmap[15] = np.array([0, 60, 100])\n        cmap[16] = np.array([0, 80, 100])\n        cmap[17] = np.array([0,  0, 230])\n        cmap[18] = np.array([119, 11, 32])\n\n        cmap[19] = np.array([0, 0, 0])\n        cmap[255] = np.array([0, 0, 0])\n\n    return cmap\n\n\ndef check_dir(dir):\n    if not os.path.exists(dir):\n        os.makedirs(dir)\n\n\ndef load(model, model_dict):\n    if \"state_dict\" in model_dict.keys():\n        state_dict = model_dict[\"state_dict\"]\n    elif \"network_weights\" in model_dict.keys():\n        state_dict = model_dict[\"network_weights\"]\n    elif \"net\" in model_dict.keys():\n        state_dict = model_dict[\"net\"]\n    else:\n        state_dict = model_dict\n\n    if \"module.\" in list(state_dict.keys())[0]:\n        print(\"Tag 'module.' found in state dict - fixing!\")\n        for key in list(state_dict.keys()):\n            state_dict[key.replace(\"module.\", \"\")] = state_dict.pop(key)\n\n    if \"backbone.\" in list(state_dict.keys())[0]:\n        print(\"Tag 'backbone.' 
found in state dict - fixing!\")\n        for key in list(state_dict.keys()):\n            state_dict[key.replace(\"backbone.\", \"\")] = state_dict.pop(key)\n\n    if \"swin_vit\" in list(state_dict.keys())[0]:\n        print(\"Tag 'swin_vit' found in state dict - fixing!\")\n        for key in list(state_dict.keys()):\n            state_dict[key.replace(\"swin_vit\", \"swinViT\")] = state_dict.pop(key)\n\n    current_model_dict = model.state_dict()\n    new_state_dict = {\n        k: state_dict[k] if (k in state_dict.keys()) and (state_dict[k].size() == current_model_dict[k].size()) else current_model_dict[k]\n        for k in current_model_dict.keys()}\n\n    model.load_state_dict(new_state_dict, strict=True)\n    print(\"Using VoCo pretrained backbone weights !!!!!!!\")\n\n    return model\n"
  },
  {
    "path": "Finetune/Word/dataset/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/Word/dataset/dataset_word.json",
    "content": "{\n    \"name\": \"WORD-V0.1.0\",\n    \"description\": \"Whole abdomen ORgan segmentation Dataset (WORD), just for research use !!!\",\n    \"reference\": \"WORD: Revisiting Organs Segmentation in the Whole Abdominal Region, link:https://arxiv.org/pdf/2111.02403.pdf, https://github.com/HiLab-git/WORD\",\n    \"licence\": \"GNU General Public License v3.0\",\n    \"release\": \"v0.1.0 10/11/2021\",\n    \"tensorImageSize\": \"3D\",\n    \"modality\": {\n        \"0\": \"CT\"\n    },\n    \"labels\": {\n        \"0\": \"background\",\n        \"1\": \"liver\",\n        \"2\": \"spleen\",\n        \"3\": \"left_kidney\",\n        \"4\": \"right_kidney\",\n        \"5\": \"stomach\",\n        \"6\": \"gallbladder\",\n        \"7\": \"esophagus\",\n        \"8\": \"pancreas\",\n        \"9\": \"duodenum\",\n        \"10\": \"colon\",\n        \"11\": \"intestine\",\n        \"12\": \"adrenal\",\n        \"13\": \"rectum\",\n        \"14\": \"bladder\",\n        \"15\": \"Head_of_femur_L\",\n        \"16\": \"Head_of_femur_R\"\n    },\n    \"numTraining\": 100,\n    \"numValidation\": 20,\n    \"numTest\": 30,\n    \"training\": [\n        {\n            \"image\": \"./imagesTr/word_0096.nii.gz\",\n            \"label\": \"./labelsTr/word_0096.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0010.nii.gz\",\n            \"label\": \"./labelsTr/word_0010.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0078.nii.gz\",\n            \"label\": \"./labelsTr/word_0078.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0109.nii.gz\",\n            \"label\": \"./labelsTr/word_0109.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0051.nii.gz\",\n            \"label\": \"./labelsTr/word_0051.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0067.nii.gz\",\n            \"label\": \"./labelsTr/word_0067.nii.gz\"\n        },\n        {\n            
\"image\": \"./imagesTr/word_0107.nii.gz\",\n            \"label\": \"./labelsTr/word_0107.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0105.nii.gz\",\n            \"label\": \"./labelsTr/word_0105.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0065.nii.gz\",\n            \"label\": \"./labelsTr/word_0065.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0144.nii.gz\",\n            \"label\": \"./labelsTr/word_0144.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0118.nii.gz\",\n            \"label\": \"./labelsTr/word_0118.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0140.nii.gz\",\n            \"label\": \"./labelsTr/word_0140.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0002.nii.gz\",\n            \"label\": \"./labelsTr/word_0002.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0091.nii.gz\",\n            \"label\": \"./labelsTr/word_0091.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0009.nii.gz\",\n            \"label\": \"./labelsTr/word_0009.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0100.nii.gz\",\n            \"label\": \"./labelsTr/word_0100.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0032.nii.gz\",\n            \"label\": \"./labelsTr/word_0032.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0040.nii.gz\",\n            \"label\": \"./labelsTr/word_0040.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0130.nii.gz\",\n            \"label\": \"./labelsTr/word_0130.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0101.nii.gz\",\n            \"label\": \"./labelsTr/word_0101.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0018.nii.gz\",\n            \"label\": 
\"./labelsTr/word_0018.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0090.nii.gz\",\n            \"label\": \"./labelsTr/word_0090.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0071.nii.gz\",\n            \"label\": \"./labelsTr/word_0071.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0042.nii.gz\",\n            \"label\": \"./labelsTr/word_0042.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0126.nii.gz\",\n            \"label\": \"./labelsTr/word_0126.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0135.nii.gz\",\n            \"label\": \"./labelsTr/word_0135.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0138.nii.gz\",\n            \"label\": \"./labelsTr/word_0138.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0116.nii.gz\",\n            \"label\": \"./labelsTr/word_0116.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0070.nii.gz\",\n            \"label\": \"./labelsTr/word_0070.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0084.nii.gz\",\n            \"label\": \"./labelsTr/word_0084.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0056.nii.gz\",\n            \"label\": \"./labelsTr/word_0056.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0148.nii.gz\",\n            \"label\": \"./labelsTr/word_0148.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0132.nii.gz\",\n            \"label\": \"./labelsTr/word_0132.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0102.nii.gz\",\n            \"label\": \"./labelsTr/word_0102.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0082.nii.gz\",\n            \"label\": \"./labelsTr/word_0082.nii.gz\"\n        },\n        {\n            \"image\": 
\"./imagesTr/word_0062.nii.gz\",\n            \"label\": \"./labelsTr/word_0062.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0073.nii.gz\",\n            \"label\": \"./labelsTr/word_0073.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0046.nii.gz\",\n            \"label\": \"./labelsTr/word_0046.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0146.nii.gz\",\n            \"label\": \"./labelsTr/word_0146.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0113.nii.gz\",\n            \"label\": \"./labelsTr/word_0113.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0006.nii.gz\",\n            \"label\": \"./labelsTr/word_0006.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0127.nii.gz\",\n            \"label\": \"./labelsTr/word_0127.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0095.nii.gz\",\n            \"label\": \"./labelsTr/word_0095.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0058.nii.gz\",\n            \"label\": \"./labelsTr/word_0058.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0128.nii.gz\",\n            \"label\": \"./labelsTr/word_0128.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0111.nii.gz\",\n            \"label\": \"./labelsTr/word_0111.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0049.nii.gz\",\n            \"label\": \"./labelsTr/word_0049.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0029.nii.gz\",\n            \"label\": \"./labelsTr/word_0029.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0086.nii.gz\",\n            \"label\": \"./labelsTr/word_0086.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0123.nii.gz\",\n            \"label\": \"./labelsTr/word_0123.nii.gz\"\n      
  },\n        {\n            \"image\": \"./imagesTr/word_0011.nii.gz\",\n            \"label\": \"./labelsTr/word_0011.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0005.nii.gz\",\n            \"label\": \"./labelsTr/word_0005.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0036.nii.gz\",\n            \"label\": \"./labelsTr/word_0036.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0114.nii.gz\",\n            \"label\": \"./labelsTr/word_0114.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0145.nii.gz\",\n            \"label\": \"./labelsTr/word_0145.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0136.nii.gz\",\n            \"label\": \"./labelsTr/word_0136.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0055.nii.gz\",\n            \"label\": \"./labelsTr/word_0055.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0047.nii.gz\",\n            \"label\": \"./labelsTr/word_0047.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0093.nii.gz\",\n            \"label\": \"./labelsTr/word_0093.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0026.nii.gz\",\n            \"label\": \"./labelsTr/word_0026.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0044.nii.gz\",\n            \"label\": \"./labelsTr/word_0044.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0061.nii.gz\",\n            \"label\": \"./labelsTr/word_0061.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0125.nii.gz\",\n            \"label\": \"./labelsTr/word_0125.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0064.nii.gz\",\n            \"label\": \"./labelsTr/word_0064.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0087.nii.gz\",\n            
\"label\": \"./labelsTr/word_0087.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0013.nii.gz\",\n            \"label\": \"./labelsTr/word_0013.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0104.nii.gz\",\n            \"label\": \"./labelsTr/word_0104.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0008.nii.gz\",\n            \"label\": \"./labelsTr/word_0008.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0079.nii.gz\",\n            \"label\": \"./labelsTr/word_0079.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0030.nii.gz\",\n            \"label\": \"./labelsTr/word_0030.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0094.nii.gz\",\n            \"label\": \"./labelsTr/word_0094.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0022.nii.gz\",\n            \"label\": \"./labelsTr/word_0022.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0134.nii.gz\",\n            \"label\": \"./labelsTr/word_0134.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0063.nii.gz\",\n            \"label\": \"./labelsTr/word_0063.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0117.nii.gz\",\n            \"label\": \"./labelsTr/word_0117.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0142.nii.gz\",\n            \"label\": \"./labelsTr/word_0142.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0081.nii.gz\",\n            \"label\": \"./labelsTr/word_0081.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0053.nii.gz\",\n            \"label\": \"./labelsTr/word_0053.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0106.nii.gz\",\n            \"label\": \"./labelsTr/word_0106.nii.gz\"\n        },\n        {\n            \"image\": 
\"./imagesTr/word_0003.nii.gz\",\n            \"label\": \"./labelsTr/word_0003.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0072.nii.gz\",\n            \"label\": \"./labelsTr/word_0072.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0119.nii.gz\",\n            \"label\": \"./labelsTr/word_0119.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0068.nii.gz\",\n            \"label\": \"./labelsTr/word_0068.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0027.nii.gz\",\n            \"label\": \"./labelsTr/word_0027.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0121.nii.gz\",\n            \"label\": \"./labelsTr/word_0121.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0147.nii.gz\",\n            \"label\": \"./labelsTr/word_0147.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0020.nii.gz\",\n            \"label\": \"./labelsTr/word_0020.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0133.nii.gz\",\n            \"label\": \"./labelsTr/word_0133.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0108.nii.gz\",\n            \"label\": \"./labelsTr/word_0108.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0004.nii.gz\",\n            \"label\": \"./labelsTr/word_0004.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0038.nii.gz\",\n            \"label\": \"./labelsTr/word_0038.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0089.nii.gz\",\n            \"label\": \"./labelsTr/word_0089.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0059.nii.gz\",\n            \"label\": \"./labelsTr/word_0059.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0041.nii.gz\",\n            \"label\": \"./labelsTr/word_0041.nii.gz\"\n      
  },\n        {\n            \"image\": \"./imagesTr/word_0150.nii.gz\",\n            \"label\": \"./labelsTr/word_0150.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0122.nii.gz\",\n            \"label\": \"./labelsTr/word_0122.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0012.nii.gz\",\n            \"label\": \"./labelsTr/word_0012.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0115.nii.gz\",\n            \"label\": \"./labelsTr/word_0115.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0143.nii.gz\",\n            \"label\": \"./labelsTr/word_0143.nii.gz\"\n        },\n        {\n            \"image\": \"./imagesTr/word_0028.nii.gz\",\n            \"label\": \"./labelsTr/word_0028.nii.gz\"\n        }\n    ],\n    \"validation\": [\"imagesVal\", \"labelsVal\"],\n    \"testing\": [\"imagesTs\", \"unseen\"],\n    \"addition_validation_from_LiTS\":[\"addition_validation_from_LiTS/imagesTs\", \"addition_validation_from_LiTS/labelsTs\"]\n}"
  },
  {
    "path": "Finetune/Word/main.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport argparse\nimport os\nfrom functools import partial\nimport logging\nimport numpy as np\nimport torch\nimport torch.distributed as dist\nimport torch.multiprocessing as mp\nimport torch.nn.parallel\nimport torch.utils.data.distributed\nfrom optimizers.lr_scheduler import LinearWarmupCosineAnnealingLR\nfrom trainer import run_training\nfrom utils.data_utils import get_loader_word\nimport torch.nn as nn\nfrom monai.inferers import sliding_window_inference\nfrom monai.losses import DiceCELoss\nfrom monai.metrics import DiceMetric\nfrom monai.networks.nets import SwinUNETR, UNETR\nfrom monai.transforms import Activations, AsDiscrete, Compose\nfrom monai.utils.enums import MetricReduction\nfrom monai.networks.blocks import PatchEmbed, UnetOutBlock, UnetrBasicBlock, UnetrUpBlock\nfrom monai.networks.nets.swin_unetr import SwinTransformer as SwinViT\nfrom monai.utils import ensure_tuple_rep\n\nos.environ['CUDA_VISIBLE_DEVICES'] = \"4\"\nos.environ['MASTER_ADDR'] = 'localhost'\nos.environ['MASTER_PORT'] = '28890'\n\nimport resource\n\nrlimit = resource.getrlimit(resource.RLIMIT_NOFILE)\nresource.setrlimit(resource.RLIMIT_NOFILE, (8192, rlimit[1]))\nprint('Setting resource limit:', str(resource.getrlimit(resource.RLIMIT_NOFILE)))\n\nparser = argparse.ArgumentParser(description=\"Swin UNETR segmentation pipeline\")\nparser.add_argument(\"--checkpoint\", default=None, 
help=\"start training from saved checkpoint\")\nparser.add_argument(\"--logdir\", default=\"logs_amos\", type=str, help=\"directory to save the tensorboard logs\")\nparser.add_argument(\n    \"--pretrained_dir\", default=\"./pretrained_models/\", type=str, help=\"pretrained checkpoint directory\"\n)\nparser.add_argument(\"--data\", default=\"word\", type=str, help=\"dataset name\")\nparser.add_argument(\"--out_channels\", default=17, type=int, help=\"number of output channels\")\nparser.add_argument(\n    \"--pretrained_checkpoint\",default=\"VoCo_10k.pt\", type=str, help=\"VoCo_10k pretrained model\")\nparser.add_argument(\n    \"--pretrained_model_name\",\n    default=\"model_bestVal.pt\",\n    type=str,\n    help=\"pretrained model name\",\n)\nroi = 96\nparser.add_argument(\"--save_checkpoint\", default=True, help=\"save checkpoint during training\")\nparser.add_argument(\"--max_epochs\", default=200, type=int, help=\"max number of training epochs\")\nparser.add_argument(\"--warmup_epochs\", default=5, type=int, help=\"number of warmup epochs\")\nparser.add_argument(\"--batch_size\", default=1, type=int, help=\"number of batch size\")\nparser.add_argument(\"--sw_batch_size\", default=4, type=int, help=\"number of sliding window batch size\")\nparser.add_argument(\"--optim_lr\", default=3e-4, type=float, help=\"optimization learning rate\")\nparser.add_argument(\"--optim_name\", default=\"adamw\", type=str, help=\"optimization algorithm\")\nparser.add_argument(\"--reg_weight\", default=1e-5, type=float, help=\"regularization weight\")\nparser.add_argument(\"--momentum\", default=0.99, type=float, help=\"momentum\")\nparser.add_argument(\"--noamp\", default=False, help=\"do NOT use amp for training\")\nparser.add_argument(\"--val_every\", default=10, type=int, help=\"validation frequency\")\nparser.add_argument(\"--distributed\", action=\"store_true\", help=\"start distributed training\")\nparser.add_argument(\"--world_size\", default=1, type=int, help=\"number of 
nodes for distributed training\")\nparser.add_argument(\"--rank\", default=0, type=int, help=\"node rank for distributed training\")\nparser.add_argument(\"--dist-url\", default=\"tcp://127.0.0.1:23456\", type=str, help=\"distributed url\")\nparser.add_argument(\"--dist-backend\", default=\"nccl\", type=str, help=\"distributed backend\")\nparser.add_argument(\"--norm_name\", default=\"instance\", type=str, help=\"normalization name\")\nparser.add_argument(\"--workers\", default=8, type=int, help=\"number of workers\")\nparser.add_argument(\"--feature_size\", default=96, type=int, help=\"feature size\")\nparser.add_argument(\"--in_channels\", default=1, type=int, help=\"number of input channels\")\n\nparser.add_argument(\"--use_normal_dataset\", default=True, help=\"use monai Dataset class\")\nparser.add_argument(\"--a_min\", default=-175.0, type=float, help=\"a_min in ScaleIntensityRanged\")\nparser.add_argument(\"--a_max\", default=250.0, type=float, help=\"a_max in ScaleIntensityRanged\")\nparser.add_argument(\"--b_min\", default=0.0, type=float, help=\"b_min in ScaleIntensityRanged\")\nparser.add_argument(\"--b_max\", default=1.0, type=float, help=\"b_max in ScaleIntensityRanged\")\nparser.add_argument(\"--space_x\", default=1.5, type=float, help=\"spacing in x direction\")\nparser.add_argument(\"--space_y\", default=1.5, type=float, help=\"spacing in y direction\")\nparser.add_argument(\"--space_z\", default=1.5, type=float, help=\"spacing in z direction\")\nparser.add_argument(\"--roi_x\", default=roi, type=int, help=\"roi size in x direction\")\nparser.add_argument(\"--roi_y\", default=roi, type=int, help=\"roi size in y direction\")\nparser.add_argument(\"--roi_z\", default=roi, type=int, help=\"roi size in z direction\")\nparser.add_argument(\"--dropout_rate\", default=0.0, type=float, help=\"dropout rate\")\nparser.add_argument(\"--dropout_path_rate\", default=0.0, type=float, help=\"drop path rate\")\nparser.add_argument(\"--RandFlipd_prob\", default=0.2, 
type=float, help=\"RandFlipd aug probability\")\nparser.add_argument(\"--RandRotate90d_prob\", default=0.2, type=float, help=\"RandRotate90d aug probability\")\nparser.add_argument(\"--RandScaleIntensityd_prob\", default=0.1, type=float, help=\"RandScaleIntensityd aug probability\")\nparser.add_argument(\"--RandShiftIntensityd_prob\", default=0.5, type=float, help=\"RandShiftIntensityd aug probability\")\nparser.add_argument(\"--infer_overlap\", default=0.75, type=float, help=\"sliding window inference overlap\")\nparser.add_argument(\"--lrschedule\", default=\"warmup_cosine\", type=str, help=\"type of learning rate scheduler\")\nparser.add_argument(\"--resume_ckpt\", action=\"store_true\", help=\"resume training from pretrained checkpoint\")\nparser.add_argument(\"--smooth_dr\", default=1e-6, type=float, help=\"constant added to dice denominator to avoid nan\")\nparser.add_argument(\"--smooth_nr\", default=0.0, type=float, help=\"constant added to dice numerator to avoid zero\")\nparser.add_argument(\"--use_checkpoint\", default=True, help=\"use gradient checkpointing to save memory\")\nparser.add_argument(\"--use_ssl_pretrained\", default=False, help=\"use self-supervised pretrained weights\")\nparser.add_argument(\"--spatial_dims\", default=3, type=int, help=\"spatial dimension of input data\")\nparser.add_argument(\"--squared_dice\", action=\"store_true\", help=\"use squared Dice\")\n\n\ndef main():\n    args = parser.parse_args()\n    args.amp = not args.noamp\n    if args.distributed:\n        args.ngpus_per_node = torch.cuda.device_count()\n        print(\"Found total gpus\", args.ngpus_per_node)\n        args.world_size = args.ngpus_per_node * args.world_size\n        mp.spawn(main_worker, nprocs=args.ngpus_per_node, args=(args,))\n    else:\n        main_worker(gpu=0, args=args)\n\n\ndef main_worker(gpu, args):\n    if args.distributed:\n        torch.multiprocessing.set_start_method(\"fork\", force=True)\n    np.set_printoptions(formatter={\"float\": \"{: 
0.3f}\".format}, suppress=True)\n    args.gpu = gpu\n    if args.distributed:\n        args.rank = args.rank * args.ngpus_per_node + gpu\n        dist.init_process_group(\n            backend=args.dist_backend, init_method=args.dist_url, world_size=args.world_size, rank=args.rank\n        )\n    torch.cuda.set_device(0)\n\n    torch.backends.cudnn.enabled = True\n    torch.backends.cudnn.benchmark = True\n    args.test_mode = False\n    loader = get_loader_word(args)\n    print(args.rank, \" gpu\", args.gpu)\n    if args.rank == 0:\n        print(\"Batch size is:\", args.batch_size, \"epochs\", args.max_epochs)\n    inf_size = [args.roi_x, args.roi_y, args.roi_z]\n\n    if args.rank == 0:\n        os.makedirs(args.logdir, exist_ok=True)\n    logger = init_log('global', logging.INFO)\n    logger.propagate = 0\n\n    pretrained_dir = args.pretrained_dir\n    model = SwinUNETR(\n        img_size=(args.roi_x, args.roi_y, args.roi_z),\n        in_channels=args.in_channels,\n        out_channels=args.out_channels,\n        feature_size=args.feature_size,\n        drop_rate=0.0,\n        attn_drop_rate=0.0,\n        dropout_path_rate=args.dropout_path_rate,\n        use_checkpoint=args.use_checkpoint,\n        use_v2=True\n    )\n\n    if args.resume_ckpt:\n        model_dict = torch.load(os.path.join(pretrained_dir, args.pretrained_model_name))[\"state_dict\"]\n        model.load_state_dict(model_dict)\n        print(\"Use pretrained weights\")\n\n    if args.use_ssl_pretrained:\n        try:\n            # model_VoCoEMA.pt\n            # model_dict = torch.load(\"./pretrained_models/supervised_suprem_swinunetr_2100.pth\", map_location=torch.device('cpu'))\n            # model_dict = torch.load(\"./pretrained_models/model_VoCoEMA.pt\", map_location=torch.device('cpu'))\n            model_dict = torch.load(args.pretrained_checkpoint,\n                                    map_location=torch.device('cpu'))\n            state_dict = model_dict['net'] if 'net' in model_dict else model_dict\n            # fix 
potential differences in state dict keys from pre-training to\n            # fine-tuning\n            if \"module.\" in list(state_dict.keys())[0]:\n                print(\"Tag 'module.' found in state dict - fixing!\")\n                for key in list(state_dict.keys()):\n                    state_dict[key.replace(\"module.\", \"\")] = state_dict.pop(key)\n            if \"swin_vit\" in list(state_dict.keys())[0]:\n                print(\"Tag 'swin_vit' found in state dict - fixing!\")\n                for key in list(state_dict.keys()):\n                    state_dict[key.replace(\"swin_vit\", \"swinViT\")] = state_dict.pop(key)\n            # We now load model weights, setting param `strict` to False, i.e.:\n            # this load the encoder weights (Swin-ViT, SSL pre-trained), but leaves\n            # the decoder weights untouched (CNN UNet decoder).\n            model.load_state_dict(state_dict, strict=False)\n            print(\"Using pretrained voco ema self-supervised Swin UNETR backbone weights !\")\n        except ValueError:\n            raise ValueError(\"Self-supervised pre-trained weights not available for\" + str(args.model_name))\n\n    if args.squared_dice:\n        dice_loss = DiceCELoss(\n            to_onehot_y=True, softmax=True, squared_pred=True, smooth_nr=args.smooth_nr, smooth_dr=args.smooth_dr\n        )\n    else:\n        dice_loss = DiceCELoss(include_background=False, to_onehot_y=True, softmax=True)\n\n    post_label = AsDiscrete(to_onehot=args.out_channels)\n    post_pred = AsDiscrete(argmax=True, to_onehot=args.out_channels)\n    dice_acc = DiceMetric(include_background=False, reduction=MetricReduction.MEAN, get_not_nans=True)\n    model_inferer = partial(\n        sliding_window_inference,\n        roi_size=inf_size,\n        sw_batch_size=args.sw_batch_size,\n        predictor=model,\n        overlap=args.infer_overlap,\n    )\n\n    pytorch_total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)\n    
print(\"Total parameters count\", pytorch_total_params)\n\n    best_acc = 0\n    start_epoch = 0\n\n    if args.checkpoint is not None:\n        checkpoint = torch.load(args.checkpoint, map_location=\"cpu\")\n        from collections import OrderedDict\n\n        new_state_dict = OrderedDict()\n        for k, v in checkpoint[\"state_dict\"].items():\n            new_state_dict[k.replace(\"backbone.\", \"\")] = v\n        model.load_state_dict(new_state_dict, strict=False)\n        if \"epoch\" in checkpoint:\n            start_epoch = checkpoint[\"epoch\"]\n        if \"best_acc\" in checkpoint:\n            best_acc = checkpoint[\"best_acc\"]\n        print(\"=> loaded checkpoint '{}' (epoch {}) (bestacc {})\".format(args.checkpoint, start_epoch, best_acc))\n\n    model.cuda(args.gpu)\n\n    if args.distributed:\n        torch.cuda.set_device(args.gpu)\n        if args.norm_name == \"batch\":\n            model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model)\n        model.cuda(args.gpu)\n        model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu], output_device=args.gpu)\n    if args.optim_name == \"adam\":\n        optimizer = torch.optim.Adam(model.parameters(), lr=args.optim_lr, weight_decay=args.reg_weight)\n\n    elif args.optim_name == \"adamw\":\n        optimizer = torch.optim.AdamW(model.parameters(), lr=args.optim_lr, weight_decay=args.reg_weight)\n        # optimizer = torch.optim.AdamW(model.parameters(), lr=args.optim_lr, amsgrad=True)\n\n    elif args.optim_name == \"sgd\":\n        optimizer = torch.optim.SGD(\n            model.parameters(), lr=args.optim_lr, momentum=args.momentum, nesterov=True, weight_decay=args.reg_weight\n        )\n    else:\n        raise ValueError(\"Unsupported Optimization Procedure: \" + str(args.optim_name))\n\n    if args.lrschedule == \"warmup_cosine\":\n        max_steps = args.max_epochs*len(loader[0])\n        warmup_steps = args.warmup_epochs*len(loader[0])\n        scheduler = 
LinearWarmupCosineAnnealingLR(\n            optimizer, warmup_epochs=warmup_steps, max_epochs=max_steps\n        )\n    elif args.lrschedule == \"cosine_anneal\":\n        scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=args.max_epochs)\n        if args.checkpoint is not None:\n            scheduler.step(epoch=start_epoch)\n    else:\n        scheduler = None\n    accuracy = run_training(\n        model=model,\n        train_loader=loader[0],\n        val_loader=loader[1],\n        optimizer=optimizer,\n        loss_func=dice_loss,\n        acc_func=dice_acc,\n        args=args,\n        model_inferer=model_inferer,\n        scheduler=scheduler,\n        start_epoch=start_epoch,\n        post_label=post_label,\n        post_pred=post_pred,\n    )\n    return accuracy\n\n\nlogs = set()\n\n\ndef init_log(name, level=logging.INFO):\n    if (name, level) in logs:\n        return\n    logs.add((name, level))\n    logger = logging.getLogger(name)\n    logger.setLevel(level)\n    ch = logging.StreamHandler()\n    ch.setLevel(level)\n    if \"SLURM_PROCID\" in os.environ:\n        rank = int(os.environ[\"SLURM_PROCID\"])\n        logger.addFilter(lambda record: rank == 0)\n    else:\n        rank = 0\n    format_str = \"[%(asctime)s][%(levelname)8s] %(message)s\"\n    formatter = logging.Formatter(format_str)\n    ch.setFormatter(formatter)\n    logger.addHandler(ch)\n    return logger\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },
  {
    "path": "Finetune/Word/optimizers/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/Word/optimizers/lr_scheduler.py",
    "content": "# Copyright 2020 - 2021 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nimport warnings\nfrom typing import List\n\nfrom torch import nn as nn\nfrom torch.optim import Adam, Optimizer\nfrom torch.optim.lr_scheduler import LambdaLR, _LRScheduler\n\n__all__ = [\"LinearLR\", \"ExponentialLR\"]\n\n\nclass _LRSchedulerMONAI(_LRScheduler):\n    \"\"\"Base class for increasing the learning rate between two boundaries over a number\n    of iterations\"\"\"\n\n    def __init__(self, optimizer: Optimizer, end_lr: float, num_iter: int, last_epoch: int = -1) -> None:\n        \"\"\"\n        Args:\n            optimizer: wrapped optimizer.\n            end_lr: the final learning rate.\n            num_iter: the number of iterations over which the test occurs.\n            last_epoch: the index of last epoch.\n        Returns:\n            None\n        \"\"\"\n        self.end_lr = end_lr\n        self.num_iter = num_iter\n        super(_LRSchedulerMONAI, self).__init__(optimizer, last_epoch)\n\n\nclass LinearLR(_LRSchedulerMONAI):\n    \"\"\"Linearly increases the learning rate between two boundaries over a number of\n    iterations.\n    \"\"\"\n\n    def get_lr(self):\n        r = self.last_epoch / (self.num_iter - 1)\n        return [base_lr + r * (self.end_lr - base_lr) for base_lr in self.base_lrs]\n\n\nclass ExponentialLR(_LRSchedulerMONAI):\n    \"\"\"Exponentially increases the learning rate between two boundaries over a 
number of\n    iterations.\n    \"\"\"\n\n    def get_lr(self):\n        r = self.last_epoch / (self.num_iter - 1)\n        return [base_lr * (self.end_lr / base_lr) ** r for base_lr in self.base_lrs]\n\n\nclass WarmupCosineSchedule(LambdaLR):\n    \"\"\"Linear warmup and then cosine decay.\n    Based on https://huggingface.co/ implementation.\n    \"\"\"\n\n    def __init__(\n        self, optimizer: Optimizer, warmup_steps: int, t_total: int, cycles: float = 0.5, last_epoch: int = -1\n    ) -> None:\n        \"\"\"\n        Args:\n            optimizer: wrapped optimizer.\n            warmup_steps: number of warmup iterations.\n            t_total: total number of training iterations.\n            cycles: cosine cycles parameter.\n            last_epoch: the index of last epoch.\n        Returns:\n            None\n        \"\"\"\n        self.warmup_steps = warmup_steps\n        self.t_total = t_total\n        self.cycles = cycles\n        super(WarmupCosineSchedule, self).__init__(optimizer, self.lr_lambda, last_epoch)\n\n    def lr_lambda(self, step):\n        if step < self.warmup_steps:\n            return float(step) / float(max(1.0, self.warmup_steps))\n        progress = float(step - self.warmup_steps) / float(max(1, self.t_total - self.warmup_steps))\n        return max(0.0, 0.5 * (1.0 + math.cos(math.pi * float(self.cycles) * 2.0 * progress)))\n\n\nclass LinearWarmupCosineAnnealingLR(_LRScheduler):\n    def __init__(\n        self,\n        optimizer: Optimizer,\n        warmup_epochs: int,\n        max_epochs: int,\n        warmup_start_lr: float = 0.0,\n        eta_min: float = 0.0,\n        last_epoch: int = -1,\n    ) -> None:\n        \"\"\"\n        Args:\n            optimizer (Optimizer): Wrapped optimizer.\n            warmup_epochs (int): Maximum number of iterations for linear warmup\n            max_epochs (int): Maximum number of iterations\n            warmup_start_lr (float): Learning rate to start the linear warmup. 
Default: 0.\n            eta_min (float): Minimum learning rate. Default: 0.\n            last_epoch (int): The index of last epoch. Default: -1.\n        \"\"\"\n        self.warmup_epochs = warmup_epochs\n        self.max_epochs = max_epochs\n        self.warmup_start_lr = warmup_start_lr\n        self.eta_min = eta_min\n\n        super(LinearWarmupCosineAnnealingLR, self).__init__(optimizer, last_epoch)\n\n    def get_lr(self) -> List[float]:\n        \"\"\"\n        Compute learning rate using chainable form of the scheduler\n        \"\"\"\n        if not self._get_lr_called_within_step:\n            warnings.warn(\n                \"To get the last learning rate computed by the scheduler, \" \"please use `get_last_lr()`.\", UserWarning\n            )\n\n        if self.last_epoch == 0:\n            return [self.warmup_start_lr] * len(self.base_lrs)\n        elif self.last_epoch < self.warmup_epochs:\n            return [\n                group[\"lr\"] + (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)\n                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)\n            ]\n        elif self.last_epoch == self.warmup_epochs:\n            return self.base_lrs\n        elif (self.last_epoch - 1 - self.max_epochs) % (2 * (self.max_epochs - self.warmup_epochs)) == 0:\n            return [\n                group[\"lr\"]\n                + (base_lr - self.eta_min) * (1 - math.cos(math.pi / (self.max_epochs - self.warmup_epochs))) / 2\n                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)\n            ]\n\n        return [\n            (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))\n            / (\n                1\n                + math.cos(\n                    math.pi * (self.last_epoch - self.warmup_epochs - 1) / (self.max_epochs - self.warmup_epochs)\n                )\n            )\n            * (group[\"lr\"] - 
self.eta_min)\n            + self.eta_min\n            for group in self.optimizer.param_groups\n        ]\n\n    def _get_closed_form_lr(self) -> List[float]:\n        \"\"\"\n        Called when epoch is passed as a param to the `step` function of the scheduler.\n        \"\"\"\n        if self.last_epoch < self.warmup_epochs:\n            return [\n                self.warmup_start_lr + self.last_epoch * (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)\n                for base_lr in self.base_lrs\n            ]\n\n        return [\n            self.eta_min\n            + 0.5\n            * (base_lr - self.eta_min)\n            * (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))\n            for base_lr in self.base_lrs\n        ]\n"
  },
  {
    "path": "Finetune/Word/train.sh",
    "content": "now=$(date +\"%Y%m%d_%H%M%S\")\nlogdir=runs/logs_swin_large_scratch\nmkdir -p $logdir\n\ntorchrun --master_port=20482 main.py \\\n    --logdir $logdir | tee $logdir/$now.txt"
  },
  {
    "path": "Finetune/Word/train.slurm",
    "content": "#!/bin/bash\n\n# NOTE: Lines starting with \"#SBATCH\" are valid SLURM commands or statements,\n#       while those starting with \"#\" and \"##SBATCH\" are comments.\n\n#SBATCH -J Omni_btcv\n\n#SBATCH -t 72:00:00 #Maximum runtime of 72 hours\n\n# Enable email notifications when job begins and ends\n#SBATCH --mail-user=lwubf@connect.ust.hk #Update your email address\n#SBATCH --mail-type=begin\n#SBATCH --mail-type=end\n\n# Choose partition (queue) with \"gpu\"\n#SBATCH -p project\n\n# To use 16 CPU cores and 1 GPU device in a node\n#SBATCH -N 1 -n 16 --gres=gpu:1\n\n# Setup runtime environment if necessary\nsource ~/.bashrc\nsource activate nnunet\n\n# Go to the job submission directory and run your application\ncd /home/lwubf/SwinUNETR/Omni/\nsh train.sh"
  },
  {
    "path": "Finetune/Word/trainer.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport os\nimport shutil\nimport time\n\nimport numpy as np\nimport torch\nimport torch.nn.parallel\nimport torch.utils.data.distributed\nfrom tensorboardX import SummaryWriter\nfrom torch.cuda.amp import GradScaler, autocast\nfrom utils.utils import AverageMeter, distributed_all_gather\n\nfrom monai.data import decollate_batch\n\n\ndef train_epoch(model, loader, optimizer, scheduler, scaler, epoch, loss_func, args):\n    model.train()\n    start_time = time.time()\n    run_loss = AverageMeter()\n    for idx, batch_data in enumerate(loader):\n        if isinstance(batch_data, list):\n            data, target = batch_data\n        else:\n            data, target = batch_data[\"image\"], batch_data[\"label\"]\n        data, target = data.cuda(), target.cuda()\n        for param in model.parameters():\n            param.grad = None\n        with autocast(enabled=args.amp):\n            logits = model(data)\n            loss = loss_func(logits, target)\n            #\n        if args.amp:\n            scaler.scale(loss).backward()\n            scaler.step(optimizer)\n            scaler.update()\n        else:\n            loss.backward()\n            optimizer.step()\n        if args.distributed:\n            loss_list = distributed_all_gather([loss], out_numpy=True, is_valid=idx < loader.sampler.valid_length)\n            run_loss.update(\n                
np.mean(np.mean(np.stack(loss_list, axis=0), axis=0), axis=0), n=args.batch_size * args.world_size\n            )\n        else:\n            run_loss.update(loss.item(), n=args.batch_size)\n        \n        lr = optimizer.param_groups[0][\"lr\"]\n        if scheduler is not None:\n            scheduler.step()\n        if args.rank == 0 and (idx + 1) % 100 == 0:\n            print(\n                \"Epoch {}/{} {}/{}\".format(epoch, args.max_epochs, idx, len(loader)),\n                \"loss: {:.4f}\".format(run_loss.avg),\n                \"lr: {:.8f}\".format(lr),\n                \"time {:.2f}s\".format(time.time() - start_time),\n            )\n        start_time = time.time()\n    for param in model.parameters():\n        param.grad = None\n\n    return run_loss.avg\n\n\ndef val_epoch(model, loader, epoch, acc_func, args, model_inferer=None, post_label=None, post_pred=None):\n    model.eval()\n    run_acc = AverageMeter()\n    start_time = time.time()\n    with torch.no_grad():\n        for idx, batch_data in enumerate(loader):\n            if isinstance(batch_data, list):\n                data, target = batch_data\n            else:\n                data, target = batch_data[\"image\"], batch_data[\"label\"]\n            data, target = data.cuda(), target.cuda()\n            with autocast(enabled=args.amp):\n                if model_inferer is not None:\n                    logits = model_inferer(data)\n                else:\n                    logits = model(data)\n            if not logits.is_cuda:\n                target = target.cpu()\n            val_labels_list = decollate_batch(target)\n            val_labels_convert = [post_label(val_label_tensor) for val_label_tensor in val_labels_list]\n            val_outputs_list = decollate_batch(logits)\n            val_output_convert = [post_pred(val_pred_tensor) for val_pred_tensor in val_outputs_list]\n            acc_func.reset()\n            acc_func(y_pred=val_output_convert, y=val_labels_convert)\n     
       acc, not_nans = acc_func.aggregate()\n            acc = acc.cuda(args.rank)\n\n            if args.distributed:\n                acc_list, not_nans_list = distributed_all_gather(\n                    [acc, not_nans], out_numpy=True, is_valid=idx < loader.sampler.valid_length\n                )\n                for al, nl in zip(acc_list, not_nans_list):\n                    run_acc.update(al, n=nl)\n\n            else:\n                run_acc.update(acc.cpu().numpy(), n=not_nans.cpu().numpy())\n\n            if args.rank == 0:\n                avg_acc = np.mean(run_acc.avg)\n                print(\n                    \"Val {}/{} {}/{}\".format(epoch, args.max_epochs, idx, len(loader)),\n                    \"acc\",\n                    avg_acc,\n                    \"time {:.2f}s\".format(time.time() - start_time),\n                )\n            start_time = time.time()\n    torch.cuda.empty_cache()\n    return run_acc.avg\n\n\ndef save_checkpoint(model, epoch, args, filename=\"model.pt\", best_acc=0, optimizer=None, scheduler=None):\n    state_dict = model.state_dict() if not args.distributed else model.module.state_dict()\n    save_dict = {\"epoch\": epoch, \"best_acc\": best_acc, \"state_dict\": state_dict}\n    if optimizer is not None:\n        save_dict[\"optimizer\"] = optimizer.state_dict()\n    if scheduler is not None:\n        save_dict[\"scheduler\"] = scheduler.state_dict()\n    filename = os.path.join(args.logdir, filename)\n    torch.save(save_dict, filename)\n    print(\"Saving checkpoint\", filename)\n\n\ndef run_training(\n    model,\n    train_loader,\n    val_loader,\n    optimizer,\n    loss_func,\n    acc_func,\n    args,\n    model_inferer=None,\n    scheduler=None,\n    start_epoch=0,\n    post_label=None,\n    post_pred=None,\n):\n    writer = None\n    if args.logdir is not None and args.rank == 0:\n        writer = SummaryWriter(log_dir=args.logdir)\n        if args.rank == 0:\n            print(\"Writing Tensorboard logs to \", 
args.logdir)\n    scaler = None\n    if args.amp:\n        scaler = GradScaler()\n    val_acc_max = 0.0\n    for epoch in range(start_epoch, args.max_epochs):\n        if args.distributed:\n            train_loader.sampler.set_epoch(epoch)\n            torch.distributed.barrier()\n        print(args.rank, time.ctime(), \"Epoch:\", epoch)\n        epoch_time = time.time()\n        train_loss = train_epoch(\n            model, train_loader, optimizer, scheduler, scaler=scaler, epoch=epoch, loss_func=loss_func, args=args\n        )\n        if args.rank == 0:\n            print(\n                \"Final training  {}/{}\".format(epoch, args.max_epochs - 1),\n                \"loss: {:.4f}\".format(train_loss),\n                \"time {:.2f}s\".format(time.time() - epoch_time),\n            )\n        if args.rank == 0 and writer is not None:\n            writer.add_scalar(\"train_loss\", train_loss, epoch)\n        b_new_best = False\n        if (epoch + 1) % args.val_every == 0:\n            if args.distributed:\n                torch.distributed.barrier()\n            epoch_time = time.time()\n            val_avg_acc = val_epoch(\n                model,\n                val_loader,\n                epoch=epoch,\n                acc_func=acc_func,\n                model_inferer=model_inferer,\n                args=args,\n                post_label=post_label,\n                post_pred=post_pred,\n            )\n\n            val_avg_acc = np.mean(val_avg_acc)\n\n            if args.rank == 0:\n                print(\n                    \"Final validation  {}/{}\".format(epoch, args.max_epochs - 1),\n                    \"acc\",\n                    val_avg_acc,\n                    \"time {:.2f}s\".format(time.time() - epoch_time),\n                )\n                if writer is not None:\n                    writer.add_scalar(\"val_acc\", val_avg_acc, epoch)\n                if val_avg_acc > val_acc_max:\n                    print(\"new best ({:.6f} --> {:.6f}). 
\".format(val_acc_max, val_avg_acc))\n                    val_acc_max = val_avg_acc\n                    b_new_best = True\n                    if args.rank == 0 and args.logdir is not None and args.save_checkpoint:\n                        save_checkpoint(\n                            model, epoch, args, best_acc=val_acc_max, optimizer=optimizer, scheduler=scheduler\n                        )\n            if args.rank == 0 and args.logdir is not None and args.save_checkpoint:\n                save_checkpoint(model, epoch, args, best_acc=val_acc_max, filename=\"model_final.pt\")\n                if b_new_best:\n                    print(\"Copying to model.pt new best model!!!!\")\n                    shutil.copyfile(os.path.join(args.logdir, \"model_final.pt\"), os.path.join(args.logdir, \"model.pt\"))\n\n    print(\"Training Finished !, Best Accuracy: \", val_acc_max)\n\n    return val_acc_max\n"
  },
  {
    "path": "Finetune/Word/utils/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/Word/utils/data_utils.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nimport os\nimport pickle\nimport numpy as np\nimport torch\nimport itertools as it\nfrom monai import data, transforms\nfrom monai.data import *\nfrom torch.utils.data import DataLoader, ConcatDataset\nfrom monai.transforms import *\n\n\nclass Sampler(torch.utils.data.Sampler):\n    def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, make_even=True):\n        if num_replicas is None:\n            if not torch.distributed.is_available():\n                raise RuntimeError(\"Requires distributed package to be available\")\n            num_replicas = torch.distributed.get_world_size()\n        if rank is None:\n            if not torch.distributed.is_available():\n                raise RuntimeError(\"Requires distributed package to be available\")\n            rank = torch.distributed.get_rank()\n        self.shuffle = shuffle\n        self.make_even = make_even\n        self.dataset = dataset\n        self.num_replicas = num_replicas\n        self.rank = rank\n        self.epoch = 0\n        self.num_samples = int(math.ceil(len(self.dataset) * 1.0 / self.num_replicas))\n        self.total_size = self.num_samples * self.num_replicas\n        indices = list(range(len(self.dataset)))\n        self.valid_length = len(indices[self.rank : self.total_size : self.num_replicas])\n\n    def __iter__(self):\n        if self.shuffle:\n          
  g = torch.Generator()\n            g.manual_seed(self.epoch)\n            indices = torch.randperm(len(self.dataset), generator=g).tolist()\n        else:\n            indices = list(range(len(self.dataset)))\n        if self.make_even:\n            if len(indices) < self.total_size:\n                if self.total_size - len(indices) < len(indices):\n                    indices += indices[: (self.total_size - len(indices))]\n                else:\n                    extra_ids = np.random.randint(low=0, high=len(indices), size=self.total_size - len(indices))\n                    indices += [indices[ids] for ids in extra_ids]\n            assert len(indices) == self.total_size\n        indices = indices[self.rank : self.total_size : self.num_replicas]\n        self.num_samples = len(indices)\n        return iter(indices)\n\n    def __len__(self):\n        return self.num_samples\n\n    def set_epoch(self, epoch):\n        self.epoch = epoch\n\n\ndef get_loader_word(args):\n\n    datadir = \"/data/linshan/CTs/WORD/\"\n    splits = \"dataset.json\"\n    jsonlist = os.path.join(datadir, splits)\n    datalist = load_decathlon_datalist(jsonlist, True, \"training\", base_dir=datadir)\n    print(\"Dataset 4 WORD: number of data: {}\".format(len(datalist)))\n    print(\"Dataset all training and val: number of data: {}\".format(len(datalist)))\n\n    train_trans = [\n            transforms.LoadImaged(keys=[\"image\", \"label\"]),\n            transforms.EnsureChannelFirstd(keys=[\"image\", \"label\"]),\n            transforms.Orientationd(keys=[\"image\", \"label\"], axcodes=\"RAS\"),\n            transforms.Spacingd(\n                keys=[\"image\", \"label\"], pixdim=(args.space_x, args.space_y, args.space_z), mode=(\"bilinear\", \"nearest\")\n            ),\n            transforms.ScaleIntensityRanged(\n                keys=[\"image\"], a_min=args.a_min, a_max=args.a_max, b_min=args.b_min, b_max=args.b_max, clip=True\n            ),\n            
transforms.CropForegroundd(keys=[\"image\", \"label\"], source_key=\"image\"),\n\n            transforms.RandCropByPosNegLabeld(\n                keys=[\"image\", \"label\"],\n                label_key=\"label\",\n                spatial_size=(args.roi_x, args.roi_y, args.roi_z),\n                pos=9,\n                neg=1,\n                num_samples=args.sw_batch_size,\n                image_key=\"image\",\n                image_threshold=0,\n            ),\n            transforms.RandFlipd(keys=[\"image\", \"label\"], prob=args.RandFlipd_prob, spatial_axis=0),\n            transforms.RandFlipd(keys=[\"image\", \"label\"], prob=args.RandFlipd_prob, spatial_axis=1),\n            transforms.RandFlipd(keys=[\"image\", \"label\"], prob=args.RandFlipd_prob, spatial_axis=2),\n            transforms.RandRotate90d(keys=[\"image\", \"label\"], prob=args.RandRotate90d_prob, max_k=3),\n            transforms.RandScaleIntensityd(keys=\"image\", factors=0.1, prob=args.RandScaleIntensityd_prob),\n            transforms.RandShiftIntensityd(keys=\"image\", offsets=0.1, prob=args.RandShiftIntensityd_prob),\n        ]\n\n    val_transform = [\n            transforms.LoadImaged(keys=[\"image\", \"label\"]),\n            transforms.EnsureChannelFirstd(keys=[\"image\", \"label\"]),\n            transforms.Orientationd(keys=[\"image\", \"label\"], axcodes=\"RAS\"),\n            transforms.Spacingd(\n                keys=[\"image\", \"label\"], pixdim=(args.space_x, args.space_y, args.space_z), mode=(\"bilinear\", \"nearest\")\n            ),\n            transforms.ScaleIntensityRanged(\n                keys=[\"image\"], a_min=args.a_min, a_max=args.a_max, b_min=args.b_min, b_max=args.b_max, clip=True\n            ),\n            transforms.CropForegroundd(keys=[\"image\", \"label\"], source_key=\"image\"),\n        ]\n\n    print('use persistent')\n    train_ds = PersistentDataset(data=datalist[:-20],\n                                  transform=train_trans,\n                     
             pickle_protocol=pickle.HIGHEST_PROTOCOL,\n                                  cache_dir='/data/linshan/cache/word')\n\n    train_sampler = Sampler(train_ds) if args.distributed else None\n    train_loader = data.DataLoader(\n        train_ds,\n        batch_size=args.batch_size,\n        shuffle=(train_sampler is None),\n        num_workers=args.workers,\n        sampler=train_sampler,\n        pin_memory=True,\n    )\n\n    val_ds = PersistentDataset(data=datalist[-20:],\n                             transform=val_transform,\n                             pickle_protocol=pickle.HIGHEST_PROTOCOL,\n                             cache_dir='/data/linshan/cache/word')\n\n    val_sampler = Sampler(val_ds, shuffle=False) if args.distributed else None\n    val_loader = data.DataLoader(\n        val_ds, batch_size=1, shuffle=False, num_workers=args.workers, sampler=val_sampler, pin_memory=True\n    )\n    loader = [train_loader, val_loader]\n\n    return loader"
  },
  {
    "path": "Finetune/Word/utils/utils.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport numpy as np\nimport scipy.ndimage as ndimage\nimport torch\nimport os\n\ndef resample_3d(img, target_size):\n    imx, imy, imz = img.shape\n    tx, ty, tz = target_size\n    zoom_ratio = (float(tx) / float(imx), float(ty) / float(imy), float(tz) / float(imz))\n    img_resampled = ndimage.zoom(img, zoom_ratio, order=0, prefilter=False)\n    return img_resampled\n\n\ndef dice(x, y):\n    intersect = np.sum(np.sum(np.sum(x * y)))\n    y_sum = np.sum(np.sum(np.sum(y)))\n    if y_sum == 0:\n        return 0.0\n    x_sum = np.sum(np.sum(np.sum(x)))\n    return 2 * intersect / (x_sum + y_sum)\n\n\nclass AverageMeter(object):\n    def __init__(self):\n        self.reset()\n\n    def reset(self):\n        self.val = 0\n        self.avg = 0\n        self.sum = 0\n        self.count = 0\n\n    def update(self, val, n=1):\n        self.val = val\n        self.sum += val * n\n        self.count += n\n        self.avg = np.where(self.count > 0, self.sum / self.count, self.sum)\n\n\ndef distributed_all_gather(\n    tensor_list, valid_batch_size=None, out_numpy=False, world_size=None, no_barrier=False, is_valid=None\n):\n    if world_size is None:\n        world_size = torch.distributed.get_world_size()\n    if valid_batch_size is not None:\n        valid_batch_size = min(valid_batch_size, world_size)\n    elif is_valid is not None:\n        is_valid = 
torch.tensor(bool(is_valid), dtype=torch.bool, device=tensor_list[0].device)\n    if not no_barrier:\n        torch.distributed.barrier()\n    tensor_list_out = []\n    with torch.no_grad():\n        if is_valid is not None:\n            is_valid_list = [torch.zeros_like(is_valid) for _ in range(world_size)]\n            torch.distributed.all_gather(is_valid_list, is_valid)\n            is_valid = [x.item() for x in is_valid_list]\n        for tensor in tensor_list:\n            gather_list = [torch.zeros_like(tensor) for _ in range(world_size)]\n            torch.distributed.all_gather(gather_list, tensor)\n            if valid_batch_size is not None:\n                gather_list = gather_list[:valid_batch_size]\n            elif is_valid is not None:\n                gather_list = [g for g, v in zip(gather_list, is_valid_list) if v]\n            if out_numpy:\n                gather_list = [t.cpu().numpy() for t in gather_list]\n            tensor_list_out.append(gather_list)\n    return tensor_list_out\n\n\ndef color_map(dataset='pascal'):\n    cmap = np.zeros((256, 3), dtype='uint8')\n\n    if dataset == 'pascal' or dataset == 'coco':\n        def bitget(byteval, idx):\n            return (byteval & (1 << idx)) != 0\n\n        for i in range(256):\n            r = g = b = 0\n            c = i\n            for j in range(8):\n                r = r | (bitget(c, 0) << 7-j)\n                g = g | (bitget(c, 1) << 7-j)\n                b = b | (bitget(c, 2) << 7-j)\n                c = c >> 3\n\n            cmap[i] = np.array([r, g, b])\n\n    elif dataset == 'cityscapes':\n        cmap[0] = np.array([128, 64, 128])\n        cmap[1] = np.array([244, 35, 232])\n        cmap[2] = np.array([70, 70, 70])\n        cmap[3] = np.array([102, 102, 156])\n        cmap[4] = np.array([190, 153, 153])\n        cmap[5] = np.array([153, 153, 153])\n        cmap[6] = np.array([250, 170, 30])\n        cmap[7] = np.array([220, 220, 0])\n        cmap[8] = np.array([107, 142, 35])\n  
      cmap[9] = np.array([152, 251, 152])\n        cmap[10] = np.array([70, 130, 180])\n        cmap[11] = np.array([220, 20, 60])\n        cmap[12] = np.array([255,  0,  0])\n        cmap[13] = np.array([0,  0, 142])\n        cmap[14] = np.array([0,  0, 70])\n        cmap[15] = np.array([0, 60, 100])\n        cmap[16] = np.array([0, 80, 100])\n        cmap[17] = np.array([0,  0, 230])\n        cmap[18] = np.array([119, 11, 32])\n\n        cmap[19] = np.array([0, 0, 0])\n        cmap[255] = np.array([0, 0, 0])\n\n    return cmap\n\n\ndef check_dir(dir):\n    if not os.path.exists(dir):\n        os.makedirs(dir)"
  },
  {
    "path": "Finetune/nnUNet/LICENSE",
    "content": "                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      
form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. 
(Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright [2019] [Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany]\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License."
  },
  {
    "path": "Finetune/nnUNet/documentation/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/documentation/benchmarking.md",
    "content": "# nnU-Netv2 benchmarks\n\nDoes your system run like it should? Is your epoch time longer than expected? What epoch times should you expect?\n\nLook no further for we have the solution here!\n\n## What does the nnU-netv2 benchmark do?\n\nnnU-Net's benchmark trains models for 5 epochs. At the end, the fastest epoch will \nbe noted down, along with the GPU name, torch version and cudnn version. You can find the benchmark output in the \ncorresponding nnUNet_results subfolder (see example below). Don't worry, we also provide scripts to collect your \nresults. Or you just start a benchmark and look at the console output. Everything is possible. Nothing is forbidden.\n\nThe benchmark implementation revolves around two trainers:\n- `nnUNetTrainerBenchmark_5epochs` runs a regular training for 5 epochs. When completed, writes a .json file with the fastest \nepoch time as well as the GPU used and the torch and cudnn versions. Useful for speed testing the entire pipeline \n(data loading, augmentation, GPU training)\n- `nnUNetTrainerBenchmark_5epochs_noDataLoading` is the same, but it doesn't do any data loading or augmentation. It \njust presents dummy arrays to the GPU. Useful for checking pure GPU speed.\n\n## How to run the nnU-Netv2 benchmark?\nIt's quite simple, actually. It looks just like a regular nnU-Net training.\n\nWe provide reference numbers for some of the Medical Segmentation Decathlon datasets because they are easily \naccessible: [download here](https://drive.google.com/drive/folders/1HqEgzS8BV2c7xYNrZdEAnrHk7osJJ--2). If it needs to be \nquick and dirty, focus on Tasks 2 and 4. Download and extract the data and convert them to the nnU-Net format with \n`nnUNetv2_convert_MSD_dataset`. \nRun `nnUNetv2_plan_and_preprocess` for them.\n\nThen, for each dataset, run the following commands (only one per GPU! 
Or one after the other):\n\n```bash\nnnUNetv2_train DATASET_ID 2d 0 -tr nnUNetTrainerBenchmark_5epochs\nnnUNetv2_train DATASET_ID 3d_fullres 0 -tr nnUNetTrainerBenchmark_5epochs\nnnUNetv2_train DATASET_ID 2d 0 -tr nnUNetTrainerBenchmark_5epochs_noDataLoading\nnnUNetv2_train DATASET_ID 3d_fullres 0 -tr nnUNetTrainerBenchmark_5epochs_noDataLoading\n```\n\nIf you want to inspect the outcome manually, check (for example!) your \n`nnUNet_results/DATASET_NAME/nnUNetTrainerBenchmark_5epochs__nnUNetPlans__3d_fullres/fold_0/` folder for the `benchmark_result.json` file.\n\nNote that there can be multiple entries in this file if the benchmark was run on different GPU types, torch versions or cudnn versions!\n\nIf you want to summarize your results like we did in our [results](#results), check the \n[summary script](../nnunetv2/batch_running/benchmarking/summarize_benchmark_results.py). Here you need to change the \ntorch version, cudnn version and dataset you want to summarize, then execute the script. You can find the exact \nvalues you need to put there in one of your `benchmark_result.json` files.\n\n## Results\nWe have tested a variety of GPUs and summarized the results in a \n[spreadsheet](https://docs.google.com/spreadsheets/d/12Cvt_gr8XU2qWaE0XJk5jJlxMEESPxyqW0CWbQhTNNY/edit?usp=sharing). \nNote that you can select the torch and cudnn versions at the bottom! There may be comments in this spreadsheet. Read them!\n\n## Result interpretation\n\nResults are shown as epoch time in seconds. Lower is better (duh). Epoch times can fluctuate between runs, so as \nlong as you are within like 5-10% of the numbers we report, everything should be dandy. \n\nIf not, here is how you can try to find the culprit!\n\nThe first thing to do is to compare the performance between the `nnUNetTrainerBenchmark_5epochs_noDataLoading` and \n`nnUNetTrainerBenchmark_5epochs` trainers. 
If the difference is about the same as we report in our spreadsheet, but \nboth your numbers are worse, the problem is with your GPU:\n\n- Are you certain you compare the correct GPU? (duh)\n- If yes, then you might want to install PyTorch in a different way. Never `pip install torch`! Go to the\n[PyTorch installation](https://pytorch.org/get-started/locally/) page, select the most recent cuda version your \nsystem supports and only then copy and execute the correct command! Either pip or conda should work\n- If the problem is still not fixed, we recommend you try \n[compiling pytorch from source](https://github.com/pytorch/pytorch#from-source). It's more difficult but that's \nhow we roll here at the DKFZ (at least the cool kids here).\n- Another thing to consider is to try exactly the same torch + cudnn version as we did in our spreadsheet. \nSometimes newer versions can actually degrade performance and there might be bugs from time to time. Older versions \nare also often a lot slower!\n- Finally, some very basic things that could impact your GPU performance: \n  - Is the GPU cooled adequately? Check the temperature with `nvidia-smi`. Hot GPUs throttle performance in order to not self-destruct\n  - Is your OS using the GPU for displaying your desktop at the same time? If so then you can expect a performance \n  penalty (I dunno like 10% !?). That's expected and OK.\n  - Are other users using the GPU as well?\n\n\nIf you see a large performance difference between `nnUNetTrainerBenchmark_5epochs_noDataLoading` (fast) and \n`nnUNetTrainerBenchmark_5epochs` (slow) then the problem might be related to data loading and augmentation. As a \nreminder, nnU-net does not use pre-augmented images (offline augmentation) but instead generates augmented training \nsamples on the fly during training (no, you cannot switch it to offline). This requires that your system can do partial \nreads of the image files fast enough (SSD storage required!) 
and that your CPU is powerful enough to run the augmentations.\n\nCheck the following:\n\n- [CPU bottleneck] How many CPU threads are running during the training? nnU-Net uses 12 processes for data augmentation by default. \nIf you see those 12 running constantly during training, consider increasing the number of processes used for data \naugmentation (provided there is headroom on your CPU!). Increase the number until you see less active workers than \nyou configured (or just set the number to 32 and forget about it). You can do so by setting the `nnUNet_n_proc_DA` \nenvironment variable (Linux: `export nnUNet_n_proc_DA=24`). Read [here](set_environment_variables.md) on how to do this.\nIf your CPU does not support more processes (setting more processes than your CPU has threads makes \nno sense!) you are out of luck and in desperate need of a system upgrade!\n- [I/O bottleneck] If you don't see 12 (or nnUNet_n_proc_DA if you set it) processes running but your training times \nare still slow then open up `top` (sorry, Windows users. I don't know how to do this on Windows) and look at the value \nleft of 'wa' in the row that begins \nwith '%Cpu (s)'. If this is >1.0 (arbitrarily set threshold here, essentially look for unusually high 'wa'. In a \nhealthy training 'wa' will be almost 0) then your storage cannot keep up with data loading. Make sure to set \nnnUNet_preprocessed to a folder that is located on an SSD. nvme is preferred over SATA. PCIe3 is enough. 3000MB/s \nsequential read recommended.\n- [funky stuff] Sometimes there is funky stuff going on, especially when batch sizes are large, files are small and \npatch sizes are small as well. As part of the data loading process, nnU-Net needs to open and close a file for each \ntraining sample. Now imagine a dataset like Dataset004_Hippocampus where for the 2d config we have a batch size of \n366 and we run 250 iterations in <10s on an A100. 
That's a lotta files per second (366 * 250 / 10 = 9150 files per second). \nOof. If the files are on some network drive (even if it's nvme) then (probably) good night. The good news: nnU-Net\nhas got you covered: add `export nnUNet_keep_files_open=True` to your .bashrc and the problem goes away. The neat \npart: it causes new problems if you are not allowed to have enough open files. You may have to increase the number \nof allowed open files. `ulimit -n` gives your current limit (Linux only). It should not be something like 1024. \nIncreasing that to 65535 works well for me. See here for how to change these limits: \n[Link](https://kupczynski.info/posts/ubuntu-18-10-ulimits/) \n(works for Ubuntu 18, google for your OS!).\n\n"
  },
  {
    "path": "Finetune/nnUNet/documentation/changelog.md",
    "content": "# What is different in v2?\n\n- We now support **hierarchical labels** (named regions in nnU-Net). For example, instead of training BraTS with the \n'edema', 'necrosis' and 'enhancing tumor' labels you can directly train it on the target areas 'whole tumor', \n'tumor core' and 'enhancing tumor'. See [here](region_based_training.md) for a detailed description + also have a look at the \n[BraTS 2021 conversion script](../nnunetv2/dataset_conversion/Dataset137_BraTS21.py).\n- Cross-platform support. Cuda, mps (Apple M1/M2) and of course CPU support! Simply select the device with \n`-device` in `nnUNetv2_train` and `nnUNetv2_predict`.\n- Unified trainer class: nnUNetTrainer. No messing around with cascaded trainer, DDP trainer, region-based trainer, \nignore trainer etc. All default functionality is in there!\n- Supports more input/output data formats through ImageIO classes.\n- I/O formats can be extended by implementing new Adapters based on `BaseReaderWriter`.\n- The nnUNet_raw_cropped folder no longer exists -> saves disk space at no performance penalty. magic! (no jk the \nsaving of cropped npz files was really slow, so it's actually faster to crop on the fly).\n- Preprocessed data and segmentation are stored in different files when unpacked. Seg is stored as int8 and thus \ntakes 1/4 of the disk space per pixel (and I/O throughput) as in v1.\n- Native support for multi-GPU (DDP) TRAINING. \nMulti-GPU INFERENCE should still be run with `CUDA_VISIBLE_DEVICES=X nnUNetv2_predict [...] -num_parts Y -part_id X`. \nThere is no cross-GPU communication in inference, so it doesn't make sense to add additional complexity with DDP.\n- All nnU-Net functionality is now also accessible via API. 
Check the corresponding entry point in `setup.py` to see \nwhat functions you need to call.\n- Dataset fingerprint is now explicitly created and saved in a json file (see nnUNet_preprocessed).\n\n- Complete overhaul of plans files (read also [this](explanation_plans_files.md):\n  - Plans are now .json and can be opened and read more easily\n  - Configurations are explicitly named (\"3d_fullres\" , ...)\n  - Configurations can inherit from each other to make manual experimentation easier\n  - A ton of additional functionality is now included in and can be changed through the plans, for example normalization strategy, resampling etc.\n  - Stages of the cascade are now explicitly listed in the plans. 3d_lowres has 'next_stage' (which can also be a \n  list of configurations!). 3d_cascade_fullres has a 'previous_stage' entry. By manually editing plans files you can \n  now connect anything you want, for example 2d with 3d_fullres or whatever. Be wild! (But don't create cycles!)\n  - Multiple configurations can point to the same preprocessed data folder to save disk space. Careful! Only \n  configurations that use the same spacing, resampling, normalization etc. should share a data source! 
By default, \n  3d_fullres and 3d_cascade_fullres share the same data\n  - Any number of configurations can be added to the plans (remember to give them a unique \"data_identifier\"!)\n\nFolder structures are different and more user-friendly:\n- nnUNet_preprocessed\n  - By default, preprocessed data is now saved as: `nnUNet_preprocessed/DATASET_NAME/PLANS_IDENTIFIER_CONFIGURATION` to clearly link them to their corresponding plans and configuration \n  - Name of the folder containing the preprocessed images can be adapted with the `data_identifier` key.\n- nnUNet_results\n  - Results are now sorted as follows: DATASET_NAME/TRAINERCLASS__PLANSIDENTIFIER__CONFIGURATION/FOLD\n\n## What other changes are planned and not yet implemented?\n- Integration into MONAI (together with our friends at Nvidia)\n- New pretrained weights for a large number of datasets (coming very soon)\n\n\n[//]: # (- nnU-Net now also natively supports an **ignore label**. Pixels with this label will not contribute to the loss. )\n\n[//]: # (Use this to learn from sparsely annotated data, or excluding irrelevant areas from training. Read more [here]&#40;ignore_label.md&#41;.)"
  },
  {
    "path": "Finetune/nnUNet/documentation/competitions/AutoPETII.md",
    "content": "# Look Ma, no code: fine tuning nnU-Net for the AutoPET II challenge by only adjusting its JSON plans\n\nPlease cite our paper :-*\n\n```text\nCOMING SOON\n```\n\n## Intro\n\nSee the [Challenge Website](https://autopet-ii.grand-challenge.org/) for details on the challenge.\n\nOur solution to this challenge rewuires no code changes at all. All we do is optimize nnU-Net's hyperparameters \n(architecture, batch size, patch size) through modifying the nnUNetplans.json file.\n\n## Prerequisites\nUse the latest pytorch version!\n\nWe recommend you use the latest nnU-Net version as well! We ran our trainings with commit 913705f which you can try in case something doesn't work as expected:\n`pip install git+https://github.com/MIC-DKFZ/nnUNet.git@913705f`\n\n## How to reproduce our trainings\n\n### Download and convert the data\n1. Download and extract the AutoPET II dataset\n2. Convert it to nnU-Net format by running `python nnunetv2/dataset_conversion/Dataset221_AutoPETII_2023.py FOLDER` where folder is the extracted AutoPET II dataset.\n\n### Experiment planning and preprocessing\nWe deviate a little from the standard nnU-Net procedure because all our experiments are based on just the 3d_fullres configuration\n\nRun the following commands:\n   - `nnUNetv2_extract_fingerprint -d 221` extracts the dataset fingerprint \n   - `nnUNetv2_plan_experiment -d 221` does the planning for the plain unet\n   - `nnUNetv2_plan_experiment -d 221 -pl ResEncUNetPlanner` does the planning for the residual encoder unet\n   - `nnUNetv2_preprocess -d 221 -c 3d_fullres` runs all the preprocessing we need\n\n### Modification of plans files\nPlease read the [information on how to modify plans files](../explanation_plans_files.md) first!!!\n\n\nIt is easier to have everything in one plans file, so the first thing we do is transfer the ResEnc UNet to the \ndefault plans file. 
We use the configuration inheritance feature of nnU-Net to make it use the same data as the \n3d_fullres configuration.\nAdd the following to the 'configurations' dict in 'nnUNetPlans.json':\n\n```json\n        \"3d_fullres_resenc\": {\n            \"inherits_from\": \"3d_fullres\",\n            \"UNet_class_name\": \"ResidualEncoderUNet\",\n            \"n_conv_per_stage_encoder\": [\n                1,\n                3,\n                4,\n                6,\n                6,\n                6\n            ],\n            \"n_conv_per_stage_decoder\": [\n                1,\n                1,\n                1,\n                1,\n                1\n            ]\n        },\n```\n\n(these values are basically just copied from the 'nnUNetResEncUNetPlans.json' file! With everything redundant being omitted thanks to inheritance from 3d_fullres)\n\nNow we crank up the patch and batch sizes. Add the following configurations:\n```json\n        \"3d_fullres_resenc_bs80\": {\n            \"inherits_from\": \"3d_fullres_resenc\",\n            \"batch_size\": 80\n            },\n        \"3d_fullres_resenc_192x192x192_b24\": {\n            \"inherits_from\": \"3d_fullres_resenc\",\n            \"patch_size\": [\n                192,\n                192,\n                192\n            ],\n            \"batch_size\": 24\n        }\n```\n\nSave the file (and check for potential Syntax Errors!)\n\n### Run trainings\nTraining each model requires 8 Nvidia A100 40GB GPUs. Expect training to run for 5-7 days. You'll need a really good \nCPU to handle the data augmentation! 128C/256T are a must! 
If you have less threads available, scale down nnUNet_n_proc_DA accordingly.\n\n```bash\nnnUNet_compile=T nnUNet_n_proc_DA=28 nnUNetv2_train 221 3d_fullres_resenc_bs80 0 -num_gpus 8\nnnUNet_compile=T nnUNet_n_proc_DA=28 nnUNetv2_train 221 3d_fullres_resenc_bs80 1 -num_gpus 8\nnnUNet_compile=T nnUNet_n_proc_DA=28 nnUNetv2_train 221 3d_fullres_resenc_bs80 2 -num_gpus 8\nnnUNet_compile=T nnUNet_n_proc_DA=28 nnUNetv2_train 221 3d_fullres_resenc_bs80 3 -num_gpus 8\nnnUNet_compile=T nnUNet_n_proc_DA=28 nnUNetv2_train 221 3d_fullres_resenc_bs80 4 -num_gpus 8\n\nnnUNet_compile=T nnUNet_n_proc_DA=28 nnUNetv2_train 221 3d_fullres_resenc_192x192x192_b24 0 -num_gpus 8\nnnUNet_compile=T nnUNet_n_proc_DA=28 nnUNetv2_train 221 3d_fullres_resenc_192x192x192_b24 1 -num_gpus 8\nnnUNet_compile=T nnUNet_n_proc_DA=28 nnUNetv2_train 221 3d_fullres_resenc_192x192x192_b24 2 -num_gpus 8\nnnUNet_compile=T nnUNet_n_proc_DA=28 nnUNetv2_train 221 3d_fullres_resenc_192x192x192_b24 3 -num_gpus 8\nnnUNet_compile=T nnUNet_n_proc_DA=28 nnUNetv2_train 221 3d_fullres_resenc_192x192x192_b24 4 -num_gpus 8\n```\n\nDone!\n\n(We also provide pretrained weights in case you don't want to invest the GPU resources, see below)\n\n## How to make predictions with pretrained weights\nOur final model is an ensemble of two configurations:\n- ResEnc UNet with batch size 80\n- ResEnc UNet with patch size 192x192x192 and batch size 24\n\nTo run inference with these models, do the following:\n\n1. Download the pretrained model weights from [Zenodo](https://zenodo.org/record/8362371)\n2. Install both .zip files using `nnUNetv2_install_pretrained_model_from_zip`\n3. Make sure \n4. 
Now you can run inference on new cases with `nnUNetv2_predict`:\n   - `nnUNetv2_predict -i INPUT -o OUTPUT1 -d 221 -c 3d_fullres_resenc_bs80 -f 0 1 2 3 4 -step_size 0.6 --save_probabilities`   \n   - `nnUNetv2_predict -i INPUT -o OUTPUT2 -d 221 -c 3d_fullres_resenc_192x192x192_b24 -f 0 1 2 3 4 --save_probabilities`\n   - `nnUNetv2_ensemble -i OUTPUT1 OUTPUT2 -o OUTPUT_ENSEMBLE`\n\nNote that our inference Docker omitted TTA via mirroring along the axial direction during prediction (only sagittal + \ncoronal mirroring). This was\ndone to keep the inference time below 10 minutes per image on a T4 GPU (we actually never tested whether we could \nhave left this enabled). Just leave it on! You can also leave the step_size at default for the 3d_fullres_resenc_bs80."
  },
  {
    "path": "Finetune/nnUNet/documentation/convert_msd_dataset.md",
    "content": "Use `nnUNetv2_convert_MSD_dataset`.\n\nRead `nnUNetv2_convert_MSD_dataset -h` for usage instructions."
  },
  {
    "path": "Finetune/nnUNet/documentation/dataset_format.md",
    "content": "# nnU-Net dataset format\nThe only way to bring your data into nnU-Net is by storing it in a specific format. Due to nnU-Net's roots in the\n[Medical Segmentation Decathlon](http://medicaldecathlon.com/) (MSD), its dataset is heavily inspired but has since \ndiverged (see also [here](#how-to-use-decathlon-datasets)) from the format used in the MSD.\n\nDatasets consist of three components: raw images, corresponding segmentation maps and a dataset.json file specifying \nsome metadata. \n\nIf you are migrating from nnU-Net v1, read [this](#how-to-use-nnu-net-v1-tasks) to convert your existing Tasks.\n\n\n## What do training cases look like?\nEach training case is associated with an identifier = a unique name for that case. This identifier is used by nnU-Net to \nconnect images with the correct segmentation.\n\nA training case consists of images and their corresponding segmentation. \n\n**Images** is plural because nnU-Net supports arbitrarily many input channels. In order to be as flexible as possible, \nnnU-net requires each input channel to be stored in a separate image (with the sole exception being RGB natural \nimages). So these images could for example be a T1 and a T2 MRI (or whatever else you want). The different input \nchannels MUST have the same geometry (same shape, spacing (if applicable) etc.) and\nmust be co-registered (if applicable). Input channels are identified by nnU-Net by their FILE_ENDING: a four-digit integer at the end \nof the filename. Image files must therefore follow the following naming convention: {CASE_IDENTIFIER}_{XXXX}.{FILE_ENDING}. \nHereby, XXXX is the 4-digit modality/channel identifier (should be unique for each modality/channel, e.g., “0000” for T1, “0001” for \nT2 MRI, …) and FILE_ENDING is the file extension used by your image format (.png, .nii.gz, ...). 
See below for concrete examples.\nThe dataset.json file connects channel names with the channel identifiers in the 'channel_names' key (see below for details).\n\nSide note: Typically, each channel/modality needs to be stored in a separate file and is accessed with the XXXX channel identifier. \nException are natural images (RGB; .png) where the three color channels can all be stored in one file (see the \n[road segmentation](../nnunetv2/dataset_conversion/Dataset120_RoadSegmentation.py) dataset as an example). \n\n**Segmentations** must share the same geometry with their corresponding images (same shape etc.). Segmentations are \ninteger maps with each value representing a semantic class. The background must be 0. If there is no background, then \ndo not use the label 0 for something else! Integer values of your semantic classes must be consecutive (0, 1, 2, 3, \n...). Of course, not all labels have to be present in each training case. Segmentations are saved as {CASE_IDENTIFER}.{FILE_ENDING} .\n\nWithin a training case, all image geometries (input channels, corresponding segmentation) must match. Between training \ncases, they can of course differ. nnU-Net takes care of that.\n\nImportant: The input channels must be consistent! Concretely, **all images need the same input channels in the same \norder and all input channels have to be present every time**. This is also true for inference!\n\n\n## Supported file formats\nnnU-Net expects the same file format for images and segmentations! These will also be used for inference. For now, it \nis thus not possible to train .png and then run inference on .jpg.\n\nOne big change in nnU-Net V2 is the support of multiple input file types. Gone are the days of converting everything to .nii.gz!\nThis is implemented by abstracting the input and output of images + segmentations through `BaseReaderWriter`. nnU-Net \ncomes with a broad collection of Readers+Writers and you can even add your own to support your data format! 
\nSee [here](../nnunetv2/imageio/readme.md).\n\nAs a nice bonus, nnU-Net now also natively supports 2D input images and you no longer have to mess around with \nconversions to pseudo 3D niftis. Yuck. That was disgusting.\n\nNote that internally (for storing and accessing preprocessed images) nnU-Net will use its own file format, irrespective \nof what the raw data was provided in! This is for performance reasons.\n\n\nBy default, the following file formats are supported:\n\n- NaturalImage2DIO: .png, .bmp, .tif\n- NibabelIO: .nii.gz, .nrrd, .mha\n- NibabelIOWithReorient: .nii.gz, .nrrd, .mha. This reader will reorient images to RAS!\n- SimpleITKIO: .nii.gz, .nrrd, .mha\n- Tiff3DIO: .tif, .tiff. 3D tif images! Since TIF does not have a standardized way of storing spacing information, \nnnU-Net expects each TIF file to be accompanied by an identically named .json file that contains this information (see\n[here](#datasetjson)).\n\nThe file extension lists are not exhaustive and depend on what the backend supports. For example, nibabel and SimpleITK \nsupport more than the three given here. The file endings given here are just the ones we tested!\n\nIMPORTANT: nnU-Net can only be used with file formats that use lossless (or no) compression! Because the file \nformat is defined for an entire dataset (and not separately for images and segmentations, this could be a todo for \nthe future), we must ensure that there are no compression artifacts that destroy the segmentation maps. So no .jpg and \nthe likes! \n\n## Dataset folder structure\nDatasets must be located in the `nnUNet_raw` folder (which you either define when installing nnU-Net or export/set every \ntime you intend to run nnU-Net commands!).\nEach segmentation dataset is stored as a separate 'Dataset'. Datasets are associated with a dataset ID, a three digit \ninteger, and a dataset name (which you can freely choose): For example, Dataset005_Prostate has 'Prostate' as dataset name and \nthe dataset id is 5. 
Datasets are stored in the `nnUNet_raw` folder like this:\n\n    nnUNet_raw/\n    ├── Dataset001_BrainTumour\n    ├── Dataset002_Heart\n    ├── Dataset003_Liver\n    ├── Dataset004_Hippocampus\n    ├── Dataset005_Prostate\n    ├── ...\n\nWithin each dataset folder, the following structure is expected:\n\n    Dataset001_BrainTumour/\n    ├── dataset.json\n    ├── imagesTr\n    ├── imagesTs  # optional\n    └── labelsTr\n\n\nWhen adding your custom dataset, take a look at the [dataset_conversion](../nnunetv2/dataset_conversion) folder and \npick an id that is not already taken. IDs 001-010 are for the Medical Segmentation Decathlon.\n\n- **imagesTr** contains the images belonging to the training cases. nnU-Net will perform pipeline configuration, training with \ncross-validation, as well as finding postprocessing and the best ensemble using this data. \n- **imagesTs** (optional) contains the images that belong to the test cases. nnU-Net does not use them! This could just \nbe a convenient location for you to store these images. Remnant of the Medical Segmentation Decathlon folder structure.\n- **labelsTr** contains the images with the ground truth segmentation maps for the training cases. \n- **dataset.json** contains metadata of the dataset.\n\nThe scheme introduced [above](#what-do-training-cases-look-like) results in the following folder structure. Given \nis an example for the first Dataset of the MSD: BrainTumour. This dataset hat four input channels: FLAIR (0000), \nT1w (0001), T1gd (0002) and T2w (0003). 
Note that the imagesTs folder is optional and does not have to be present.\n\n    nnUNet_raw/Dataset001_BrainTumour/\n    ├── dataset.json\n    ├── imagesTr\n    │   ├── BRATS_001_0000.nii.gz\n    │   ├── BRATS_001_0001.nii.gz\n    │   ├── BRATS_001_0002.nii.gz\n    │   ├── BRATS_001_0003.nii.gz\n    │   ├── BRATS_002_0000.nii.gz\n    │   ├── BRATS_002_0001.nii.gz\n    │   ├── BRATS_002_0002.nii.gz\n    │   ├── BRATS_002_0003.nii.gz\n    │   ├── ...\n    ├── imagesTs\n    │   ├── BRATS_485_0000.nii.gz\n    │   ├── BRATS_485_0001.nii.gz\n    │   ├── BRATS_485_0002.nii.gz\n    │   ├── BRATS_485_0003.nii.gz\n    │   ├── BRATS_486_0000.nii.gz\n    │   ├── BRATS_486_0001.nii.gz\n    │   ├── BRATS_486_0002.nii.gz\n    │   ├── BRATS_486_0003.nii.gz\n    │   ├── ...\n    └── labelsTr\n        ├── BRATS_001.nii.gz\n        ├── BRATS_002.nii.gz\n        ├── ...\n\nHere is another example of the second dataset of the MSD, which has only one input channel:\n\n    nnUNet_raw/Dataset002_Heart/\n    ├── dataset.json\n    ├── imagesTr\n    │   ├── la_003_0000.nii.gz\n    │   ├── la_004_0000.nii.gz\n    │   ├── ...\n    ├── imagesTs\n    │   ├── la_001_0000.nii.gz\n    │   ├── la_002_0000.nii.gz\n    │   ├── ...\n    └── labelsTr\n        ├── la_003.nii.gz\n        ├── la_004.nii.gz\n        ├── ...\n\nRemember: For each training case, all images must have the same geometry to ensure that their pixel arrays are aligned. Also \nmake sure that all your data is co-registered!\n\nSee also [dataset format inference](dataset_format_inference.md)!!\n\n## dataset.json\nThe dataset.json contains metadata that nnU-Net needs for training. 
We have greatly reduced the number of required \nfields since version 1!\n\nHere is what the dataset.json should look like at the example of the Dataset005_Prostate from the MSD:\n\n    { \n     \"channel_names\": {  # formerly modalities\n       \"0\": \"T2\", \n       \"1\": \"ADC\"\n     }, \n     \"labels\": {  # THIS IS DIFFERENT NOW!\n       \"background\": 0,\n       \"PZ\": 1,\n       \"TZ\": 2\n     }, \n     \"numTraining\": 32, \n     \"file_ending\": \".nii.gz\"\n     \"overwrite_image_reader_writer\": \"SimpleITKIO\"  # optional! If not provided nnU-Net will automatically determine the ReaderWriter\n     }\n\nThe channel_names determine the normalization used by nnU-Net. If a channel is marked as 'CT', then a global \nnormalization based on the intensities in the foreground pixels will be used. If it is something else, per-channel \nz-scoring will be used. Refer to the methods section in [our paper](https://www.nature.com/articles/s41592-020-01008-z) \nfor more details. nnU-Net v2 introduces a few more normalization schemes to \nchoose from and allows you to define your own, see [here](explanation_normalization.md) for more information. \n\nImportant changes relative to nnU-Net v1:\n- \"modality\" is now called \"channel_names\" to remove strong bias to medical images\n- labels are structured differently (name -> int instead of int -> name). This was needed to support [region-based training](region_based_training.md)\n- \"file_ending\" is added to support different input file types\n- \"overwrite_image_reader_writer\" optional! Can be used to specify a certain (custom) ReaderWriter class that should \nbe used with this dataset. If not provided, nnU-Net will automatically determine the ReaderWriter\n- \"regions_class_order\" only used in [region-based training](region_based_training.md)\n\nThere is a utility with which you can generate the dataset.json automatically. You can find it \n[here](../nnunetv2/dataset_conversion/generate_dataset_json.py). 
\nSee our examples in [dataset_conversion](../nnunetv2/dataset_conversion) for how to use it. And read its documentation!\n\nAs described above, a json file that contains spacing information is required for TIFF files.\nAn example for a 3D TIFF stack with units corresponding to 7.6 in x and y, 80 in z is:\n\n```\n{\n    \"spacing\": [7.6, 7.6, 80.0]\n}\n```\n\nWithin the dataset folder, this file (named `cell6.json` in this example) would be placed in the following folders:\n\n    nnUNet_raw/Dataset123_Foo/\n    ├── dataset.json\n    ├── imagesTr\n    │   ├── cell6.json\n    │   └── cell6_0000.tif\n    └── labelsTr\n        ├── cell6.json\n        └── cell6.tif\n\n\n## How to use nnU-Net v1 Tasks\nIf you are migrating from the old nnU-Net, convert your existing datasets with `nnUNetv2_convert_old_nnUNet_dataset`!\n\nExample for migrating a nnU-Net v1 Task:\n```bash\nnnUNetv2_convert_old_nnUNet_dataset /media/isensee/raw_data/nnUNet_raw_data_base/nnUNet_raw_data/Task027_ACDC Dataset027_ACDC \n```\nUse `nnUNetv2_convert_old_nnUNet_dataset -h` for detailed usage instructions.\n\n\n## How to use decathlon datasets\nSee [convert_msd_dataset.md](convert_msd_dataset.md)\n\n## How to use 2D data with nnU-Net\n2D is now natively supported (yay!). See [here](#supported-file-formats) as well as the example dataset in this \n[script](../nnunetv2/dataset_conversion/Dataset120_RoadSegmentation.py).\n\n\n## How to update an existing dataset\nWhen updating a dataset it is best practice to remove the preprocessed data in `nnUNet_preprocessed/DatasetXXX_NAME` \nto ensure a fresh start. Then replace the data in `nnUNet_raw` and rerun `nnUNetv2_plan_and_preprocess`. Optionally, \nalso remove the results from old trainings.\n\n# Example dataset conversion scripts\nIn the `dataset_conversion` folder (see [here](../nnunetv2/dataset_conversion)) are multiple example scripts for \nconverting datasets into nnU-Net format. 
These scripts cannot be run as they are (you need to open them and change \nsome paths) but they are excellent examples for you to learn how to convert your own datasets into nnU-Net format. \nJust pick the dataset that is closest to yours as a starting point.\nThe list of dataset conversion scripts is continually updated. If you find that some publicly available dataset is \nmissing, feel free to open a PR to add it!\n"
  },
  {
    "path": "Finetune/nnUNet/documentation/dataset_format_inference.md",
    "content": "# Data format for Inference \nRead the documentation on the overall [data format](dataset_format.md) first!\n\nThe data format for inference must match the one used for the raw data (**specifically, the images must be in exactly \nthe same format as in the imagesTr folder**). As before, the filenames must start with a\nunique identifier, followed by a 4-digit modality identifier. Here is an example for two different datasets:\n\n1) Task005_Prostate:\n\n    This task has 2 modalities, so the files in the input folder must look like this:\n\n        input_folder\n        ├── prostate_03_0000.nii.gz\n        ├── prostate_03_0001.nii.gz\n        ├── prostate_05_0000.nii.gz\n        ├── prostate_05_0001.nii.gz\n        ├── prostate_08_0000.nii.gz\n        ├── prostate_08_0001.nii.gz\n        ├── ...\n\n    _0000 has to be the T2 image and _0001 has to be the ADC image (as specified by 'channel_names' in the \ndataset.json), exactly the same as was used for training.\n\n2) Task002_Heart:\n\n        imagesTs\n        ├── la_001_0000.nii.gz\n        ├── la_002_0000.nii.gz\n        ├── la_006_0000.nii.gz\n        ├── ...\n    \n    Task002 only has one modality, so each case only has one _0000.nii.gz file.\n  \n\nThe segmentations in the output folder will be named {CASE_IDENTIFIER}.nii.gz (omitting the modality identifier).\n\nRemember that the file format used for inference (.nii.gz in this example) must be the same as was used for training \n(and as was specified in 'file_ending' in the dataset.json)!\n   "
  },
  {
    "path": "Finetune/nnUNet/documentation/explanation_normalization.md",
    "content": "# Intensity normalization in nnU-Net \n\nThe type of intensity normalization applied in nnU-Net can be controlled via the `channel_names` (former `modalities`)\nentry in the dataset.json. Just like the old nnU-Net, per-channel z-scoring as well as dataset-wide z-scoring based on \nforeground intensities are supported. However, there have been a few additions as well.\n\nReminder: The `channel_names` entry typically looks like this: \n\n    \"channel_names\": {\n        \"0\": \"T2\",\n        \"1\": \"ADC\"\n    },\n\nIt has as many entries as there are input channels for the given dataset.\n\nTo tell you a secret, nnU-Net does not really care what your channels are called. We just use this to determine what normalization\nscheme will be used for the given dataset. nnU-Net requires you to specify a normalization strategy for each of your input channels! \nIf you enter a channel name that is not in the following list, the default (`zscore`) will be used.\n\nHere is a list of currently available normalization schemes:\n\n- `CT`: Perform CT normalization. Specifically, collect intensity values from the foreground classes (all but the \nbackground and ignore) from all training cases, compute the mean, standard deviation as well as the 0.5 and \n99.5 percentile of the values. Then clip to the percentiles, followed by subtraction of the mean and division with the \nstandard deviation. The normalization that is applied is the same for each training case (for this input channel).\nThe values used by nnU-Net for normalization are stored in the `foreground_intensity_properties_per_channel` entry in the \ncorresponding plans file. This normalization is suitable for modalities presenting physical quantities such as CT \nimages and ADC maps.\n- `noNorm` : do not perform any normalization at all\n- `rescale_to_0_1`: rescale the intensities to [0, 1]\n- `rgb_to_0_1`: assumes uint8 inputs. 
Divides by 255 to rescale uint8 to [0, 1]\n- `zscore`/anything else: perform z-scoring (subtract mean and standard deviation) separately for each train case\n\n**Important:** The nnU-Net default is to perform 'CT' normalization for CT images and 'zscore' for everything else! If \nyou deviate from that path, make sure to benchmark whether that actually improves results! \n\n# How to implement custom normalization strategies?\n- Head over to nnunetv2/preprocessing/normalization\n- implement a new image normalization class by deriving from ImageNormalization\n- register it in nnunetv2/preprocessing/normalization/map_channel_name_to_normalization.py:channel_name_to_normalization_mapping. \nThis is where you specify a channel name that should be associated with it\n- use it by specifying the correct channel_name\n\nNormalization can only be applied to one channel at a time. There is currently no way of implementing a normalization scheme \nthat gets multiple channels as input to be used jointly!"
  },
  {
    "path": "Finetune/nnUNet/documentation/explanation_plans_files.md",
    "content": "# Modifying the nnU-Net Configurations\n\nnnU-Net provides unprecedented out-of-the-box segmentation performance for essentially any dataset we have evaluated \nit on. That said, there is always room for improvements. A fool-proof strategy for squeezing out the last bit of \nperformance is to start with the default nnU-Net, and then further tune it manually to a concrete dataset at hand.\n**This guide is about changes to the nnU-Net configuration you can make via the plans files. It does not cover code \nextensions of nnU-Net. For that, take a look [here](extending_nnunet.md)**\n\nIn nnU-Net V2, plans files are SO MUCH MORE powerful than they were in v1. There are a lot more knobs that you can \nturn without resorting to hacky solutions or even having to touch the nnU-Net code at all! And as an added bonus: \nplans files are now also .json files and no longer require users to fiddle with pickle. Just open them in your text \neditor of choice!\n\nIf overwhelmed, look at our [Examples](#examples)!\n\n# plans.json structure\n\nPlans have global and local settings. Global settings are applied to all configurations in that plans file while \nlocal settings are attached to a specific configuration.\n\n## Global settings\n\n- `foreground_intensity_properties_by_modality`: Intensity statistics of the foreground regions (all labels except \nbackground and ignore label), computed over all training cases. Used by [CT normalization scheme](explanation_normalization.md).\n- `image_reader_writer`: Name of the image reader/writer class that should be used with this dataset. You might want \nto change this if, for example, you would like to run inference with files that have a different file format. The \nclass that is named here must be located in nnunetv2.imageio!\n- `label_manager`: The name of the class that does label handling. Take a look at \nnnunetv2.utilities.label_handling.LabelManager to see what it does. 
If you decide to change it, place your version \nin nnunetv2.utilities.label_handling!\n- `transpose_forward`: nnU-Net transposes the input data so that the axes with the highest resolution (lowest spacing) \ncome last. This is because the 2D U-Net operates on the trailing dimensions (more efficient slicing due to internal \nmemory layout of arrays). Future work might move this setting to affect only individual configurations. \n- transpose_backward is what numpy.transpose gets as new axis ordering.\n- `transpose_backward`: the axis ordering that inverts \"transpose_forward\"\n- \\[`original_median_shape_after_transp`\\]: just here for your information\n- \\[`original_median_spacing_after_transp`\\]: just here for your information\n- \\[`plans_name`\\]: do not change. Used internally\n- \\[`experiment_planner_used`\\]: just here as metadata so that we know what planner originally generated this file\n- \\[`dataset_name`\\]: do not change. This is the dataset these plans are intended for\n\n## Local settings\nPlans also have a `configurations` key in which the actual configurations are stored. `configurations` are again a \ndictionary, where the keys are the configuration names and the values are the local settings for each configuration.\n\nTo better understand the components describing the network topology in our plans files, please read section 6.2 \nin the [supplementary information](https://static-content.springer.com/esm/art%3A10.1038%2Fs41592-020-01008-z/MediaObjects/41592_2020_1008_MOESM1_ESM.pdf) \n(page 13) of our paper!\n\nLocal settings:\n- `spacing`: the target spacing used in this configuration\n- `patch_size`: the patch size used for training this configuration\n- `data_identifier`: the preprocessed data for this configuration will be saved in\n  nnUNet_preprocessed/DATASET_NAME/_data_identifier_. 
If you add a new configuration, remember to set a unique\n  data_identifier in order to not create conflicts with other configurations (unless you plan to reuse the data from\n  another configuration, for example as is done in the cascade)\n- `batch_size`: batch size used for training\n- `batch_dice`: whether to use batch dice (pretend all samples in the batch are one image, compute dice loss over that)\nor not (each sample in the batch is a separate image, compute dice loss for each sample and average over samples)\n- `preprocessor_name`: Name of the preprocessor class used for running preprocessing. Class must be located in \nnnunetv2.preprocessing.preprocessors\n- `use_mask_for_norm`: whether to use the nonzero mask for normalization or not (relevant for BraTS and the like, \nprobably False for all other datasets). Interacts with ImageNormalization class\n- `normalization_schemes`: mapping of channel identifier to ImageNormalization class name. ImageNormalization \nclasses must be located in nnunetv2.preprocessing.normalization. Also see [here](explanation_normalization.md)\n- `resampling_fn_data`: name of resampling function to be used for resizing image data. resampling function must be \ncallable(data, current_spacing, new_spacing, **kwargs). It must be located in nnunetv2.preprocessing.resampling\n- `resampling_fn_data_kwargs`: kwargs for resampling_fn_data\n- `resampling_fn_probabilities`: name of resampling function to be used for resizing predicted class probabilities/logits. \nresampling function must be `callable(data: Union[np.ndarray, torch.Tensor], current_spacing, new_spacing, **kwargs)`. It must be located in \nnnunetv2.preprocessing.resampling\n- `resampling_fn_probabilities_kwargs`: kwargs for resampling_fn_probabilities\n- `resampling_fn_seg`: name of resampling function to be used for resizing segmentation maps (integer: 0, 1, 2, 3, etc). \nresampling function must be callable(data, current_spacing, new_spacing, **kwargs). 
It must be located in \nnnunetv2.preprocessing.resampling\n- `resampling_fn_seg_kwargs`: kwargs for resampling_fn_seg\n- `UNet_class_name`: UNet class name, can be used to integrate custom dynamic architectures\n- `UNet_base_num_features`: The number of starting features for the UNet architecture. Default is 32. Default: Features\nare doubled with each downsampling \n- `unet_max_num_features`: Maximum number of features (default: capped at 320 for 3D and 512 for 2d). The purpose is to \nprevent parameters from exploding too much. \n- `conv_kernel_sizes`: the convolutional kernel sizes used by nnU-Net in each stage of the encoder. The decoder \n  mirrors the encoder and is therefore not explicitly listed here! The list is as long as `n_conv_per_stage_encoder` has \n  entries\n- `n_conv_per_stage_encoder`: number of convolutions used per stage (=at a feature map resolution in the encoder) in the encoder. \n  Default is 2. The list has as many entries as the encoder has stages\n- `n_conv_per_stage_decoder`: number of convolutions used per stage in the decoder. Also see `n_conv_per_stage_encoder`\n- `num_pool_per_axis`: number of times each of the spatial axes is pooled in the network. Needed to know how to pad \n  image sizes during inference (num_pool = 5 means input must be divisible by 2**5=32)\n- `pool_op_kernel_sizes`: the pooling kernel sizes (and at the same time strides) for each stage of the encoder\n- \\[`median_image_size_in_voxels`\\]: the median size of the images of the training set at the current target spacing. \nDo not modify this as this is not used. It is just here for your information.\n\nSpecial local settings:\n- `inherits_from`: configurations can inherit from each other. This makes it easy to add new configurations that only\ndiffer in a few local settings from another. 
If using this, remember to set a new `data_identifier` (if needed)!\n- `previous_stage`: if this configuration is part of a cascade, we need to know what the previous stage (for example \nthe low resolution configuration) was. This needs to be specified here.\n- `next_stage`: if this configuration is part of a cascade, we need to know what possible subsequent stages are! This \nis because we need to export predictions in the correct spacing when running the validation. `next_stage` can either \nbe a string or a list of strings\n\n# Examples\n\n## Increasing the batch size for large datasets\nIf your dataset is large the training can benefit from larger batch_sizes. To do this, simply create a new \nconfiguration in the `configurations` dict\n\n    \"configurations\": {\n      \"3d_fullres_bs40\": {\n        \"inherits_from\": \"3d_fullres\",\n        \"batch_size\": 40\n      }\n    }\n\nNo need to change the data_identifier. `3d_fullres_bs40` will just use the preprocessed data from `3d_fullres`.\nNo need to rerun `nnUNetv2_preprocess` because we can use already existing data (if available) from `3d_fullres`.\n\n## Using custom preprocessors\nIf you would like to use a different preprocessor class then this can be specified as follows:\n\n    \"configurations\": {\n      \"3d_fullres_my_preprocesor\": {\n        \"inherits_from\": \"3d_fullres\",\n        \"preprocessor_name\": MY_PREPROCESSOR,\n        \"data_identifier\": \"3d_fullres_my_preprocesor\"\n      }\n    }\n\nYou need to run preprocessing for this new configuration: \n`nnUNetv2_preprocess -d DATASET_ID -c 3d_fullres_my_preprocesor` because it changes the preprocessing. 
Remember to \nset a unique `data_identifier` whenever you make modifications to the preprocessed data!\n\n## Change target spacing\n\n    \"configurations\": {\n      \"3d_fullres_my_spacing\": {\n        \"inherits_from\": \"3d_fullres\",\n        \"spacing\": [X, Y, Z],\n        \"data_identifier\": \"3d_fullres_my_spacing\"\n      }\n    }\n\nYou need to run preprocessing for this new configuration: \n`nnUNetv2_preprocess -d DATASET_ID -c 3d_fullres_my_spacing` because it changes the preprocessing. Remember to \nset a unique `data_identifier` whenever you make modifications to the preprocessed data!\n\n## Adding a cascade to a dataset where it does not exist\nHippocampus is small. It doesn't have a cascade. It also doesn't really make sense to add a cascade here but hey for \nthe sake of demonstration we can do that.\nWe change the following things here:\n\n- `spacing`: The lowres stage should operate at a lower resolution\n- we modify the `median_image_size_in_voxels` entry as a guide for what original image sizes we deal with\n- we set some patch size that is inspired by `median_image_size_in_voxels`\n- we need to remember that the patch size must be divisible by 2**num_pool in each axis!\n- network parameters such as kernel sizes, pooling operations are changed accordingly\n- we need to specify the name of the next stage\n- we need to add the highres stage\n\nThis is how this would look like (comparisons with 3d_fullres given as reference):\n\n    \"configurations\": {\n      \"3d_lowres\": {\n        \"inherits_from\": \"3d_fullres\",\n        \"data_identifier\": \"3d_lowres\"\n        \"spacing\": [2.0, 2.0, 2.0], # from [1.0, 1.0, 1.0] in 3d_fullres\n        \"median_image_size_in_voxels\": [18, 25, 18], # from [36, 50, 35]\n        \"patch_size\": [20, 28, 20], # from [40, 56, 40]\n        \"n_conv_per_stage_encoder\": [2, 2, 2], # one less entry than 3d_fullres ([2, 2, 2, 2])\n        \"n_conv_per_stage_decoder\": [2, 2], # one less entry than 
3d_fullres\n        \"num_pool_per_axis\": [2, 2, 2], # one less pooling than 3d_fullres in each dimension (3d_fullres: [3, 3, 3])\n        \"pool_op_kernel_sizes\": [[1, 1, 1], [2, 2, 2], [2, 2, 2]], # one less [2, 2, 2]\n        \"conv_kernel_sizes\": [[3, 3, 3], [3, 3, 3], [3, 3, 3]], # one less [3, 3, 3]\n        \"next_stage\": \"3d_cascade_fullres\" # name of the next stage in the cascade\n      },\n      \"3d_cascade_fullres\": { # does not need a data_identifier because we can use the data of 3d_fullres\n        \"inherits_from\": \"3d_fullres\",\n        \"previous_stage\": \"3d_lowres\" # name of the previous stage\n      }\n    }\n\nTo better understand the components describing the network topology in our plans files, please read section 6.2 \nin the [supplementary information](https://static-content.springer.com/esm/art%3A10.1038%2Fs41592-020-01008-z/MediaObjects/41592_2020_1008_MOESM1_ESM.pdf) \n(page 13) of our paper!"
  },
  {
    "path": "Finetune/nnUNet/documentation/extending_nnunet.md",
    "content": "# Extending nnU-Net\nWe hope that the new structure of nnU-Net v2 makes it much more intuitive on how to modify it! We cannot give an \nextensive tutorial on how each and every bit of it can be modified. It is better for you to search for the position \nin the repository where the thing you intend to change is implemented and start working your way through the code from \nthere. Setting breakpoints and debugging into nnU-Net really helps in understanding it and thus will help you make the \nnecessary modifications!\n\nHere are some things you might want to read before you start:\n- Editing nnU-Net configurations through plans files is really powerful now and allows you to change a lot of things regarding \npreprocessing, resampling, network topology etc. Read [this](explanation_plans_files.md)!\n- [Image normalization](explanation_normalization.md) and [i/o formats](dataset_format.md#supported-file-formats) are easy to extend!\n- Manual data splits can be defined as described [here](manual_data_splits.md)\n- You can chain arbitrary configurations together into cascades, see [this again](explanation_plans_files.md)\n- Read about our support for [region-based training](region_based_training.md)\n- If you intend to modify the training procedure (loss, sampling, data augmentation, lr scheduler, etc) then you need \nto implement your own trainer class. Best practice is to create a class that inherits from nnUNetTrainer and \nimplements the necessary changes. Head over to our [trainer classes folder](../nnunetv2/training/nnUNetTrainer) for \ninspiration! There will be similar trainers for what you intend to change and you can take them as a guide. nnUNetTrainer \nare structured similarly to PyTorch lightning trainers, this should also make things easier!\n- Integrating new network architectures can be done in two ways:\n  - Quick and dirty: implement a new nnUNetTrainer class and overwrite its `build_network_architecture` function. 
\n  Make sure your architecture is compatible with deep supervision (if not, use `nnUNetTrainerNoDeepSupervision`\n  as basis!) and that it can handle the patch sizes that are thrown at it! Your architecture should NOT apply any \n  nonlinearities at the end (softmax, sigmoid etc). nnU-Net does that!   \n  - The 'proper' (but difficult) way: Build a dynamically configurable architecture such as the `PlainConvUNet` class \n  used by default. It needs to have some sort of GPU memory estimation method that can be used to evaluate whether \n  certain patch sizes and \n  topologies fit into a specified GPU memory target. Build a new `ExperimentPlanner` that can configure your new \n  class and communicate with its memory budget estimation. Run `nnUNetv2_plan_and_preprocess` while specifying your \n  custom `ExperimentPlanner` and a custom `plans_name`. Implement a nnUNetTrainer that can use the plans generated by \n  your `ExperimentPlanner` to instantiate the network architecture. Specify your plans and trainer when running `nnUNetv2_train`. \n  It always pays off to first read and understand the corresponding nnU-Net code and use it as a template for your implementation!\n- Remember that multi-GPU training, region-based training, ignore label and cascaded training are now simply integrated \ninto one unified nnUNetTrainer class. No separate classes needed (remember that when implementing your own trainer \nclasses and ensure support for all of these features! Or raise `NotImplementedError`)\n\n[//]: # (- Read about our support for [ignore label]&#40;ignore_label.md&#41; and [region-based training]&#40;region_based_training.md&#41;)\n"
  },
  {
    "path": "Finetune/nnUNet/documentation/how_to_use_nnunet.md",
    "content": "## How to run nnU-Net on a new dataset\nGiven some dataset, nnU-Net fully automatically configures an entire segmentation pipeline that matches its properties.\nnnU-Net covers the entire pipeline, from preprocessing to model configuration, model training, postprocessing\nall the way to ensembling. After running nnU-Net, the trained model(s) can be applied to the test cases for inference.\n\n### Dataset Format\nnnU-Net expects datasets in a structured format. This format is inspired by the data structure of\nthe [Medical Segmentation Decathlon](http://medicaldecathlon.com/). Please read\n[this](dataset_format.md) for information on how to set up datasets to be compatible with nnU-Net.\n\n**Since version 2 we support multiple image file formats (.nii.gz, .png, .tif, ...)! Read the dataset_format \ndocumentation to learn more!**\n\n**Datasets from nnU-Net v1 can be converted to V2 by running `nnUNetv2_convert_old_nnUNet_dataset INPUT_FOLDER \nOUTPUT_DATASET_NAME`.** Remember that v2 calls datasets DatasetXXX_Name (not Task) where XXX is a 3-digit number.\nPlease provide the **path** to the old task, not just the Task name. nnU-Net V2 doesn't know where v1 tasks were!\n\n### Experiment planning and preprocessing\nGiven a new dataset, nnU-Net will extract a dataset fingerprint (a set of dataset-specific properties such as\nimage sizes, voxel spacings, intensity information etc). This information is used to design three U-Net configurations. \nEach of these pipelines operates on its own preprocessed version of the dataset.\n\nThe easiest way to run fingerprint extraction, experiment planning and preprocessing is to use:\n\n```bash\nnnUNetv2_plan_and_preprocess -d DATASET_ID --verify_dataset_integrity\n```\n\nWhere `DATASET_ID` is the dataset id (duh). We recommend `--verify_dataset_integrity` whenever it's the first time \nyou run this command. 
This will check for some of the most common error sources!\n\nYou can also process several datasets at once by giving `-d 1 2 3 [...]`. If you already know what U-Net configuration \nyou need you can also specify that with `-c 3d_fullres` (make sure to adapt -np in this case!). For more information \nabout all the options available to you please run `nnUNetv2_plan_and_preprocess -h`.\n\nnnUNetv2_plan_and_preprocess will create a new subfolder in your nnUNet_preprocessed folder named after the dataset. \nOnce the command is completed there will be a dataset_fingerprint.json file as well as a nnUNetPlans.json file for you to look at \n(in case you are interested!). There will also be subfolders containing the preprocessed data for your UNet configurations.\n\n[Optional]\nIf you prefer to keep things separate, you can also use `nnUNetv2_extract_fingerprint`, `nnUNetv2_plan_experiment` \nand `nnUNetv2_preprocess` (in that order). \n\n### Model training\n#### Overview\nYou pick which configurations (2d, 3d_fullres, 3d_lowres, 3d_cascade_fullres) should be trained! If you have no idea \nwhat performs best on your data, just run all of them and let nnU-Net identify the best one. It's up to you!\n\nnnU-Net trains all configurations in a 5-fold cross-validation over the training cases. This is 1) needed so that \nnnU-Net can estimate the performance of each configuration and tell you which one should be used for your \nsegmentation problem and 2) a natural way of obtaining a good model ensemble (average the output of these 5 models \nfor prediction) to boost performance.\n\nYou can influence the splits nnU-Net uses for 5-fold cross-validation (see [here](manual_data_splits.md)). If you \nprefer to train a single model on all training cases, this is also possible (see below).\n\n**Note that not all U-Net configurations are created for all datasets. 
In datasets with small image sizes, the U-Net\ncascade (and with it the 3d_lowres configuration) is omitted because the patch size of the full resolution U-Net \nalready covers a large part of the input images.**\n\nTraining models is done with the `nnUNetv2_train` command. The general structure of the command is:\n```bash\nnnUNetv2_train DATASET_NAME_OR_ID UNET_CONFIGURATION FOLD [additional options, see -h]\n```\n\nUNET_CONFIGURATION is a string that identifies the requested U-Net configuration (defaults: 2d, 3d_fullres, 3d_lowres, \n3d_cascade_fullres). DATASET_NAME_OR_ID specifies what dataset should be trained on and FOLD specifies which fold of\nthe 5-fold-cross-validation is trained.\n\nnnU-Net stores a checkpoint every 50 epochs. If you need to continue a previous training, just add a `--c` to the\ntraining command.\n\nIMPORTANT: If you plan to use `nnUNetv2_find_best_configuration` (see below) add the `--npz` flag. This makes \nnnU-Net save the softmax outputs during the final validation. They are needed for that. Exported softmax\npredictions are very large and therefore can take up a lot of disk space, which is why this is not enabled by default.\nIf you ran initially without the `--npz` flag but now require the softmax predictions, simply rerun the validation with:\n```bash\nnnUNetv2_train DATASET_NAME_OR_ID UNET_CONFIGURATION FOLD --val --npz\n```\n\nYou can specify the device nnU-net should use by using `-device DEVICE`. DEVICE can only be cpu, cuda or mps. 
If \nyou have multiple GPUs, please select the gpu id using `CUDA_VISIBLE_DEVICES=X nnUNetv2_train [...]` (requires device to be cuda).\n\nSee `nnUNetv2_train -h` for additional options.\n\n### 2D U-Net\nFor FOLD in [0, 1, 2, 3, 4], run:\n```bash\nnnUNetv2_train DATASET_NAME_OR_ID 2d FOLD [--npz]\n```\n\n### 3D full resolution U-Net\nFor FOLD in [0, 1, 2, 3, 4], run:\n```bash\nnnUNetv2_train DATASET_NAME_OR_ID 3d_fullres FOLD [--npz]\n```\n\n### 3D U-Net cascade\n#### 3D low resolution U-Net\nFor FOLD in [0, 1, 2, 3, 4], run:\n```bash\nnnUNetv2_train DATASET_NAME_OR_ID 3d_lowres FOLD [--npz]\n```\n\n#### 3D full resolution U-Net\nFor FOLD in [0, 1, 2, 3, 4], run:\n```bash\nnnUNetv2_train DATASET_NAME_OR_ID 3d_cascade_fullres FOLD [--npz]\n```\n**Note that the 3D full resolution U-Net of the cascade requires the five folds of the low resolution U-Net to be\ncompleted!**\n\nThe trained models will be written to the nnUNet_results folder. Each training obtains an automatically generated\noutput folder name:\n\nnnUNet_results/DatasetXXX_MYNAME/TRAINER_CLASS_NAME__PLANS_NAME__CONFIGURATION/FOLD\n\nFor Dataset002_Heart (from the MSD), for example, this looks like this:\n\n    nnUNet_results/\n    ├── Dataset002_Heart\n        │── nnUNetTrainer__nnUNetPlans__2d\n        │    ├── fold_0\n        │    ├── fold_1\n        │    ├── fold_2\n        │    ├── fold_3\n        │    ├── fold_4\n        │    ├── dataset.json\n        │    ├── dataset_fingerprint.json\n        │    └── plans.json\n        └── nnUNetTrainer__nnUNetPlans__3d_fullres\n             ├── fold_0\n             ├── fold_1\n             ├── fold_2\n             ├── fold_3\n             ├── fold_4\n             ├── dataset.json\n             ├── dataset_fingerprint.json\n             └── plans.json\n\nNote that 3d_lowres and 3d_cascade_fullres do not exist here because this dataset did not trigger the cascade. 
In each\nmodel training output folder (each of the fold_x folder), the following files will be created:\n- debug.json: Contains a summary of blueprint and inferred parameters used for training this model as well as a \nbunch of additional stuff. Not easy to read, but very useful for debugging ;-)\n- checkpoint_best.pth: checkpoint files of the best model identified during training. Not used right now unless you \nexplicitly tell nnU-Net to use it.\n- checkpoint_final.pth: checkpoint file of the final model (after training has ended). This is what is used for both \nvalidation and inference.\n- network_architecture.pdf (only if hiddenlayer is installed!): a pdf document with a figure of the network architecture in it.\n- progress.png: Shows losses, pseudo dice, learning rate and epoch times over the course of the training. At the top is \na plot of the training (blue) and validation (red) loss during training. Also shows an approximation of\n  the dice (green) as well as a moving average of it (dotted green line). This approximation is the average Dice score \n  of the foreground classes. **It needs to be taken with a big (!) \n  grain of salt** because it is computed on randomly drawn patches from the validation\n  data at the end of each epoch, and the aggregation of TP, FP and FN for the Dice computation treats the patches as if\n  they all originate from the same volume ('global Dice'; we do not compute a Dice for each validation case and then\n  average over all cases but pretend that there is only one validation case from which we sample patches). The reason for\n  this is that the 'global Dice' is easy to compute during training and is still quite useful to evaluate whether a model\n  is training at all or not. A proper validation takes way too long to be done each epoch. It is run at the end of the training.\n- validation_raw: in this folder are the predicted validation cases after the training has finished. 
The summary.json file in here\n  contains the validation metrics (a mean over all cases is provided at the start of the file). If `--npz` was set then \nthe compressed softmax outputs (saved as .npz files) are in here as well. \n\nDuring training it is often useful to watch the progress. We therefore recommend that you have a look at the generated\nprogress.png when running the first training. It will be updated after each epoch.\n\nTraining times largely depend on the GPU. The smallest GPU we recommend for training is the Nvidia RTX 2080ti. With \nthat all network trainings take less than 2 days. Refer to our [benchmarks](benchmarking.md) to see if your system is \nperforming as expected.\n\n### Using multiple GPUs for training\n\nIf multiple GPUs are at your disposal, the best way of using them is to train multiple nnU-Net trainings at once, one \non each GPU. This is because data parallelism never scales perfectly linearly, especially not with small networks such \nas the ones used by nnU-Net.\n\nExample:\n\n```bash\nCUDA_VISIBLE_DEVICES=0 nnUNetv2_train DATASET_NAME_OR_ID 2d 0 [--npz] & # train on GPU 0\nCUDA_VISIBLE_DEVICES=1 nnUNetv2_train DATASET_NAME_OR_ID 2d 1 [--npz] & # train on GPU 1\nCUDA_VISIBLE_DEVICES=2 nnUNetv2_train DATASET_NAME_OR_ID 2d 2 [--npz] & # train on GPU 2\nCUDA_VISIBLE_DEVICES=3 nnUNetv2_train DATASET_NAME_OR_ID 2d 3 [--npz] & # train on GPU 3\nCUDA_VISIBLE_DEVICES=4 nnUNetv2_train DATASET_NAME_OR_ID 2d 4 [--npz] & # train on GPU 4\n...\nwait\n```\n\n**Important: The first time a training is run nnU-Net will extract the preprocessed data into uncompressed numpy \narrays for speed reasons! This operation must be completed before starting more than one training of the same \nconfiguration! Wait with starting subsequent folds until the first training is using the GPU! 
Depending on the \ndataset size and your System this should only take a couple of minutes at most.**\n\nIf you insist on running DDP multi-GPU training, we got you covered:\n\n`nnUNetv2_train DATASET_NAME_OR_ID 2d 0 [--npz] -num_gpus X`\n\nAgain, note that this will be slower than running separate training on separate GPUs. DDP only makes sense if you have \nmanually interfered with the nnU-Net configuration and are training larger models with larger patch and/or batch sizes!\n\nImportant when using `-num_gpus`:\n1) If you train using, say, 2 GPUs but have more GPUs in the system you need to specify which GPUs should be used via \nCUDA_VISIBLE_DEVICES=0,1 (or whatever your ids are).\n2) You cannot specify more GPUs than you have samples in your minibatches. If the batch size is 2, 2 GPUs is the maximum!\n3) Make sure your batch size is divisible by the numbers of GPUs you use or you will not make good use of your hardware.\n\nIn contrast to the old nnU-Net, DDP is now completely hassle free. Enjoy!\n\n### Automatically determine the best configuration\nOnce the desired configurations were trained (full cross-validation) you can tell nnU-Net to automatically identify \nthe best combination for you:\n\n```commandline\nnnUNetv2_find_best_configuration DATASET_NAME_OR_ID -c CONFIGURATIONS \n```\n\n`CONFIGURATIONS` hereby is the list of configurations you would like to explore. Per default, ensembling is enabled \nmeaning that nnU-Net will generate all possible combinations of ensembles (2 configurations per ensemble). This requires \nthe .npz files containing the predicted probabilities of the validation set to be present (use `nnUNetv2_train` with \n`--npz` flag, see above). You can disable ensembling by setting the `--disable_ensembling` flag.\n\nSee `nnUNetv2_find_best_configuration -h` for more options.\n\nnnUNetv2_find_best_configuration will also automatically determine the postprocessing that should be used. 
\nPostprocessing in nnU-Net only considers the removal of all but the largest component in the prediction (once for \nforeground vs background and once for each label/region).\n\nOnce completed, the command will print to your console exactly what commands you need to run to make predictions. It \nwill also create two files in the `nnUNet_results/DATASET_NAME` folder for you to inspect: \n- `inference_instructions.txt` again contains the exact commands you need to use for predictions\n- `inference_information.json` can be inspected to see the performance of all configurations and ensembles, as well \nas the effect of the postprocessing plus some debug information. \n\n### Run inference\nRemember that the data located in the input folder must have the file endings as the dataset you trained the model on \nand must adhere to the nnU-Net naming scheme for image files (see [dataset format](dataset_format.md) and \n[inference data format](dataset_format_inference.md)!)\n\n`nnUNetv2_find_best_configuration` (see above) will print a string to the terminal with the inference commands you need to use.\nThe easiest way to run inference is to simply use these commands.\n\nIf you wish to manually specify the configuration(s) used for inference, use the following commands:\n\n#### Run prediction\nFor each of the desired configurations, run:\n```\nnnUNetv2_predict -i INPUT_FOLDER -o OUTPUT_FOLDER -d DATASET_NAME_OR_ID -c CONFIGURATION --save_probabilities\n```\n\nOnly specify `--save_probabilities` if you intend to use ensembling. `--save_probabilities` will make the command save the predicted\nprobabilities alongside of the predicted segmentation masks requiring a lot of disk space.\n\nPlease select a separate `OUTPUT_FOLDER` for each configuration!\n\nNote that per default, inference will be done with all 5 folds from the cross-validation as an ensemble. We very \nstrongly recommend you use all 5 folds. Thus, all 5 folds must have been trained prior to running inference. 
\n\nIf you wish to make predictions with a single model, train the `all` fold and specify it in `nnUNetv2_predict`\nwith `-f all`\n\n#### Ensembling multiple configurations\nIf you wish to ensemble multiple predictions (typically from different configurations), you can do so with the following command:\n```bash\nnnUNetv2_ensemble -i FOLDER1 FOLDER2 ... -o OUTPUT_FOLDER -np NUM_PROCESSES\n```\n\nYou can specify an arbitrary number of folders, but remember that each folder needs to contain npz files that were\ngenerated by `nnUNetv2_predict`. Again, `nnUNetv2_ensemble -h` will tell you more about additional options.\n\n#### Apply postprocessing\nFinally, apply the previously determined postprocessing to the (ensembled) predictions: \n\n```commandline\nnnUNetv2_apply_postprocessing -i FOLDER_WITH_PREDICTIONS -o OUTPUT_FOLDER --pp_pkl_file POSTPROCESSING_FILE -plans_json PLANS_FILE -dataset_json DATASET_JSON_FILE\n```\n\n`nnUNetv2_find_best_configuration` (or its generated `inference_instructions.txt` file) will tell you where to find \nthe postprocessing file. If not you can just look for it in your results folder (it's creatively named \n`postprocessing.pkl`). If your source folder is from an ensemble, you also need to specify a `-plans_json` file and \na `-dataset_json` file that should be used (for single configuration predictions these are automatically copied \nfrom the respective training). You can pick these files from any of the ensemble members.\n\n\n## How to run inference with pretrained models\nSee [here](run_inference_with_pretrained_models.md)\n\n[//]: # (## Examples)\n\n[//]: # ()\n[//]: # (To get you started we compiled two simple to follow examples:)\n\n[//]: # (- run a training with the 3d full resolution U-Net on the Hippocampus dataset. See [here]&#40;documentation/training_example_Hippocampus.md&#41;.)\n\n[//]: # (- run inference with nnU-Net's pretrained models on the Prostate dataset. 
See [here]&#40;documentation/inference_example_Prostate.md&#41;.)\n\n[//]: # ()\n[//]: # (Usability not good enough? Let us know!)\n"
  },
  {
    "path": "Finetune/nnUNet/documentation/installation_instructions.md",
    "content": "# System requirements\n\n## Operating System\nnnU-Net has been tested on Linux (Ubuntu 18.04, 20.04, 22.04; centOS, RHEL), Windows and MacOS! It should work out of the box!\n\n## Hardware requirements\nWe support GPU (recommended), CPU and Apple M1/M2 as devices (currently Apple mps does not implement 3D \nconvolutions, so you might have to use the CPU on those devices).\n\n### Hardware requirements for Training\nWe recommend you use a GPU for training as this will take a really long time on CPU or MPS (Apple M1/M2). \nFor training a GPU with at least 10 GB (popular non-datacenter options are the RTX 2080ti, RTX 3080/3090 or RTX 4080/4090) is \nrequired. We also recommend a strong CPU to go along with the GPU. 6 cores (12 threads) \nare the bare minimum! CPU requirements are mostly related to data augmentation and scale with the number of \ninput channels and target structures. Plus, the faster the GPU, the better the CPU should be!\n\n### Hardware Requirements for inference\nAgain we recommend a GPU to make predictions as this will be substantially faster than the other options. However, \ninference times are typically still manageable on CPU and MPS (Apple M1/M2). If using a GPU, it should have at least \n4 GB of available (unused) VRAM.\n\n### Example hardware configurations\nExample workstation configurations for training:\n- CPU: Ryzen 5800X - 5900X or 7900X would be even better! We have not yet tested Intel Alder/Raptor lake but they will likely work as well.\n- GPU: RTX 3090 or RTX 4090\n- RAM: 64GB\n- Storage: SSD (M.2 PCIe Gen 3 or better!)\n\nExample Server configuration for training:\n- CPU: 2x AMD EPYC7763 for a total of 128C/256T. 16C/GPU are highly recommended for fast GPUs such as the A100!\n- GPU: 8xA100 PCIe (price/performance superior to SXM variant + they use less power)\n- RAM: 1 TB\n- Storage: local SSD storage (PCIe Gen 3 or better) or ultra fast network storage\n\n(nnU-net by default uses one GPU per training. 
The server configuration can run up to 8 model trainings simultaneously)\n\n### Setting the correct number of Workers for data augmentation (training only)\nNote that you will need to manually set the number of processes nnU-Net uses for data augmentation according to your \nCPU/GPU ratio. For the server above (256 threads for 8 GPUs), a good value would be 24-30. You can do this by \nsetting the `nnUNet_n_proc_DA` environment variable (`export nnUNet_n_proc_DA=XX`). \nRecommended values (assuming a recent CPU with good IPC) are 10-12 for RTX 2080 ti, 12 for a RTX 3090, 16-18 for \nRTX 4090, 28-32 for A100. Optimal values may vary depending on the number of input channels/modalities and number of classes.\n\n# Installation instructions\nWe strongly recommend that you install nnU-Net in a virtual environment! Pip or anaconda are both fine. If you choose to \ncompile PyTorch from source (see below), you will need to use conda instead of pip. \n\nUse a recent version of Python! 3.9 or newer is guaranteed to work!\n\n**nnU-Net v2 can coexist with nnU-Net v1! Both can be installed at the same time.**\n\n1) Install [PyTorch](https://pytorch.org/get-started/locally/) as described on their website (conda/pip). Please \ninstall the latest version with support for your hardware (cuda, mps, cpu).\n**DO NOT JUST `pip install nnunetv2` WITHOUT PROPERLY INSTALLING PYTORCH FIRST**. For maximum speed, consider \n[compiling pytorch yourself](https://github.com/pytorch/pytorch#from-source) (experienced users only!). 
\n2) Install nnU-Net depending on your use case:\n    1) For use as **standardized baseline**, **out-of-the-box segmentation algorithm** or for running \n     **inference with pretrained models**:\n\n       ```pip install nnunetv2```\n\n    2) For use as integrative **framework** (this will create a copy of the nnU-Net code on your computer so that you\n   can modify it as needed):\n          ```bash\n          git clone https://github.com/MIC-DKFZ/nnUNet.git\n          cd nnUNet\n          pip install -e .\n          ```\n3) nnU-Net needs to know where you intend to save raw data, preprocessed data and trained models. For this you need to\n   set a few environment variables. Please follow the instructions [here](setting_up_paths.md).\n4) (OPTIONAL) Install [hiddenlayer](https://github.com/waleedka/hiddenlayer). hiddenlayer enables nnU-net to generate\n   plots of the network topologies it generates (see [Model training](how_to_use_nnunet.md#model-training)). \nTo install hiddenlayer,\n   run the following command:\n    ```bash\n    pip install --upgrade git+https://github.com/FabianIsensee/hiddenlayer.git\n    ```\n\nInstalling nnU-Net will add several new commands to your terminal. These commands are used to run the entire nnU-Net\npipeline. You can execute them from any location on your system. All nnU-Net commands have the prefix `nnUNetv2_` for\neasy identification.\n\nNote that these commands simply execute python scripts. If you installed nnU-Net in a virtual environment, this\nenvironment must be activated when executing the commands. You can see what scripts/functions are executed by \nchecking the project.scripts in the [pyproject.toml](../pyproject.toml) file.\n\nAll nnU-Net commands have a `-h` option which gives information on how to use them.\n"
  },
  {
    "path": "Finetune/nnUNet/documentation/manual_data_splits.md",
    "content": "# How to generate custom splits in nnU-Net\n\nSometimes, the default 5-fold cross-validation split by nnU-Net does not fit a project. Maybe you want to run 3-fold \ncross-validation instead? Or maybe your training cases cannot be split randomly and require careful stratification. \nFear not, for nnU-Net has got you covered (it really can do anything <3).\n\nThe splits nnU-Net uses are generated in the `do_split` function of nnUNetTrainer. This function will first look for \nexisting splits, stored as a file, and if no split exists it will create one. So if you wish to influence the split, \nmanually creating a split file that will then be recognized and used is the way to go!\n\nThe split file is located in the `nnUNet_preprocessed/DATASETXXX_NAME` folder. So it is best practice to first \npopulate this folder by running `nnUNetv2_plan_and_preprocess`.\n\nSplits are stored as a .json file. They are a simple python list. The length of that list is the number of splits it \ncontains (so it's 5 in the default nnU-Net). Each list entry is a dictionary with keys 'train' and 'val'. Values are \nagain simply lists with the train identifiers in each set. 
To illustrate this, I am just messing with the Dataset002 \nfile as an example:\n\n```commandline\nIn [1]: from batchgenerators.utilities.file_and_folder_operations import load_json\n\nIn [2]: splits = load_json('splits_final.json')\n\nIn [3]: len(splits)\nOut[3]: 5\n\nIn [4]: splits[0].keys()\nOut[4]: dict_keys(['train', 'val'])\n\nIn [5]: len(splits[0]['train'])\nOut[5]: 16\n\nIn [6]: len(splits[0]['val'])\nOut[6]: 4\n\nIn [7]: print(splits[0])\n{'train': ['la_003', 'la_004', 'la_005', 'la_009', 'la_010', 'la_011', 'la_014', 'la_017', 'la_018', 'la_019', 'la_020', 'la_022', 'la_023', 'la_026', 'la_029', 'la_030'],\n'val': ['la_007', 'la_016', 'la_021', 'la_024']}\n```\n\nIf you are still not sure what splits are supposed to look like, simply download some reference dataset from the\n[Medical Decathlon](http://medicaldecathlon.com/), start some training (to generate the splits) and manually inspect \nthe .json file with your text editor of choice!\n\nIn order to generate your custom splits, all you need to do is reproduce the data structure explained above and save it as \n`splits_final.json` in the `nnUNet_preprocessed/DATASETXXX_NAME` folder. Then use `nnUNetv2_train` etc. as usual."
  },
  {
    "path": "Finetune/nnUNet/documentation/pretraining_and_finetuning.md",
    "content": "# Pretraining with nnU-Net\n\n## Intro\n\nSo far nnU-Net only supports supervised pre-training, meaning that you train a regular nnU-Net on some source dataset \nand then use the final network weights as initialization for your target dataset. \n\nAs a reminder, many training hyperparameters such as patch size and network topology differ between datasets as a \nresult of the automated dataset analysis and experiment planning nnU-Net is known for. So, out of the box, it is not \npossible to simply take the network weights from some dataset and then reuse them for another.\n\nConsequently, the plans need to be aligned between the two tasks. In this README we show how this can be achieved and \nhow the resulting weights can then be used for initialization.\n\n### Terminology\n\nThroughout this README we use the following terminology:\n\n- `source dataset` is the dataset you intend to run the pretraining on\n- `target dataset` is the dataset you are interested in; the one you wish to fine tune on\n\n\n## Pretraining on the source dataset\n\nIn order to obtain matching network topologies we need to transfer the plans from one dataset to another. Since we are \nonly interested in the target dataset, we first need to run experiment planning (and preprocessing) for it:\n\n```bash\nnnUNetv2_plan_and_preprocess -d TARGET_DATASET\n```\n\nThen we need to extract the dataset fingerprint of the source dataset, if not yet available:\n\n```bash\nnnUNetv2_extract_fingerprint -d SOURCE_DATASET\n```\n\nNow we can take the plans from the target dataset and transfer it to the source:\n\n```bash\nnnUNetv2_move_plans_between_datasets -s TARGET_DATASET -t SOURCE_DATASET -sp TARGET_PLANS_IDENTIFIER -tp SOURCE_PLANS_IDENTIFIER\n```\n\n`SOURCE_PLANS_IDENTIFIER` is hereby probably nnUNetPlans unless you changed the experiment planner in \nnnUNetv2_plan_and_preprocess. 
For `TARGET_PLANS_IDENTIFIER` we recommend you set something custom in order to not \noverwrite default plans.\n\nNote that EVERYTHING is transferred between the datasets. Not just the network topology, batch size and patch size but \nalso the normalization scheme! Therefore, a transfer between datasets that use different normalization schemes may not \nwork well (but it could, depending on the schemes!).\n\nNote on CT normalization: Yes, also the clip values, mean and std are transferred!\n\nNow you can run the preprocessing on the source task:\n\n```bash\nnnUNetv2_preprocess -d SOURCE_DATASET -plans_name TARGET_PLANS_IDENTIFIER\n```\n\nAnd run the training as usual:\n\n```bash\nnnUNetv2_train SOURCE_DATASET CONFIG all -p TARGET_PLANS_IDENTIFIER\n```\n\nNote how we use the 'all' fold to train on all available data. For pretraining it does not make sense to split the data.\n\n## Using pretrained weights\n\nOnce pretraining is completed (or you obtain compatible weights by other means) you can use them to initialize your model:\n\n```bash\nnnUNetv2_train TARGET_DATASET CONFIG FOLD -pretrained_weights PATH_TO_CHECKPOINT\n```\n\nSpecify the checkpoint in PATH_TO_CHECKPOINT.\n\nWhen loading pretrained weights, all layers except the segmentation layers will be used! \n\nSo far there are no specific nnUNet trainers for fine tuning, so the current recommendation is to just use \nnnUNetTrainer. You can however easily write your own trainers with learning rate ramp up, fine-tuning of segmentation \nheads or shorter training time."
  },
  {
    "path": "Finetune/nnUNet/documentation/region_based_training.md",
    "content": "# Region-based training\n\n## What is this about?\nIn some segmentation tasks, most prominently the \n[Brain Tumor Segmentation Challenge](http://braintumorsegmentation.org/), the target areas (based on which the metric \nwill be computed) are different from the labels provided in the training data. This is the case because for some \nclinical applications, it is more relevant to detect the whole tumor, tumor core and enhancing tumor instead of the \nindividual labels (edema, necrosis and non-enhancing tumor, enhancing tumor). \n\n<img src=\"assets/regions_vs_labels.png\" width=\"768px\" />\n\nThe figure shows an example BraTS case along with label-based representation of the task (top) and region-based \nrepresentation (bottom). The challenge evaluation is done on the regions. As we have shown in our \n[BraTS 2018 contribution](https://arxiv.org/abs/1809.10483), directly optimizing those \noverlapping areas over the individual labels yields better scoring models!\n\n## What can nnU-Net do?\nnnU-Net's region-based training allows you to learn areas that are constructed by merging individual labels. For \nsome segmentation tasks this provides a benefit, as this shifts the importance allocated to different labels during training. \nMost prominently, this feature can be used to represent **hierarchical classes**, for example when organs + \nsubstructures are to be segmented. Imagine a liver segmentation problem, where vessels and tumors are also to be \nsegmented. The first target region could thus be the entire liver (including the substructures), while the remaining \ntargets are the individual substructures.\n\nImportant: nnU-Net still requires integer label maps as input and will produce integer label maps as output! 
\nRegion-based training can be used to learn overlapping labels, but there must be a way to model these overlaps \nfor nnU-Net to work (see below how this is done).\n\n## How do you use it?\n\nWhen declaring the labels in the `dataset.json` file, BraTS would typically look like this:\n\n```python\n...\n\"labels\": {\n    \"background\": 0,\n    \"edema\": 1,\n    \"non_enhancing_and_necrosis\": 2,\n    \"enhancing_tumor\": 3\n},\n...\n```\n(we use different int values than the challenge because nnU-Net needs consecutive integers!)\n\nThis representation corresponds to the upper row in the figure above.\n\nFor region-based training, the labels need to be changed to the following:\n\n```python\n...\n\"labels\": {\n    \"background\": 0,\n    \"whole_tumor\": [1, 2, 3],\n    \"tumor_core\": [2, 3],\n    \"enhancing_tumor\": 3  # or [3]\n},\n\"regions_class_order\": [1, 2, 3],\n...\n```\nThis corresponds to the bottom row in the figure above. Note how an additional entry in the dataset.json is \nrequired: `regions_class_order`. This tells nnU-Net how to convert the region representations back to an integer map. \nIt essentially just tells nnU-Net what labels to place for which region in what order. The length of the \nlist here needs to be the same as the number of regions (excl background). Each element in the list corresponds \nto the label that is placed instead of the region into the final segmentation. Later entries will overwrite earlier ones! \nConcretely, for the example given here, nnU-Net \nwill firstly place the label 1 (edema) where the 'whole_tumor' region was predicted, then place the label 2 \n(non-enhancing tumor and necrosis) where the \"tumor_core\" was predicted and finally place the label 3 in the \npredicted 'enhancing_tumor' area. With each step, part of the previously set pixels \nwill be overwritten with the new label! 
So when setting your `regions_class_order`, place encompassing regions \n(like whole tumor etc) first, followed by substructures.\n\n**IMPORTANT** Because the conversion back to a segmentation map is sensitive to the order in which the regions are \ndeclared (\"place label X in the first region\") you need to make sure that this order is not perturbed! When \nautomatically generating the dataset.json, make sure the dictionary keys do not get sorted alphabetically! Set \n`sort_keys=False` in `json.dump()`!!!\n\nnnU-Net will perform the evaluation + model selection also on the regions, not the individual labels!\n\nThat's all. Easy, huh?"
  },
  {
    "path": "Finetune/nnUNet/documentation/run_inference_with_pretrained_models.md",
    "content": "# How to run inference with pretrained models\n**Important:** Pretrained weights from nnU-Net v1 are NOT compatible with V2. You will need to retrain with the new \nversion. But honestly, you already have a fully trained model with which you can run inference (in v1), so \njust continue using that!\n\nNot yet available for V2 :-(\nIf you wish to run inference with pretrained models, check out the old nnU-Net for now. We are working on this full steam!\n"
  },
  {
    "path": "Finetune/nnUNet/documentation/set_environment_variables.md",
    "content": "# How to set environment variables\n\nnnU-Net requires some environment variables so that it always knows where the raw data, preprocessed data and trained \nmodels are. Depending on the operating system, these environment variables need to be set in different ways.\n\nVariables can either be set permanently (recommended!) or you can decide to set them every time you call nnU-Net. \n\n# Linux & MacOS\n\n## Permanent\nLocate the `.bashrc` file in your home folder and add the following lines to the bottom:\n\n```bash\nexport nnUNet_raw=\"/media/fabian/nnUNet_raw\"\nexport nnUNet_preprocessed=\"/media/fabian/nnUNet_preprocessed\"\nexport nnUNet_results=\"/media/fabian/nnUNet_results\"\n```\n\n(Of course you need to adapt the paths to the actual folders you intend to use).\nIf you are using a different shell, such as zsh, you will need to find the correct script for it. For zsh this is `.zshrc`.\n\n## Temporary\nJust execute the following lines whenever you run nnU-Net:\n```bash\nexport nnUNet_raw=\"/media/fabian/nnUNet_raw\"\nexport nnUNet_preprocessed=\"/media/fabian/nnUNet_preprocessed\"\nexport nnUNet_results=\"/media/fabian/nnUNet_results\"\n```\n(Of course you need to adapt the paths to the actual folders you intend to use).\n\nImportant: These variables will be deleted if you close your terminal! They will also only apply to the current \nterminal window and DO NOT transfer to other terminals!\n\nAlternatively you can also just prefix them to your nnU-Net commands:\n\n`nnUNet_results=\"/media/fabian/nnUNet_results\" nnUNet_preprocessed=\"/media/fabian/nnUNet_preprocessed\" nnUNetv2_train[...]`\n\n## Verify that environment parameters are set\nYou can always execute `echo ${nnUNet_raw}` etc to print the environment variables. 
This will return an empty string if \nthey were not set.\n\n# Windows\nUseful links:\n- [https://www3.ntu.edu.sg](https://www3.ntu.edu.sg/home/ehchua/programming/howto/Environment_Variables.html#:~:text=To%20set%20(or%20change)%20a,it%20to%20an%20empty%20string.)\n- [https://phoenixnap.com](https://phoenixnap.com/kb/windows-set-environment-variable)\n\n## Permanent\nSee `Set Environment Variable in Windows via GUI` [here](https://phoenixnap.com/kb/windows-set-environment-variable). \nOr read about setx (command prompt).\n\n## Temporary\nJust execute the following before you run nnU-Net:\n\n(PowerShell)\n```PowerShell\n$Env:nnUNet_raw = \"C:/Users/fabian/nnUNet_raw\"\n$Env:nnUNet_preprocessed = \"C:/Users/fabian/nnUNet_preprocessed\"\n$Env:nnUNet_results = \"C:/Users/fabian/nnUNet_results\"\n```\n\n(Command Prompt)\n```Command Prompt\nset nnUNet_raw=C:/Users/fabian/nnUNet_raw\nset nnUNet_preprocessed=C:/Users/fabian/nnUNet_preprocessed\nset nnUNet_results=C:/Users/fabian/nnUNet_results\n```\n\n(Of course you need to adapt the paths to the actual folders you intend to use).\n\nImportant: These variables will be deleted if you close your session! They will also only apply to the current \nwindow and DO NOT transfer to other sessions!\n\n## Verify that environment parameters are set\nPrinting in Windows works differently depending on the environment you are in:\n\nPowerShell: `echo $Env:[variable_name]`\n\nCommand Prompt: `echo %[variable_name]%`\n"
  },
  {
    "path": "Finetune/nnUNet/documentation/setting_up_paths.md",
    "content": "# Setting up Paths\n\nnnU-Net relies on environment variables to know where raw data, preprocessed data and trained model weights are stored. \nTo use the full functionality of nnU-Net, the following three environment variables must be set:\n\n1) `nnUNet_raw`: This is where you place the raw datasets. This folder will have one subfolder for each dataset names \nDatasetXXX_YYY where XXX is a 3-digit identifier (such as 001, 002, 043, 999, ...) and YYY is the (unique) \ndataset name. The datasets must be in nnU-Net format, see [here](dataset_format.md).\n\n    Example tree structure:\n    ```\n    nnUNet_raw/Dataset001_NAME1\n    ├── dataset.json\n    ├── imagesTr\n    │   ├── ...\n    ├── imagesTs\n    │   ├── ...\n    └── labelsTr\n        ├── ...\n    nnUNet_raw/Dataset002_NAME2\n    ├── dataset.json\n    ├── imagesTr\n    │   ├── ...\n    ├── imagesTs\n    │   ├── ...\n    └── labelsTr\n        ├── ...\n    ```\n\n2) `nnUNet_preprocessed`: This is the folder where the preprocessed data will be saved. The data will also be read from \nthis folder during training. It is important that this folder is located on a drive with low access latency and high \nthroughput (such as a nvme SSD (PCIe gen 3 is sufficient)).\n\n3) `nnUNet_results`: This specifies where nnU-Net will save the model weights. If pretrained models are downloaded, this \nis where it will save them.\n\n### How to set environment variables\nSee [here](set_environment_variables.md)."
  },
  {
    "path": "Finetune/nnUNet/documentation/tldr_migration_guide_from_v1.md",
    "content": "# TLDR Migration Guide from nnU-Net V1\n\n- nnU-Net V2 can be installed simultaneously with V1. They won't get in each other's way\n- The environment variables needed for V2 have slightly different names. Read [this](setting_up_paths.md). \n- nnU-Net V2 datasets are called DatasetXXX_NAME. Not Task.\n- Datasets have the same structure (imagesTr, labelsTr, dataset.json) but we now support more \n[file types](dataset_format.md#supported-file-formats). The dataset.json is simplified. Use `generate_dataset_json` \nfrom nnunetv2.dataset_conversion.generate_dataset_json.py. \n- Careful: labels are now no longer declared as value:name but name:value. This has to do with [hierarchical labels](region_based_training.md). \n- nnU-Net v2 commands start with `nnUNetv2...`. They work mostly (but not entirely) the same. Just use the `-h` option.\n- You can transfer your V1 raw datasets to V2 with `nnUNetv2_convert_old_nnUNet_dataset`. You cannot transfer trained \nmodels. Continue to use the old nnU-Net Version for making inference with those.\n- These are the commands you are most likely to be using (in that order)\n  - `nnUNetv2_plan_and_preprocess`. Example: `nnUNetv2_plan_and_preprocess -d 2`\n  - `nnUNetv2_train`. Example: `nnUNetv2_train 2 3d_fullres 0`\n  - `nnUNetv2_find_best_configuration`. Example: `nnUNetv2_find_best_configuration 2 -c 2d 3d_fullres`. This command\n    will now create a `inference_instructions.txt` file in your `nnUNet_preprocessed/DatasetXXX_NAME/` folder which\n    tells you exactly how to do inference.\n  - `nnUNetv2_predict`. Example: `nnUNetv2_predict -i INPUT_FOLDER -o OUTPUT_FOLDER -c 3d_fullres -d 2`\n  - `nnUNetv2_apply_postprocessing` (see inference_instructions.txt)\n"
  },
  {
    "path": "Finetune/nnUNet/msd.txt",
    "content": "A. convert\r\npython Dataset220_KiTS2023.py /data/linshan/CTs/kits23/dataset/\r\npython Dataset218_Amos2022_task1.py /data/linshan/CTs/Amos2022/\r\nnnUNetv2_convert_old_nnUNet_dataset /data/linshan/CTs/Amos2022/  /data/linshan/nnunet_data/nnUNet_raw/Dataset218_Amos2022\r\n\r\nB. pre-process\r\nnnUNetv2_plan_and_preprocess -d 003 -c 3d_fullres --verbose --verify_dataset_integrity\r\n\r\nnnUNetv2_plan_and_preprocess -d 218 -c 3d_fullres --verbose --verify_dataset_integrity\r\n\r\nC. training\r\nCUDA_VISIBLE_DEVICES=0 nnUNetv2_train 008 3d_fullres 0 -tr nnUNetTrainer_250epochs --val\r\n\r\nCUDA_VISIBLE_DEVICES=1 nnUNetv2_train 002 3d_fullres 0 -tr nnUNetTrainer_swin_pre\r\n\r\nCUDA_VISIBLE_DEVICES=5 nnUNetv2_train 009 3d_fullres 0 -tr nnUNetTrainer_250epochs\r\n\r\nCUDA_VISIBLE_DEVICES=5 nnUNetv2_train 010 3d_fullres 0 -tr nnUNetTrainer_250epochs\r\n\r\nCUDA_VISIBLE_DEVICES=5 nnUNetv2_train 218 3d_fullres 0 -tr nnUNetTrainer_250epochs\r\n\r\nD. inference\r\nCUDA_VISIBLE_DEVICES=4 nnUNetv2_predict -i /data/linshan/nnunet_data/nnUNet_raw/Dataset017_BTCV/imagesTs/ -o /data/linshan/nnunet_data/nnUNet_pred/Dataset017_BTCV/imagesTs_pred -d 017 -f 0 -c 3d_fullres --verbose\r\n\r\nCUDA_VISIBLE_DEVICES=5 nnUNetv2_predict -i /data/linshan/nnunet_data/nnUNet_raw/Dataset003_Liver/imagesTs/ -o /data/linshan/nnunet_data/nnUNet_pred/Dataset003_Liver/imagesTs_pred -d 003 -f 0 -c 3d_fullres --verbose\r\n\r\nCUDA_VISIBLE_DEVICES=2 nnUNetv2_predict -i /data/linshan/nnunet_data/nnUNet_raw/Dataset006_Lung/imagesTs/ -o /data/linshan/nnunet_data/nnUNet_pred/Dataset006_Lung/imagesTs_pred -d 006 -f 0 -c 3d_fullres --verbose -tr nnUNetTrainer_250epochs\r\n\r\n\r\nCUDA_VISIBLE_DEVICES=2 nnUNetv2_predict -i /data/linshan/nnunet_data/nnUNet_raw/Dataset001_BrainTumour/imagesTs/ -o /data/linshan/nnunet_data/nnUNet_pred/Dataset001_BrainTumour/imagesTs_pred -d 001 -f 0 -c 3d_fullres --verbose\r\n\r\nCUDA_VISIBLE_DEVICES=2 nnUNetv2_predict -i 
/data/linshan/nnunet_data/nnUNet_raw/Dataset002_Heart/imagesTs/ -o /data/linshan/nnunet_data/nnUNet_pred/Dataset002_Heart/imagesTs_pred -d 002 -f 0 -c 3d_fullres --verbose\r\n\r\nCUDA_VISIBLE_DEVICES=2 nnUNetv2_predict -i /data/linshan/nnunet_data/nnUNet_raw/Dataset004_Hippocampus/imagesTs/ -o /data/linshan/nnunet_data/nnUNet_pred/Dataset004_Hippocampus/imagesTs_pred -d 004 -f 0 -c 3d_fullres --verbose -tr nnUNetTrainer_250epochs\r\n\r\nCUDA_VISIBLE_DEVICES=5 nnUNetv2_predict -i /data/linshan/nnunet_data/nnUNet_raw/Dataset005_Prostate/imagesTs/ -o /data/linshan/nnunet_data/nnUNet_pred/Dataset005_Prostate/imagesTs_pred -d 005 -f 0 -c 3d_fullres --verbose -tr nnUNetTrainer_250epochs\r\n\r\nCUDA_VISIBLE_DEVICES=5 nnUNetv2_predict -i /data/linshan/nnunet_data/nnUNet_raw/Dataset007_Pancreas/imagesTs/ -o /data/linshan/nnunet_data/nnUNet_pred/Dataset007_Pancreas/imagesTs_pred -d 007 -f 0 -c 3d_fullres --verbose -tr nnUNetTrainer_250epochs\r\n\r\nCUDA_VISIBLE_DEVICES=2 nnUNetv2_predict -i /data/linshan/nnunet_data/nnUNet_raw/Dataset008_HepaticVessel/imagesTs/ -o /data/linshan/nnunet_data/nnUNet_pred/Dataset008_HepaticVessel/imagesTs_pred -d 008 -f 0 -c 3d_fullres --verbose -tr nnUNetTrainer_250epochs\r\n\r\nCUDA_VISIBLE_DEVICES=5 nnUNetv2_predict -i /data/linshan/nnunet_data/nnUNet_raw/Dataset218_AMOS2022_postChallenge_task1/imagesTs/ -o /data/linshan/nnunet_data/nnUNet_pred/Dataset218_AMOS2022_postChallenge_task1/imagesTs_pred -d 218 -f 0 -c 3d_fullres --verbose -tr nnUNetTrainer_250epochs --verbose -npp 1 -nps 1\r\n\r\n--disable_tta\r\n\r\n\r\n### predict colon cancer for flare23\r\nCUDA_VISIBLE_DEVICES=3 nnUNetv2_predict -i /data/linshan/CTs/Flare23/Flare23_test/ -o /data/linshan/nnunet_data/nnUNet_pred/Dataset010_Colon_flare23/flare23_imagesTs_pred -d 010 -f 0 -c 3d_fullres --verbose -tr nnUNetTrainer_250epochs"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/batch_running/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/batch_running/benchmarking/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/batch_running/benchmarking/generate_benchmarking_commands.py",
    "content": "if __name__ == '__main__':\n    \"\"\"\n    This code probably only works within the DKFZ infrastructure (using LSF). You will need to adapt it to your scheduler! \n    \"\"\"\n    gpu_models = [#'NVIDIAA100_PCIE_40GB', 'NVIDIAGeForceRTX2080Ti', 'NVIDIATITANRTX', 'TeslaV100_SXM2_32GB',\n                  'NVIDIAA100_SXM4_40GB']#, 'TeslaV100_PCIE_32GB']\n    datasets = [2, 3, 4, 5]\n    trainers = ['nnUNetTrainerBenchmark_5epochs', 'nnUNetTrainerBenchmark_5epochs_noDataLoading']\n    plans = ['nnUNetPlans']\n    configs = ['2d', '2d_bs3x', '2d_bs6x', '3d_fullres', '3d_fullres_bs3x', '3d_fullres_bs6x']\n    num_gpus = 1\n\n    benchmark_configurations = {d: configs for d in datasets}\n\n    exclude_hosts = \"-R \\\"select[hname!='e230-dgxa100-1']'\\\"\"\n    resources = \"-R \\\"tensorcore\\\"\"\n    queue = \"-q gpu\"\n    preamble = \"-L /bin/bash \\\"source ~/load_env_torch210.sh && \"\n    train_command = 'nnUNet_compile=False nnUNet_results=/dkfz/cluster/gpu/checkpoints/OE0441/isensee/nnUNet_results_remake_benchmark nnUNetv2_train'\n\n    folds = (0, )\n\n    use_these_modules = {\n        tr: plans for tr in trainers\n    }\n\n    additional_arguments = f' -num_gpus {num_gpus}'  # ''\n\n    output_file = \"/home/isensee/deleteme.txt\"\n    with open(output_file, 'w') as f:\n        for g in gpu_models:\n            gpu_requirements = f\"-gpu num={num_gpus}:j_exclusive=yes:gmodel={g}\"\n            for tr in use_these_modules.keys():\n                for p in use_these_modules[tr]:\n                    for dataset in benchmark_configurations.keys():\n                        for config in benchmark_configurations[dataset]:\n                            for fl in folds:\n                                command = f'bsub {exclude_hosts} {resources} {queue} {gpu_requirements} {preamble} {train_command} {dataset} {config} {fl} -tr {tr} -p {p}'\n                                if additional_arguments is not None and len(additional_arguments) > 0:\n       
                             command += f' {additional_arguments}'\n                                f.write(f'{command}\\\"\\n')"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/batch_running/benchmarking/summarize_benchmark_results.py",
    "content": "from batchgenerators.utilities.file_and_folder_operations import join, load_json, isfile\nfrom nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name\nfrom nnunetv2.paths import nnUNet_results\nfrom nnunetv2.utilities.file_path_utilities import get_output_folder\n\nif __name__ == '__main__':\n    trainers = ['nnUNetTrainerBenchmark_5epochs', 'nnUNetTrainerBenchmark_5epochs_noDataLoading']\n    datasets = [2, 3, 4, 5]\n    plans = ['nnUNetPlans']\n    configs = ['2d', '2d_bs3x', '2d_bs6x', '3d_fullres', '3d_fullres_bs3x', '3d_fullres_bs6x']\n    output_file = join(nnUNet_results, 'benchmark_results.csv')\n\n    torch_version = '2.1.0.dev20230330'#\"2.0.0\"#\"2.1.0.dev20230328\"  #\"1.11.0a0+gitbc2c6ed\"  #\n    cudnn_version = 8700  # 8302  #\n    num_gpus = 1\n\n    unique_gpus = set()\n\n    # collect results in the most janky way possible. Amazing coding skills!\n    all_results = {}\n    for tr in trainers:\n        all_results[tr] = {}\n        for p in plans:\n            all_results[tr][p] = {}\n            for c in configs:\n                all_results[tr][p][c] = {}\n                for d in datasets:\n                    dataset_name = maybe_convert_to_dataset_name(d)\n                    output_folder = get_output_folder(dataset_name, tr, p, c, fold=0)\n                    expected_benchmark_file = join(output_folder, 'benchmark_result.json')\n                    all_results[tr][p][c][d] = {}\n                    if isfile(expected_benchmark_file):\n                        # filter results for what we want\n                        results = [i for i in load_json(expected_benchmark_file).values()\n                                   if i['num_gpus'] == num_gpus and i['cudnn_version'] == cudnn_version and\n                                   i['torch_version'] == torch_version]\n                        for r in results:\n                            all_results[tr][p][c][d][r['gpu_name']] = r\n                        
    unique_gpus.add(r['gpu_name'])\n\n    # haha. Fuck this. Collect GPUs in the code above.\n    # unique_gpus = np.unique([i[\"gpu_name\"] for tr in trainers for p in plans for c in configs for d in datasets for i in all_results[tr][p][c][d]])\n\n    unique_gpus = list(unique_gpus)\n    unique_gpus.sort()\n\n    with open(output_file, 'w') as f:\n        f.write('Dataset,Trainer,Plans,Config')\n        for g in unique_gpus:\n            f.write(f\",{g}\")\n        f.write(\"\\n\")\n        for d in datasets:\n            for tr in trainers:\n                for p in plans:\n                    for c in configs:\n                        gpu_results = []\n                        for g in unique_gpus:\n                            if g in all_results[tr][p][c][d].keys():\n                                gpu_results.append(round(all_results[tr][p][c][d][g][\"fastest_epoch\"], ndigits=2))\n                            else:\n                                gpu_results.append(\"MISSING\")\n                        # skip if all are missing\n                        if all([i == 'MISSING' for i in gpu_results]):\n                            continue\n                        f.write(f\"{d},{tr},{p},{c}\")\n                        for g in gpu_results:\n                            f.write(f\",{g}\")\n                        f.write(\"\\n\")\n            f.write(\"\\n\")\n\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/batch_running/collect_results_custom_Decathlon.py",
    "content": "from typing import Tuple\n\nimport numpy as np\nfrom batchgenerators.utilities.file_and_folder_operations import *\n\nfrom nnunetv2.evaluation.evaluate_predictions import load_summary_json\nfrom nnunetv2.paths import nnUNet_results\nfrom nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name, convert_dataset_name_to_id\nfrom nnunetv2.utilities.file_path_utilities import get_output_folder\n\n\ndef collect_results(trainers: dict, datasets: List, output_file: str,\n                    configurations=(\"2d\", \"3d_fullres\", \"3d_lowres\", \"3d_cascade_fullres\"),\n                    folds=tuple(np.arange(5))):\n    results_dirs = (nnUNet_results,)\n    datasets_names = [maybe_convert_to_dataset_name(i) for i in datasets]\n    with open(output_file, 'w') as f:\n        for i, d in zip(datasets, datasets_names):\n            for c in configurations:\n                for module in trainers.keys():\n                    for plans in trainers[module]:\n                        for r in results_dirs:\n                            expected_output_folder = get_output_folder(d, module, plans, c)\n                            if isdir(expected_output_folder):\n                                results_folds = []\n                                f.write(f\"{d},{c},{module},{plans},{r}\")\n                                for fl in folds:\n                                    expected_output_folder_fold = get_output_folder(d, module, plans, c, fl)\n                                    expected_summary_file = join(expected_output_folder_fold, \"validation\",\n                                                                 \"summary.json\")\n                                    if not isfile(expected_summary_file):\n                                        print('expected output file not found:', expected_summary_file)\n                                        f.write(\",\")\n                                        results_folds.append(np.nan)\n    
                                else:\n                                        foreground_mean = load_summary_json(expected_summary_file)['foreground_mean'][\n                                            'Dice']\n                                        results_folds.append(foreground_mean)\n                                        f.write(f\",{foreground_mean:02.4f}\")\n                                f.write(f\",{np.nanmean(results_folds):02.4f}\\n\")\n\n\ndef summarize(input_file, output_file, folds: Tuple[int, ...], configs: Tuple[str, ...], datasets, trainers):\n    txt = np.loadtxt(input_file, dtype=str, delimiter=',')\n    num_folds = txt.shape[1] - 6\n    valid_configs = {}\n    for d in datasets:\n        if isinstance(d, int):\n            d = maybe_convert_to_dataset_name(d)\n        configs_in_txt = np.unique(txt[:, 1][txt[:, 0] == d])\n        valid_configs[d] = [i for i in configs_in_txt if i in configs]\n    assert max(folds) < num_folds\n\n    with open(output_file, 'w') as f:\n        f.write(\"name\")\n        for d in valid_configs.keys():\n            for c in valid_configs[d]:\n                f.write(\",%d_%s\" % (convert_dataset_name_to_id(d), c[:4]))\n        f.write(',mean\\n')\n        valid_entries = txt[:, 4] == nnUNet_results\n        for t in trainers.keys():\n            trainer_locs = valid_entries & (txt[:, 2] == t)\n            for pl in trainers[t]:\n                f.write(f\"{t}__{pl}\")\n                trainer_plan_locs = trainer_locs & (txt[:, 3] == pl)\n                r = []\n                for d in valid_configs.keys():\n                    trainer_plan_d_locs = trainer_plan_locs & (txt[:, 0] == d)\n                    for v in valid_configs[d]:\n                        trainer_plan_d_config_locs = trainer_plan_d_locs & (txt[:, 1] == v)\n                        if np.any(trainer_plan_d_config_locs):\n                            # we cannot have more than one row\n                            assert 
np.sum(trainer_plan_d_config_locs) == 1\n\n                            # now check that we have all folds\n                            selected_row = txt[np.argwhere(trainer_plan_d_config_locs)[0,0]]\n\n                            fold_results = selected_row[[i + 5 for i in folds]]\n\n                            if '' in fold_results:\n                                print('missing fold in', t, pl, d, v)\n                                f.write(\",nan\")\n                                r.append(np.nan)\n                            else:\n                                mean_dice = np.mean([float(i) for i in fold_results])\n                                f.write(f\",{mean_dice:02.4f}\")\n                                r.append(mean_dice)\n                        else:\n                            print('missing:', t, pl, d, v)\n                            f.write(\",nan\")\n                            r.append(np.nan)\n                f.write(f\",{np.mean(r):02.4f}\\n\")\n\n\nif __name__ == '__main__':\n    use_these_trainers = {\n        'nnUNetTrainer': ('nnUNetPlans',),\n        'nnUNetTrainerDiceCELoss_noSmooth': ('nnUNetPlans',),\n        'nnUNetTrainer_DASegOrd0': ('nnUNetPlans',),\n     }\n    all_results_file= join(nnUNet_results, 'customDecResults.csv')\n    datasets = [2, 3, 4, 17, 20, 24, 27, 38, 55, 64, 82]\n    collect_results(use_these_trainers, datasets, all_results_file)\n\n    folds = (0, 1, 2, 3, 4)\n    configs = (\"3d_fullres\", \"3d_lowres\")\n    output_file = join(nnUNet_results, 'customDecResults_summary5fold.csv')\n    summarize(all_results_file, output_file, folds, configs, datasets, use_these_trainers)\n\n    folds = (0, )\n    configs = (\"3d_fullres\", \"3d_lowres\")\n    output_file = join(nnUNet_results, 'customDecResults_summaryfold0.csv')\n    summarize(all_results_file, output_file, folds, configs, datasets, use_these_trainers)\n\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/batch_running/collect_results_custom_Decathlon_2d.py",
    "content": "from batchgenerators.utilities.file_and_folder_operations import *\n\nfrom nnunetv2.batch_running.collect_results_custom_Decathlon import collect_results, summarize\nfrom nnunetv2.paths import nnUNet_results\n\nif __name__ == '__main__':\n    use_these_trainers = {\n        'nnUNetTrainer': ('nnUNetPlans', ),\n    }\n    all_results_file = join(nnUNet_results, 'hrnet_results.csv')\n    datasets = [2, 3, 4, 17, 20, 24, 27, 38, 55, 64, 82]\n    collect_results(use_these_trainers, datasets, all_results_file)\n\n    folds = (0, )\n    configs = ('2d', )\n    output_file = join(nnUNet_results, 'hrnet_results_summary_fold0.csv')\n    summarize(all_results_file, output_file, folds, configs, datasets, use_these_trainers)\n\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/batch_running/generate_lsf_runs_customDecathlon.py",
    "content": "from copy import deepcopy\nimport numpy as np\n\n\ndef merge(dict1, dict2):\n    keys = np.unique(list(dict1.keys()) + list(dict2.keys()))\n    keys = np.unique(keys)\n    res = {}\n    for k in keys:\n        all_configs = []\n        if dict1.get(k) is not None:\n            all_configs += list(dict1[k])\n        if dict2.get(k) is not None:\n            all_configs += list(dict2[k])\n        if len(all_configs) > 0:\n            res[k] = tuple(np.unique(all_configs))\n    return res\n\n\nif __name__ == \"__main__\":\n    # after the Nature Methods paper we switch our evaluation to a different (more stable/high quality) set of\n    # datasets for evaluation and future development\n    configurations_all = {\n        2: (\"3d_fullres\", \"2d\"),\n        3: (\"2d\", \"3d_lowres\", \"3d_fullres\", \"3d_cascade_fullres\"),\n        4: (\"2d\", \"3d_fullres\"),\n        17: (\"2d\", \"3d_lowres\", \"3d_fullres\", \"3d_cascade_fullres\"),\n        20: (\"2d\", \"3d_fullres\"),\n        24: (\"2d\", \"3d_fullres\"),\n        27: (\"2d\", \"3d_fullres\"),\n        38: (\"2d\", \"3d_fullres\"),\n        55: (\"2d\", \"3d_lowres\", \"3d_fullres\", \"3d_cascade_fullres\"),\n        64: (\"2d\", \"3d_lowres\", \"3d_fullres\", \"3d_cascade_fullres\"),\n        82: (\"2d\", \"3d_fullres\"),\n        # 83: (\"2d\", \"3d_fullres\"),\n    }\n\n    configurations_3d_fr_only = {\n        i: (\"3d_fullres\", ) for i in configurations_all if \"3d_fullres\" in configurations_all[i]\n    }\n\n    configurations_3d_c_only = {\n        i: (\"3d_cascade_fullres\", ) for i in configurations_all if \"3d_cascade_fullres\" in configurations_all[i]\n    }\n\n    configurations_3d_lr_only = {\n        i: (\"3d_lowres\", ) for i in configurations_all if \"3d_lowres\" in configurations_all[i]\n    }\n\n    configurations_2d_only = {\n        i: (\"2d\", ) for i in configurations_all if \"2d\" in configurations_all[i]\n    }\n\n    num_gpus = 1\n    exclude_hosts = \"-R 
\\\"select[hname!='e230-dgx2-2']\\\" -R \\\"select[hname!='e230-dgx2-1']\\\" -R \\\"select[hname!='e230-dgx1-1']\\\" -R \\\"select[hname!='e230-dgxa100-1']\\\" -R \\\"select[hname!='e230-dgxa100-2']\\\" -R \\\"select[hname!='e230-dgxa100-3']\\\" -R \\\"select[hname!='e230-dgxa100-4']\\\"\"\n    resources = \"-R \\\"tensorcore\\\"\"\n    gpu_requirements = f\"-gpu num={num_gpus}:j_exclusive=yes:gmem=33G\"\n    queue = \"-q gpu-lowprio\"\n    preamble = \"-L /bin/bash \\\"source ~/load_env_cluster4.sh && \"\n    train_command = 'nnUNet_results=/dkfz/cluster/gpu/checkpoints/OE0441/isensee/nnUNet_results_remake_release nnUNetv2_train'\n\n    folds = (0, )\n    # use_this = configurations_2d_only\n    use_this = merge(configurations_3d_fr_only, configurations_3d_lr_only)\n    # use_this = merge(use_this, configurations_3d_c_only)\n\n    use_these_modules = {\n        'nnUNetTrainer': ('nnUNetPlans',),\n        'nnUNetTrainerDiceCELoss_noSmooth': ('nnUNetPlans',),\n        # 'nnUNetTrainer_DASegOrd0': ('nnUNetPlans',),\n    }\n\n    additional_arguments = f'--disable_checkpointing -num_gpus {num_gpus}'  # ''\n\n    output_file = \"/home/isensee/deleteme.txt\"\n    with open(output_file, 'w') as f:\n        for tr in use_these_modules.keys():\n            for p in use_these_modules[tr]:\n                for dataset in use_this.keys():\n                    for config in use_this[dataset]:\n                        for fl in folds:\n                            command = f'bsub {exclude_hosts} {resources} {queue} {gpu_requirements} {preamble} {train_command} {dataset} {config} {fl} -tr {tr} -p {p}'\n                            if additional_arguments is not None and len(additional_arguments) > 0:\n                                command += f' {additional_arguments}'\n                            f.write(f'{command}\\\"\\n')\n\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/batch_running/release_trainings/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/batch_running/release_trainings/nnunetv2_v1/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/batch_running/release_trainings/nnunetv2_v1/collect_results.py",
    "content": "from typing import Tuple\n\nimport numpy as np\nfrom batchgenerators.utilities.file_and_folder_operations import *\n\nfrom nnunetv2.evaluation.evaluate_predictions import load_summary_json\nfrom nnunetv2.paths import nnUNet_results\nfrom nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name, convert_dataset_name_to_id\nfrom nnunetv2.utilities.file_path_utilities import get_output_folder\n\n\ndef collect_results(trainers: dict, datasets: List, output_file: str,\n                    configurations=(\"2d\", \"3d_fullres\", \"3d_lowres\", \"3d_cascade_fullres\"),\n                    folds=tuple(np.arange(5))):\n    results_dirs = (nnUNet_results,)\n    datasets_names = [maybe_convert_to_dataset_name(i) for i in datasets]\n    with open(output_file, 'w') as f:\n        for i, d in zip(datasets, datasets_names):\n            for c in configurations:\n                for module in trainers.keys():\n                    for plans in trainers[module]:\n                        for r in results_dirs:\n                            expected_output_folder = get_output_folder(d, module, plans, c)\n                            if isdir(expected_output_folder):\n                                results_folds = []\n                                f.write(f\"{d},{c},{module},{plans},{r}\")\n                                for fl in folds:\n                                    expected_output_folder_fold = get_output_folder(d, module, plans, c, fl)\n                                    expected_summary_file = join(expected_output_folder_fold, \"validation\",\n                                                                 \"summary.json\")\n                                    if not isfile(expected_summary_file):\n                                        print('expected output file not found:', expected_summary_file)\n                                        f.write(\",\")\n                                        results_folds.append(np.nan)\n    
                                else:\n                                        foreground_mean = load_summary_json(expected_summary_file)['foreground_mean'][\n                                            'Dice']\n                                        results_folds.append(foreground_mean)\n                                        f.write(f\",{foreground_mean:02.4f}\")\n                                f.write(f\",{np.nanmean(results_folds):02.4f}\\n\")\n\n\ndef summarize(input_file, output_file, folds: Tuple[int, ...], configs: Tuple[str, ...], datasets, trainers):\n    txt = np.loadtxt(input_file, dtype=str, delimiter=',')\n    num_folds = txt.shape[1] - 6\n    valid_configs = {}\n    for d in datasets:\n        if isinstance(d, int):\n            d = maybe_convert_to_dataset_name(d)\n        configs_in_txt = np.unique(txt[:, 1][txt[:, 0] == d])\n        valid_configs[d] = [i for i in configs_in_txt if i in configs]\n    assert max(folds) < num_folds\n\n    with open(output_file, 'w') as f:\n        f.write(\"name\")\n        for d in valid_configs.keys():\n            for c in valid_configs[d]:\n                f.write(\",%d_%s\" % (convert_dataset_name_to_id(d), c[:4]))\n        f.write(',mean\\n')\n        valid_entries = txt[:, 4] == nnUNet_results\n        for t in trainers.keys():\n            trainer_locs = valid_entries & (txt[:, 2] == t)\n            for pl in trainers[t]:\n                f.write(f\"{t}__{pl}\")\n                trainer_plan_locs = trainer_locs & (txt[:, 3] == pl)\n                r = []\n                for d in valid_configs.keys():\n                    trainer_plan_d_locs = trainer_plan_locs & (txt[:, 0] == d)\n                    for v in valid_configs[d]:\n                        trainer_plan_d_config_locs = trainer_plan_d_locs & (txt[:, 1] == v)\n                        if np.any(trainer_plan_d_config_locs):\n                            # we cannot have more than one row\n                            assert 
np.sum(trainer_plan_d_config_locs) == 1\n\n                            # now check that we have all folds\n                            selected_row = txt[np.argwhere(trainer_plan_d_config_locs)[0,0]]\n\n                            fold_results = selected_row[[i + 5 for i in folds]]\n\n                            if '' in fold_results:\n                                print('missing fold in', t, pl, d, v)\n                                f.write(\",nan\")\n                                r.append(np.nan)\n                            else:\n                                mean_dice = np.mean([float(i) for i in fold_results])\n                                f.write(f\",{mean_dice:02.4f}\")\n                                r.append(mean_dice)\n                        else:\n                            print('missing:', t, pl, d, v)\n                            f.write(\",nan\")\n                            r.append(np.nan)\n                f.write(f\",{np.mean(r):02.4f}\\n\")\n\n\nif __name__ == '__main__':\n    use_these_trainers = {\n        'nnUNetTrainer': ('nnUNetPlans',),\n        'nnUNetTrainer_v1loss': ('nnUNetPlans',),\n     }\n    all_results_file = join(nnUNet_results, 'customDecResults.csv')\n    datasets = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 17, 20, 24, 27, 35, 38, 48, 55, 64, 82]\n    collect_results(use_these_trainers, datasets, all_results_file)\n\n    folds = (0, 1, 2, 3, 4)\n    configs = (\"3d_fullres\", \"3d_lowres\")\n    output_file = join(nnUNet_results, 'customDecResults_summary5fold.csv')\n    summarize(all_results_file, output_file, folds, configs, datasets, use_these_trainers)\n\n    folds = (0, )\n    configs = (\"3d_fullres\", \"3d_lowres\")\n    output_file = join(nnUNet_results, 'customDecResults_summaryfold0.csv')\n    summarize(all_results_file, output_file, folds, configs, datasets, use_these_trainers)\n\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/batch_running/release_trainings/nnunetv2_v1/generate_lsf_commands.py",
    "content": "from copy import deepcopy\nimport numpy as np\n\n\ndef merge(dict1, dict2):\n    keys = np.unique(list(dict1.keys()) + list(dict2.keys()))\n    keys = np.unique(keys)\n    res = {}\n    for k in keys:\n        all_configs = []\n        if dict1.get(k) is not None:\n            all_configs += list(dict1[k])\n        if dict2.get(k) is not None:\n            all_configs += list(dict2[k])\n        if len(all_configs) > 0:\n            res[k] = tuple(np.unique(all_configs))\n    return res\n\n\nif __name__ == \"__main__\":\n    # after the Nature Methods paper we switch our evaluation to a different (more stable/high quality) set of\n    # datasets for evaluation and future development\n    configurations_all = {\n        # 1: (\"3d_fullres\", \"2d\"),\n        2: (\"3d_fullres\", \"2d\"),\n        # 3: (\"2d\", \"3d_lowres\", \"3d_fullres\", \"3d_cascade_fullres\"),\n        # 4: (\"2d\", \"3d_fullres\"),\n        5: (\"2d\", \"3d_fullres\"),\n        # 6: (\"2d\", \"3d_lowres\", \"3d_fullres\", \"3d_cascade_fullres\"),\n        # 7: (\"2d\", \"3d_lowres\", \"3d_fullres\", \"3d_cascade_fullres\"),\n        # 8: (\"2d\", \"3d_lowres\", \"3d_fullres\", \"3d_cascade_fullres\"),\n        # 9: (\"2d\", \"3d_lowres\", \"3d_fullres\", \"3d_cascade_fullres\"),\n        # 10: (\"2d\", \"3d_lowres\", \"3d_fullres\", \"3d_cascade_fullres\"),\n        # 17: (\"2d\", \"3d_lowres\", \"3d_fullres\", \"3d_cascade_fullres\"),\n        20: (\"2d\", \"3d_fullres\"),\n        24: (\"2d\", \"3d_fullres\"),\n        27: (\"2d\", \"3d_fullres\"),\n        35: (\"2d\", \"3d_fullres\"),\n        38: (\"2d\", \"3d_fullres\"),\n        # 55: (\"2d\", \"3d_lowres\", \"3d_fullres\", \"3d_cascade_fullres\"),\n        # 64: (\"2d\", \"3d_lowres\", \"3d_fullres\", \"3d_cascade_fullres\"),\n        # 82: (\"2d\", \"3d_fullres\"),\n        # 83: (\"2d\", \"3d_fullres\"),\n    }\n\n    configurations_3d_fr_only = {\n        i: (\"3d_fullres\", ) for i in configurations_all if 
\"3d_fullres\" in configurations_all[i]\n    }\n\n    configurations_3d_c_only = {\n        i: (\"3d_cascade_fullres\", ) for i in configurations_all if \"3d_cascade_fullres\" in configurations_all[i]\n    }\n\n    configurations_3d_lr_only = {\n        i: (\"3d_lowres\", ) for i in configurations_all if \"3d_lowres\" in configurations_all[i]\n    }\n\n    configurations_2d_only = {\n        i: (\"2d\", ) for i in configurations_all if \"2d\" in configurations_all[i]\n    }\n\n    num_gpus = 1\n    exclude_hosts = \"-R \\\"select[hname!='e230-dgx2-2']\\\" -R \\\"select[hname!='e230-dgx2-1']\\\"\"\n    resources = \"-R \\\"tensorcore\\\"\"\n    gpu_requirements = f\"-gpu num={num_gpus}:j_exclusive=yes:gmem=1G\"\n    queue = \"-q gpu-lowprio\"\n    preamble = \"-L /bin/bash \\\"source ~/load_env_cluster4.sh && \"\n    train_command = 'nnUNet_keep_files_open=True nnUNet_results=/dkfz/cluster/gpu/data/OE0441/isensee/nnUNet_results_remake_release_normfix nnUNetv2_train'\n\n    folds = (0, 1, 2, 3, 4)\n    # use_this = configurations_2d_only\n    # use_this = merge(configurations_3d_fr_only, configurations_3d_lr_only)\n    # use_this = merge(use_this, configurations_3d_c_only)\n    use_this = configurations_all\n\n    use_these_modules = {\n        'nnUNetTrainer': ('nnUNetPlans',),\n    }\n\n    additional_arguments = f'--disable_checkpointing -num_gpus {num_gpus}'  # ''\n\n    output_file = \"/home/isensee/deleteme.txt\"\n    with open(output_file, 'w') as f:\n        for tr in use_these_modules.keys():\n            for p in use_these_modules[tr]:\n                for dataset in use_this.keys():\n                    for config in use_this[dataset]:\n                        for fl in folds:\n                            command = f'bsub {exclude_hosts} {resources} {queue} {gpu_requirements} {preamble} {train_command} {dataset} {config} {fl} -tr {tr} -p {p}'\n                            if additional_arguments is not None and len(additional_arguments) > 0:\n               
                 command += f' {additional_arguments}'\n                            f.write(f'{command}\\\"\\n')\n\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/configuration.py",
    "content": "import os\n\nfrom nnunetv2.utilities.default_n_proc_DA import get_allowed_n_proc_DA\n\ndefault_num_processes = 8 if 'nnUNet_def_n_proc' not in os.environ else int(os.environ['nnUNet_def_n_proc'])\n\nANISO_THRESHOLD = 3  # determines when a sample is considered anisotropic (3 means that the spacing in the low\n# resolution axis must be 3x as large as the next largest spacing)\n\ndefault_n_proc_DA = get_allowed_n_proc_DA()\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/dataset_conversion/Dataset017_BTCV.py",
    "content": "#    Copyright 2020 Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany\r\n#\r\n#    Licensed under the Apache License, Version 2.0 (the \"License\");\r\n#    you may not use this file except in compliance with the License.\r\n#    You may obtain a copy of the License at\r\n#\r\n#        http://www.apache.org/licenses/LICENSE-2.0\r\n#\r\n#    Unless required by applicable law or agreed to in writing, software\r\n#    distributed under the License is distributed on an \"AS IS\" BASIS,\r\n#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n#    See the License for the specific language governing permissions and\r\n#    limitations under the License.\r\n\r\n\r\nimport multiprocessing\r\nimport shutil\r\nfrom multiprocessing import Pool\r\nfrom collections import OrderedDict\r\nimport SimpleITK as sitk\r\nimport numpy as np\r\nfrom batchgenerators.utilities.file_and_folder_operations import *\r\nfrom nnunetv2.dataset_conversion.generate_dataset_json import generate_dataset_json\r\nfrom nnunetv2.paths import nnUNet_raw\r\n\r\n\r\nif __name__ == \"__main__\":\r\n    base = \"/data/linshan/CTs/BTCV/\"\r\n\r\n    task_id = 17\r\n    task_name = \"BTCV\"\r\n    prefix = 'BTCV'\r\n\r\n    foldername = \"Dataset%03.0d_%s\" % (task_id, task_name)\r\n\r\n    out_base = join(nnUNet_raw, foldername)\r\n    imagestr = join(out_base, \"imagesTr\")\r\n    imagests = join(out_base, \"imagesTs\")\r\n    labelstr = join(out_base, \"labelsTr\")\r\n    maybe_mkdir_p(imagestr)\r\n    maybe_mkdir_p(imagests)\r\n    maybe_mkdir_p(labelstr)\r\n\r\n    train_folder = join(base, \"imagesTr\")\r\n    label_folder = join(base, \"labelsTr\")\r\n    test_folder = join(base, \"imagesTs\")\r\n    train_patient_names = []\r\n    test_patient_names = []\r\n    train_patients = subfiles(train_folder, join=False, suffix = 'nii.gz')\r\n    for p in train_patients:\r\n        serial_number = int(p[3:7])\r\n        
train_patient_name = f'{prefix}_{serial_number:03d}.nii.gz'\r\n        label_file = join(label_folder, f'label{p[3:]}')\r\n        image_file = join(train_folder, p)\r\n        shutil.copy(image_file, join(imagestr, f'{train_patient_name[:8]}_0000.nii.gz'))\r\n        shutil.copy(label_file, join(labelstr, train_patient_name))\r\n        train_patient_names.append(train_patient_name)\r\n\r\n    test_patients = subfiles(test_folder, join=False, suffix=\".nii.gz\")\r\n    for p in test_patients:\r\n        p = p[:-7]\r\n        image_file = join(test_folder, p + \".nii.gz\")\r\n        serial_number = int(p[3:7])\r\n        test_patient_name = f'{prefix}_{serial_number:03d}.nii.gz'\r\n        shutil.copy(image_file, join(imagests, f'{test_patient_name[:8]}_0000.nii.gz'))\r\n        test_patient_names.append(test_patient_name)\r\n\r\n    generate_dataset_json(out_base,\r\n                          channel_names={0: 'CT'},\r\n                          labels={\r\n                                \"background\":0,\r\n                                \"spleen\":1,\r\n                                \"right kidney\":2,\r\n                                \"left kidney\":3,\r\n                                \"gallbladder\":4,\r\n                                \"esophagus\":5,\r\n                                \"liver\":6,\r\n                                \"stomach\":7,\r\n                                \"aorta\":8,\r\n                                \"inferior vena cava\":9,\r\n                                 \"portal vein and splenic vein\":10,\r\n                                 \"pancreas\":11,\r\n                                 \"right adrenal gland\":12,\r\n                                 \"left adrenal gland\":13\r\n                          },\r\n                          num_training_cases=len(train_patient_names),\r\n                          file_ending='.nii.gz',\r\n                          license='see challenge website',\r\n                          
reference='see https://www.synapse.org/#!Synapse:syn3193805/wiki/217789',\r\n                          dataset_release='0.0')\r\n\r\n\r\n    # json_dict = OrderedDict()\r\n    # json_dict['name'] = \"AbdominalOrganSegmentation\"\r\n    # json_dict['description'] = \"Multi-Atlas Labeling Beyond the Cranial Vault Abdominal Organ Segmentation\"\r\n    # json_dict['tensorImageSize'] = \"3D\"\r\n    # json_dict['reference'] = \"https://www.synapse.org/#!Synapse:syn3193805/wiki/217789\"\r\n    # json_dict['licence'] = \"see challenge website\"\r\n    # json_dict['release'] = \"0.0\"\r\n    # json_dict['modality'] = {\r\n    #     \"0\": \"CT\",\r\n    # }\r\n    # json_dict['labels'] = OrderedDict({\r\n    #     \"00\": \"background\",\r\n    #     \"01\": \"spleen\",\r\n    #     \"02\": \"right kidney\",\r\n    #     \"03\": \"left kidney\",\r\n    #     \"04\": \"gallbladder\",\r\n    #     \"05\": \"esophagus\",\r\n    #     \"06\": \"liver\",\r\n    #     \"07\": \"stomach\",\r\n    #     \"08\": \"aorta\",\r\n    #     \"09\": \"inferior vena cava\",\r\n    #     \"10\": \"portal vein and splenic vein\",\r\n    #     \"11\": \"pancreas\",\r\n    #     \"12\": \"right adrenal gland\",\r\n    #     \"13\": \"left adrenal gland\"}\r\n    # )\r\n    # json_dict['numTraining'] = len(train_patient_names)\r\n    # json_dict['numTest'] = len(test_patient_names)\r\n    # json_dict['training'] = [{'image': \"./imagesTr/%s\" % train_patient_name, \"label\": \"./labelsTr/%s\" % train_patient_name} for i, train_patient_name in enumerate(train_patient_names)]\r\n    # json_dict['test'] = [\"./imagesTs/%s\" % test_patient_name for test_patient_name in test_patient_names]\r\n    #\r\n    # save_json(json_dict, os.path.join(out_base, \"dataset.json\"))"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/dataset_conversion/Dataset027_ACDC.py",
    "content": "import os\nimport shutil\nfrom pathlib import Path\n\nfrom nnunetv2.dataset_conversion.generate_dataset_json import generate_dataset_json\nfrom nnunetv2.paths import nnUNet_raw\n\n\ndef make_out_dirs(dataset_id: int, task_name=\"ACDC\"):\n    dataset_name = f\"Dataset{dataset_id:03d}_{task_name}\"\n\n    out_dir = Path(nnUNet_raw.replace('\"', \"\")) / dataset_name\n    out_train_dir = out_dir / \"imagesTr\"\n    out_labels_dir = out_dir / \"labelsTr\"\n    out_test_dir = out_dir / \"imagesTs\"\n\n    os.makedirs(out_dir, exist_ok=True)\n    os.makedirs(out_train_dir, exist_ok=True)\n    os.makedirs(out_labels_dir, exist_ok=True)\n    os.makedirs(out_test_dir, exist_ok=True)\n\n    return out_dir, out_train_dir, out_labels_dir, out_test_dir\n\n\ndef copy_files(src_data_folder: Path, train_dir: Path, labels_dir: Path, test_dir: Path):\n    \"\"\"Copy files from the ACDC dataset to the nnUNet dataset folder. Returns the number of training cases.\"\"\"\n    patients_train = sorted([f for f in (src_data_folder / \"training\").iterdir() if f.is_dir()])\n    patients_test = sorted([f for f in (src_data_folder / \"testing\").iterdir() if f.is_dir()])\n\n    num_training_cases = 0\n    # Copy training files and corresponding labels.\n    for patient_dir in patients_train:\n        for file in patient_dir.iterdir():\n            if file.suffix == \".gz\" and \"_gt\" not in file.name and \"_4d\" not in file.name:\n                # The stem is 'patient.nii', and the suffix is '.gz'.\n                # We split the stem and append _0000 to the patient part.\n                shutil.copy(file, train_dir / f\"{file.stem.split('.')[0]}_0000.nii.gz\")\n                num_training_cases += 1\n            elif file.suffix == \".gz\" and \"_gt\" in file.name:\n                shutil.copy(file, labels_dir / file.name.replace(\"_gt\", \"\"))\n\n    # Copy test files.\n    for patient_dir in patients_test:\n        for file in patient_dir.iterdir():\n            if 
file.suffix == \".gz\" and \"_gt\" not in file.name and \"_4d\" not in file.name:\n                shutil.copy(file, test_dir / f\"{file.stem.split('.')[0]}_0000.nii.gz\")\n\n    return num_training_cases\n\n\ndef convert_acdc(src_data_folder: str, dataset_id=27):\n    out_dir, train_dir, labels_dir, test_dir = make_out_dirs(dataset_id=dataset_id)\n    num_training_cases = copy_files(Path(src_data_folder), train_dir, labels_dir, test_dir)\n\n    generate_dataset_json(\n        str(out_dir),\n        channel_names={\n            0: \"cineMRI\",\n        },\n        labels={\n            \"background\": 0,\n            \"RV\": 1,\n            \"MLV\": 2,\n            \"LVC\": 3,\n        },\n        file_ending=\".nii.gz\",\n        num_training_cases=num_training_cases,\n    )\n\n\nif __name__ == \"__main__\":\n    import argparse\n\n    parser = argparse.ArgumentParser()\n    parser.add_argument(\n        \"-i\",\n        \"--input_folder\",\n        type=str,\n        help=\"The downloaded ACDC dataset dir. Should contain extracted 'training' and 'testing' folders.\",\n    )\n    parser.add_argument(\n        \"-d\", \"--dataset_id\", required=False, type=int, default=27, help=\"nnU-Net Dataset ID, default: 27\"\n    )\n    args = parser.parse_args()\n    print(\"Converting...\")\n    convert_acdc(args.input_folder, args.dataset_id)\n    print(\"Done!\")\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/dataset_conversion/Dataset073_Fluo_C3DH_A549_SIM.py",
    "content": "from nnunetv2.dataset_conversion.generate_dataset_json import generate_dataset_json\nfrom nnunetv2.paths import nnUNet_raw, nnUNet_preprocessed\nimport tifffile\nfrom batchgenerators.utilities.file_and_folder_operations import *\nimport shutil\n\n\nif __name__ == '__main__':\n    \"\"\"\n    This is going to be my test dataset for working with tif as input and output images\n    \n    All we do here is copy the files and rename them. Not file conversions take place \n    \"\"\"\n    dataset_name = 'Dataset073_Fluo_C3DH_A549_SIM'\n\n    imagestr = join(nnUNet_raw, dataset_name, 'imagesTr')\n    imagests = join(nnUNet_raw, dataset_name, 'imagesTs')\n    labelstr = join(nnUNet_raw, dataset_name, 'labelsTr')\n    maybe_mkdir_p(imagestr)\n    maybe_mkdir_p(imagests)\n    maybe_mkdir_p(labelstr)\n\n    # we extract the downloaded train and test datasets to two separate folders and name them Fluo-C3DH-A549-SIM_train\n    # and Fluo-C3DH-A549-SIM_test\n    train_source = '/home/fabian/Downloads/Fluo-C3DH-A549-SIM_train'\n    test_source = '/home/fabian/Downloads/Fluo-C3DH-A549-SIM_test'\n\n    # with the old nnU-Net we had to convert all the files to nifti. This is no longer required. We can just copy the\n    # tif files\n\n    # tif is broken when it comes to spacing. No standards. Grr. So when we use tif nnU-Net expects a separate file\n    # that specifies the spacing. This file needs to exist for EVERY training/test case to allow for different spacings\n    # between files. Important! The spacing must align with the axes.\n    # Here when we do print(tifffile.imread('IMAGE').shape) we get (29, 300, 350). The low resolution axis is the first.\n    # The spacing on the website is griven in the wrong axis order. 
Great.\n    spacing = (1, 0.126, 0.126)\n\n    # train set\n    for seq in ['01', '02']:\n        images_dir = join(train_source, seq)\n        seg_dir = join(train_source, seq + '_GT', 'SEG')\n        # if we were to be super clean we would go by IDs but here we just trust the files are sorted the correct way.\n        # Simpler filenames in the cell tracking challenge would be soooo nice.\n        images = subfiles(images_dir, suffix='.tif', sort=True, join=False)\n        segs = subfiles(seg_dir, suffix='.tif', sort=True, join=False)\n        for i, (im, se) in enumerate(zip(images, segs)):\n            target_name = f'{seq}_image_{i:03d}'\n            # we still need the '_0000' suffix for images! Otherwise we would not be able to support multiple input\n            # channels distributed over separate files\n            shutil.copy(join(images_dir, im), join(imagestr, target_name + '_0000.tif'))\n            # spacing file!\n            save_json({'spacing': spacing}, join(imagestr, target_name + '.json'))\n            shutil.copy(join(seg_dir, se), join(labelstr, target_name + '.tif'))\n            # spacing file!\n            save_json({'spacing': spacing}, join(labelstr, target_name + '.json'))\n\n    # test set, same a strain just without the segmentations\n    for seq in ['01', '02']:\n        images_dir = join(test_source, seq)\n        images = subfiles(images_dir, suffix='.tif', sort=True, join=False)\n        for i, im in enumerate(images):\n            target_name = f'{seq}_image_{i:03d}'\n            shutil.copy(join(images_dir, im), join(imagests, target_name + '_0000.tif'))\n            # spacing file!\n            save_json({'spacing': spacing}, join(imagests, target_name + '.json'))\n\n    # now we generate the dataset json\n    generate_dataset_json(\n        join(nnUNet_raw, dataset_name),\n        {0: 'fluorescence_microscopy'},\n        {'background': 0, 'cell': 1},\n        60,\n        '.tif'\n    )\n\n    # custom split to ensure we are 
stratifying properly. This dataset only has 2 folds\n    caseids = [i[:-4] for i in subfiles(labelstr, suffix='.tif', join=False)]\n    splits = []\n    splits.append(\n        {'train': [i for i in caseids if i.startswith('01_')], 'val': [i for i in caseids if i.startswith('02_')]}\n    )\n    splits.append(\n        {'train': [i for i in caseids if i.startswith('02_')], 'val': [i for i in caseids if i.startswith('01_')]}\n    )\n    save_json(splits, join(nnUNet_preprocessed, dataset_name, 'splits_final.json'))"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/dataset_conversion/Dataset114_MNMs.py",
    "content": "import csv\nimport os\nimport random\nfrom pathlib import Path\n\nimport nibabel as nib\nfrom batchgenerators.utilities.file_and_folder_operations import load_json, save_json\n\nfrom nnunetv2.dataset_conversion.Dataset027_ACDC import make_out_dirs\nfrom nnunetv2.dataset_conversion.generate_dataset_json import generate_dataset_json\nfrom nnunetv2.paths import nnUNet_preprocessed\n\n\ndef read_csv(csv_file: str):\n    patient_info = {}\n\n    with open(csv_file) as csvfile:\n        reader = csv.reader(csvfile)\n        headers = next(reader)\n        patient_index = headers.index(\"External code\")\n        ed_index = headers.index(\"ED\")\n        es_index = headers.index(\"ES\")\n        vendor_index = headers.index(\"Vendor\")\n\n        for row in reader:\n            patient_info[row[patient_index]] = {\n                \"ed\": int(row[ed_index]),\n                \"es\": int(row[es_index]),\n                \"vendor\": row[vendor_index],\n            }\n\n    return patient_info\n\n\n# ------------------------------------------------------------------------------\n# Conversion to nnUNet format\n# ------------------------------------------------------------------------------\ndef convert_mnms(src_data_folder: Path, csv_file_name: str, dataset_id: int):\n    out_dir, out_train_dir, out_labels_dir, out_test_dir = make_out_dirs(dataset_id, task_name=\"MNMs\")\n    patients_train = [f for f in (src_data_folder / \"Training\" / \"Labeled\").iterdir() if f.is_dir()]\n    patients_test = [f for f in (src_data_folder / \"Testing\").iterdir() if f.is_dir()]\n\n    patient_info = read_csv(str(src_data_folder / csv_file_name))\n\n    save_cardiac_phases(patients_train, patient_info, out_train_dir, out_labels_dir)\n    save_cardiac_phases(patients_test, patient_info, out_test_dir)\n\n    # There are non-orthonormal direction cosines in the test and validation data.\n    # Not sure if the data should be fixed, or we should skip the problematic data.\n    # 
patients_val = [f for f in (src_data_folder / \"Validation\").iterdir() if f.is_dir()]\n    # save_cardiac_phases(patients_val, patient_info, out_train_dir, out_labels_dir)\n\n    generate_dataset_json(\n        str(out_dir),\n        channel_names={\n            0: \"cineMRI\",\n        },\n        labels={\"background\": 0, \"LVBP\": 1, \"LVM\": 2, \"RV\": 3},\n        file_ending=\".nii.gz\",\n        num_training_cases=len(patients_train) * 2,  # 2 since we have ED and ES for each patient\n    )\n\n\ndef save_cardiac_phases(\n    patients: list[Path], patient_info: dict[str, dict[str, int]], out_dir: Path, labels_dir: Path = None\n):\n    for patient in patients:\n        print(f\"Processing patient: {patient.name}\")\n\n        image = nib.load(patient / f\"{patient.name}_sa.nii.gz\")\n        ed_frame = patient_info[patient.name][\"ed\"]\n        es_frame = patient_info[patient.name][\"es\"]\n\n        save_extracted_nifti_slice(image, ed_frame=ed_frame, es_frame=es_frame, out_dir=out_dir, patient=patient)\n\n        if labels_dir:\n            label = nib.load(patient / f\"{patient.name}_sa_gt.nii.gz\")\n            save_extracted_nifti_slice(label, ed_frame=ed_frame, es_frame=es_frame, out_dir=labels_dir, patient=patient)\n\n\ndef save_extracted_nifti_slice(image, ed_frame: int, es_frame: int, out_dir: Path, patient: Path):\n    # Save only extracted diastole and systole slices from the 4D H x W x D x time volume.\n    image_ed = nib.Nifti1Image(image.dataobj[..., ed_frame], image.affine)\n    image_es = nib.Nifti1Image(image.dataobj[..., es_frame], image.affine)\n\n    # Labels do not have modality identifiers. 
Labels always end with 'gt'.\n    suffix = \".nii.gz\" if image.get_filename().endswith(\"_gt.nii.gz\") else \"_0000.nii.gz\"\n\n    nib.save(image_ed, str(out_dir / f\"{patient.name}_frame{ed_frame:02d}{suffix}\"))\n    nib.save(image_es, str(out_dir / f\"{patient.name}_frame{es_frame:02d}{suffix}\"))\n\n\n# ------------------------------------------------------------------------------\n# Create custom splits\n# ------------------------------------------------------------------------------\ndef create_custom_splits(src_data_folder: Path, csv_file: str, dataset_id: int, num_val_patients: int = 25):\n    existing_splits = os.path.join(nnUNet_preprocessed, f\"Dataset{dataset_id}_MNMs\", \"splits_final.json\")\n    splits = load_json(existing_splits)\n\n    patients_train = [f.name for f in (src_data_folder / \"Training\" / \"Labeled\").iterdir() if f.is_dir()]\n    # Filter out any patients not in the training set\n    patient_info = {\n        patient: data\n        for patient, data in read_csv(str(src_data_folder / csv_file)).items()\n        if patient in patients_train\n    }\n\n    # Get train and validation patients for both vendors\n    patients_a = [patient for patient, patient_data in patient_info.items() if patient_data[\"vendor\"] == \"A\"]\n    patients_b = [patient for patient, patient_data in patient_info.items() if patient_data[\"vendor\"] == \"B\"]\n    train_a, val_a = get_vendor_split(patients_a, num_val_patients)\n    train_b, val_b = get_vendor_split(patients_b, num_val_patients)\n\n    # Build filenames from corresponding patient frames\n    train_a = [f\"{patient}_frame{patient_info[patient][frame]:02d}\" for patient in train_a for frame in [\"es\", \"ed\"]]\n    train_b = [f\"{patient}_frame{patient_info[patient][frame]:02d}\" for patient in train_b for frame in [\"es\", \"ed\"]]\n    train_a_mix_1, train_a_mix_2 = train_a[: len(train_a) // 2], train_a[len(train_a) // 2 :]\n    train_b_mix_1, train_b_mix_2 = train_b[: len(train_b) // 2], 
train_b[len(train_b) // 2 :]\n    val_a = [f\"{patient}_frame{patient_info[patient][frame]:02d}\" for patient in val_a for frame in [\"es\", \"ed\"]]\n    val_b = [f\"{patient}_frame{patient_info[patient][frame]:02d}\" for patient in val_b for frame in [\"es\", \"ed\"]]\n\n    for train_set in [train_a, train_b, train_a_mix_1 + train_b_mix_1, train_a_mix_2 + train_b_mix_2]:\n        # For each train set, we evaluate on A, B and (A + B) respectively\n        # See table 3 from the original paper for more details.\n        splits.append({\"train\": train_set, \"val\": val_a})\n        splits.append({\"train\": train_set, \"val\": val_b})\n        splits.append({\"train\": train_set, \"val\": val_a + val_b})\n\n    save_json(splits, existing_splits)\n\n\ndef get_vendor_split(patients: list[str], num_val_patients: int):\n    random.shuffle(patients)\n    total_patients = len(patients)\n    num_training_patients = total_patients - num_val_patients\n    return patients[:num_training_patients], patients[num_training_patients:]\n\n\nif __name__ == \"__main__\":\n    import argparse\n\n    class RawTextArgumentDefaultsHelpFormatter(argparse.ArgumentDefaultsHelpFormatter, argparse.RawTextHelpFormatter):\n        pass\n\n    parser = argparse.ArgumentParser(add_help=False, formatter_class=RawTextArgumentDefaultsHelpFormatter)\n    parser.add_argument(\n        \"-h\",\n        \"--help\",\n        action=\"help\",\n        default=argparse.SUPPRESS,\n        help=\"MNMs conversion utility helper. This script can be used to convert MNMs data into the expected nnUNet \"\n        \"format. 
It can also be used to create additional custom splits, for explicitly training on combinations \"\n        \"of vendors A and B (see `--custom-splits`).\\n\"\n        \"If you wish to generate the custom splits, run the following pipeline:\\n\\n\"\n        \"(1) Run `Dataset114_MNMs -i <raw_Data_dir>\\n\"\n        \"(2) Run `nnUNetv2_plan_and_preprocess -d 114 --verify_dataset_integrity`\\n\"\n        \"(3) Start training, but stop after initial splits are created: `nnUNetv2_train 114 2d 0`\\n\"\n        \"(4) Re-run `Dataset114_MNMs`, with `-s True`.\\n\"\n        \"(5) Re-run training.\\n\",\n    )\n    parser.add_argument(\n        \"-i\",\n        \"--input_folder\",\n        type=str,\n        default=\"./data/M&Ms/OpenDataset/\",\n        help=\"The downloaded MNMs dataset dir. Should contain a csv file, as well as Training, Validation and Testing \"\n        \"folders.\",\n    )\n    parser.add_argument(\n        \"-c\",\n        \"--csv_file_name\",\n        type=str,\n        default=\"211230_M&Ms_Dataset_information_diagnosis_opendataset.csv\",\n        help=\"The csv file containing the dataset information.\",\n    ),\n    parser.add_argument(\"-d\", \"--dataset_id\", type=int, default=114, help=\"nnUNet Dataset ID.\")\n    parser.add_argument(\n        \"-s\",\n        \"--custom_splits\",\n        type=bool,\n        default=False,\n        help=\"Whether to append custom splits for training and testing on different vendors. If True, will create \"\n        \"splits for training on patients from vendors A, B or a mix of A and B. Splits are tested on a hold-out \"\n        \"validation sets of patients from A, B or A and B combined. 
See section 2.4 and table 3 from \"\n        \"https://arxiv.org/abs/2011.07592 for more info.\",\n    )\n\n    args = parser.parse_args()\n    args.input_folder = Path(args.input_folder)\n\n    if args.custom_splits:\n        print(\"Appending custom splits...\")\n        create_custom_splits(args.input_folder, args.csv_file_name, args.dataset_id)\n    else:\n        print(\"Converting...\")\n        convert_mnms(args.input_folder, args.csv_file_name, args.dataset_id)\n\n    print(\"Done!\")\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/dataset_conversion/Dataset115_EMIDEC.py",
    "content": "import shutil\nfrom pathlib import Path\n\nfrom nnunetv2.dataset_conversion.Dataset027_ACDC import make_out_dirs\nfrom nnunetv2.dataset_conversion.generate_dataset_json import generate_dataset_json\n\n\ndef copy_files(src_data_dir: Path, src_test_dir: Path, train_dir: Path, labels_dir: Path, test_dir: Path):\n    \"\"\"Copy files from the EMIDEC dataset to the nnUNet dataset folder. Returns the number of training cases.\"\"\"\n    patients_train = sorted([f for f in src_data_dir.iterdir() if f.is_dir()])\n    patients_test = sorted([f for f in src_test_dir.iterdir() if f.is_dir()])\n\n    # Copy training files and corresponding labels.\n    for patient in patients_train:\n        train_file = patient / \"Images\" / f\"{patient.name}.nii.gz\"\n        label_file = patient / \"Contours\" / f\"{patient.name}.nii.gz\"\n        shutil.copy(train_file, train_dir / f\"{train_file.stem.split('.')[0]}_0000.nii.gz\")\n        shutil.copy(label_file, labels_dir)\n\n    # Copy test files.\n    for patient in patients_test:\n        test_file = patient / \"Images\" / f\"{patient.name}.nii.gz\"\n        shutil.copy(test_file, test_dir / f\"{test_file.stem.split('.')[0]}_0000.nii.gz\")\n\n    return len(patients_train)\n\n\ndef convert_emidec(src_data_dir: str, src_test_dir: str, dataset_id=27):\n    out_dir, train_dir, labels_dir, test_dir = make_out_dirs(dataset_id=dataset_id, task_name=\"EMIDEC\")\n    num_training_cases = copy_files(Path(src_data_dir), Path(src_test_dir), train_dir, labels_dir, test_dir)\n\n    generate_dataset_json(\n        str(out_dir),\n        channel_names={\n            0: \"cineMRI\",\n        },\n        labels={\n            \"background\": 0,\n            \"cavity\": 1,\n            \"normal_myocardium\": 2,\n            \"myocardial_infarction\": 3,\n            \"no_reflow\": 4,\n        },\n        file_ending=\".nii.gz\",\n        num_training_cases=num_training_cases,\n    )\n\n\nif __name__ == \"__main__\":\n    import 
argparse\n\n    parser = argparse.ArgumentParser()\n    parser.add_argument(\"-i\", \"--input_dir\", type=str, help=\"The EMIDEC dataset directory.\")\n    parser.add_argument(\"-t\", \"--test_dir\", type=str, help=\"The EMIDEC test set directory.\")\n    parser.add_argument(\n        \"-d\", \"--dataset_id\", required=False, type=int, default=115, help=\"nnU-Net Dataset ID, default: 115\"\n    )\n    args = parser.parse_args()\n    print(\"Converting...\")\n    convert_emidec(args.input_dir, args.test_dir, args.dataset_id)\n    print(\"Done!\")\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/dataset_conversion/Dataset120_RoadSegmentation.py",
    "content": "import multiprocessing\nimport shutil\nfrom multiprocessing import Pool\n\nfrom batchgenerators.utilities.file_and_folder_operations import *\n\nfrom nnunetv2.dataset_conversion.generate_dataset_json import generate_dataset_json\nfrom nnunetv2.paths import nnUNet_raw\nfrom skimage import io\nfrom acvl_utils.morphology.morphology_helper import generic_filter_components\nfrom scipy.ndimage import binary_fill_holes\n\n\ndef load_and_covnert_case(input_image: str, input_seg: str, output_image: str, output_seg: str,\n                          min_component_size: int = 50):\n    seg = io.imread(input_seg)\n    seg[seg == 255] = 1\n    image = io.imread(input_image)\n    image = image.sum(2)\n    mask = image == (3 * 255)\n    # the dataset has large white areas in which road segmentations can exist but no image information is available.\n    # Remove the road label in these areas\n    mask = generic_filter_components(mask, filter_fn=lambda ids, sizes: [i for j, i in enumerate(ids) if\n                                                                         sizes[j] > min_component_size])\n    mask = binary_fill_holes(mask)\n    seg[mask] = 0\n    io.imsave(output_seg, seg, check_contrast=False)\n    shutil.copy(input_image, output_image)\n\n\nif __name__ == \"__main__\":\n    # extracted archive from https://www.kaggle.com/datasets/insaff/massachusetts-roads-dataset?resource=download\n    source = '/media/fabian/data/raw_datasets/Massachussetts_road_seg/road_segmentation_ideal'\n\n    dataset_name = 'Dataset120_RoadSegmentation'\n\n    imagestr = join(nnUNet_raw, dataset_name, 'imagesTr')\n    imagests = join(nnUNet_raw, dataset_name, 'imagesTs')\n    labelstr = join(nnUNet_raw, dataset_name, 'labelsTr')\n    labelsts = join(nnUNet_raw, dataset_name, 'labelsTs')\n    maybe_mkdir_p(imagestr)\n    maybe_mkdir_p(imagests)\n    maybe_mkdir_p(labelstr)\n    maybe_mkdir_p(labelsts)\n\n    train_source = join(source, 'training')\n    test_source = join(source, 
'testing')\n\n    with multiprocessing.get_context(\"spawn\").Pool(8) as p:\n\n        # not all training images have a segmentation\n        valid_ids = subfiles(join(train_source, 'output'), join=False, suffix='png')\n        num_train = len(valid_ids)\n        r = []\n        for v in valid_ids:\n            r.append(\n                p.starmap_async(\n                    load_and_covnert_case,\n                    ((\n                         join(train_source, 'input', v),\n                         join(train_source, 'output', v),\n                         join(imagestr, v[:-4] + '_0000.png'),\n                         join(labelstr, v),\n                         50\n                     ),)\n                )\n            )\n\n        # test set\n        valid_ids = subfiles(join(test_source, 'output'), join=False, suffix='png')\n        for v in valid_ids:\n            r.append(\n                p.starmap_async(\n                    load_and_covnert_case,\n                    ((\n                         join(test_source, 'input', v),\n                         join(test_source, 'output', v),\n                         join(imagests, v[:-4] + '_0000.png'),\n                         join(labelsts, v),\n                         50\n                     ),)\n                )\n            )\n        _ = [i.get() for i in r]\n\n    generate_dataset_json(join(nnUNet_raw, dataset_name), {0: 'R', 1: 'G', 2: 'B'}, {'background': 0, 'road': 1},\n                          num_train, '.png', dataset_name=dataset_name)\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/dataset_conversion/Dataset137_BraTS21.py",
    "content": "import multiprocessing\nimport shutil\nfrom multiprocessing import Pool\n\nimport SimpleITK as sitk\nimport numpy as np\nfrom batchgenerators.utilities.file_and_folder_operations import *\nfrom nnunetv2.dataset_conversion.generate_dataset_json import generate_dataset_json\nfrom nnunetv2.paths import nnUNet_raw\n\n\ndef copy_BraTS_segmentation_and_convert_labels_to_nnUNet(in_file: str, out_file: str) -> None:\n    # use this for segmentation only!!!\n    # nnUNet wants the labels to be continuous. BraTS is 0, 1, 2, 4 -> we make that into 0, 1, 2, 3\n    img = sitk.ReadImage(in_file)\n    img_npy = sitk.GetArrayFromImage(img)\n\n    uniques = np.unique(img_npy)\n    for u in uniques:\n        if u not in [0, 1, 2, 4]:\n            raise RuntimeError('unexpected label')\n\n    seg_new = np.zeros_like(img_npy)\n    seg_new[img_npy == 4] = 3\n    seg_new[img_npy == 2] = 1\n    seg_new[img_npy == 1] = 2\n    img_corr = sitk.GetImageFromArray(seg_new)\n    img_corr.CopyInformation(img)\n    sitk.WriteImage(img_corr, out_file)\n\n\ndef convert_labels_back_to_BraTS(seg: np.ndarray):\n    new_seg = np.zeros_like(seg)\n    new_seg[seg == 1] = 2\n    new_seg[seg == 3] = 4\n    new_seg[seg == 2] = 1\n    return new_seg\n\n\ndef load_convert_labels_back_to_BraTS(filename, input_folder, output_folder):\n    a = sitk.ReadImage(join(input_folder, filename))\n    b = sitk.GetArrayFromImage(a)\n    c = convert_labels_back_to_BraTS(b)\n    d = sitk.GetImageFromArray(c)\n    d.CopyInformation(a)\n    sitk.WriteImage(d, join(output_folder, filename))\n\n\ndef convert_folder_with_preds_back_to_BraTS_labeling_convention(input_folder: str, output_folder: str, num_processes: int = 12):\n    \"\"\"\n    reads all prediction files (nifti) in the input folder, converts the labels back to BraTS convention and saves the\n    \"\"\"\n    maybe_mkdir_p(output_folder)\n    nii = subfiles(input_folder, suffix='.nii.gz', join=False)\n    with 
multiprocessing.get_context(\"spawn\").Pool(num_processes) as p:\n        p.starmap(load_convert_labels_back_to_BraTS, zip(nii, [input_folder] * len(nii), [output_folder] * len(nii)))\n\n\nif __name__ == '__main__':\n    # brats_data_dir = '/home/isensee/drives/E132-Rohdaten/BraTS_2021/training'\n    brats_data_dir = \"/data/nnUNet_raw_data/original/\"\n\n    task_id = 137\n    task_name = \"BraTS2021\"\n\n    foldername = \"Dataset%03.0d_%s\" % (task_id, task_name)\n\n    # setting up nnU-Net folders\n    out_base = join(nnUNet_raw, foldername)\n    imagestr = join(out_base, \"imagesTr\")\n    labelstr = join(out_base, \"labelsTr\")\n    maybe_mkdir_p(imagestr)\n    maybe_mkdir_p(labelstr)\n\n    case_ids = subdirs(brats_data_dir, prefix='BraTS', join=False)\n\n    for c in case_ids:\n        shutil.copy(join(brats_data_dir, c, c + \"_t1.nii.gz\"), join(imagestr, c + '_0000.nii.gz'))\n        shutil.copy(join(brats_data_dir, c, c + \"_t1ce.nii.gz\"), join(imagestr, c + '_0001.nii.gz'))\n        shutil.copy(join(brats_data_dir, c, c + \"_t2.nii.gz\"), join(imagestr, c + '_0002.nii.gz'))\n        shutil.copy(join(brats_data_dir, c, c + \"_flair.nii.gz\"), join(imagestr, c + '_0003.nii.gz'))\n\n        copy_BraTS_segmentation_and_convert_labels_to_nnUNet(join(brats_data_dir, c, c + \"_seg.nii.gz\"),\n                                                             join(labelstr, c + '.nii.gz'))\n\n    generate_dataset_json(out_base,\n                          channel_names={0: 'T1', 1: 'T1ce', 2: 'T2', 3: 'Flair'},\n                          labels={\n                              'background': 0,\n                              'whole tumor': (1, 2, 3),\n                              'tumor core': (2, 3),\n                              'enhancing tumor': (3, )\n                          },\n                          num_training_cases=len(case_ids),\n                          file_ending='.nii.gz',\n                          regions_class_order=(1, 2, 3),\n               
           license='see https://www.synapse.org/#!Synapse:syn25829067/wiki/610863',\n                          reference='see https://www.synapse.org/#!Synapse:syn25829067/wiki/610863',\n                          dataset_release='1.0')\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/dataset_conversion/Dataset218_Amos2022_task1.py",
    "content": "from batchgenerators.utilities.file_and_folder_operations import *\nimport shutil\nfrom generate_dataset_json import generate_dataset_json\n# from nnunetv2.paths import nnUNet_raw\nnnUNet_raw = '/data/linshan/nnunet_data/nnUNet_raw'\n\ndef convert_amos_task1(amos_base_dir: str, nnunet_dataset_id: int = 218):\n    \"\"\"\n    AMOS doesn't say anything about how the validation set is supposed to be used. So we just incorporate that into\n    the train set. Having a 5-fold cross-validation is superior to a single train:val split\n    \"\"\"\n    task_name = \"AMOS2022_postChallenge_task1\"\n\n    foldername = \"Dataset%03.0d_%s\" % (nnunet_dataset_id, task_name)\n\n    # setting up nnU-Net folders\n    out_base = join(nnUNet_raw, foldername)\n    imagestr = join(out_base, \"imagesTr\")\n    imagests = join(out_base, \"imagesTs\")\n    labelstr = join(out_base, \"labelsTr\")\n    maybe_mkdir_p(imagestr)\n    maybe_mkdir_p(imagests)\n    maybe_mkdir_p(labelstr)\n\n    dataset_json_source = load_json(join(amos_base_dir, 'dataset.json'))\n\n    training_identifiers = [i['image'].split('/')[-1][:-7] for i in dataset_json_source['training']]\n    tr_ctr = 0\n    for tr in training_identifiers:\n        if int(tr.split(\"_\")[-1]) <= 410: # these are the CT images\n            tr_ctr += 1\n            shutil.copy(join(amos_base_dir, 'imagesTr', tr + '.nii.gz'), join(imagestr, f'{tr}_0000.nii.gz'))\n            shutil.copy(join(amos_base_dir, 'labelsTr', tr + '.nii.gz'), join(labelstr, f'{tr}.nii.gz'))\n\n    test_identifiers = [i['image'].split('/')[-1][:-7] for i in dataset_json_source['test']]\n    for ts in test_identifiers:\n        if int(ts.split(\"_\")[-1]) <= 500: # these are the CT images\n            shutil.copy(join(amos_base_dir, 'imagesTs', ts + '.nii.gz'), join(imagests, f'{ts}_0000.nii.gz'))\n\n    val_identifiers = [i['image'].split('/')[-1][:-7] for i in dataset_json_source['validation']]\n    for vl in val_identifiers:\n        if 
int(vl.split(\"_\")[-1]) <= 409: # these are the CT images\n            tr_ctr += 1\n            shutil.copy(join(amos_base_dir, 'imagesVa', vl + '.nii.gz'), join(imagestr, f'{vl}_0000.nii.gz'))\n            shutil.copy(join(amos_base_dir, 'labelsVa', vl + '.nii.gz'), join(labelstr, f'{vl}.nii.gz'))\n\n    generate_dataset_json(out_base, {0: \"CT\"}, labels={v: int(k) for k,v in dataset_json_source['labels'].items()},\n                          num_training_cases=tr_ctr, file_ending='.nii.gz',\n                          dataset_name=task_name, reference='https://amos22.grand-challenge.org/',\n                          release='https://zenodo.org/record/7262581',\n                          overwrite_image_reader_writer='NibabelIOWithReorient',\n                          description=\"This is the dataset as released AFTER the challenge event. It has the \"\n                                      \"validation set gt in it! We just use the validation images as additional \"\n                                      \"training cases because AMOS doesn't specify how they should be used. nnU-Net's\"\n                                      \" 5-fold CV is better than some random train:val split.\")\n\n\nif __name__ == '__main__':\n    import argparse\n    parser = argparse.ArgumentParser()\n    parser.add_argument('input_folder', type=str, default='/data/linshan/CTs/Amos2022/',  \n                        help=\"The downloaded and extracted AMOS2022 (https://amos22.grand-challenge.org/) data. \"\n                             \"Use this link: https://zenodo.org/record/7262581.\"\n                             \"You need to specify the folder with the imagesTr, imagesVal, labelsTr etc subfolders here!\")\n    parser.add_argument('-d', required=False, type=int, default=218, help='nnU-Net Dataset ID, default: 218')\n    args = parser.parse_args()\n    amos_base = args.input_folder\n    convert_amos_task1(amos_base, args.d)\n\n\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/dataset_conversion/Dataset219_Amos2022_task2.py",
    "content": "from batchgenerators.utilities.file_and_folder_operations import *\nimport shutil\nfrom nnunetv2.dataset_conversion.generate_dataset_json import generate_dataset_json\nfrom nnunetv2.paths import nnUNet_raw\n\n\ndef convert_amos_task2(amos_base_dir: str, nnunet_dataset_id: int = 219):\n    \"\"\"\n    AMOS doesn't say anything about how the validation set is supposed to be used. So we just incorporate that into\n    the train set. Having a 5-fold cross-validation is superior to a single train:val split\n    \"\"\"\n    task_name = \"AMOS2022_postChallenge_task2\"\n\n    foldername = \"Dataset%03.0d_%s\" % (nnunet_dataset_id, task_name)\n\n    # setting up nnU-Net folders\n    out_base = join(nnUNet_raw, foldername)\n    imagestr = join(out_base, \"imagesTr\")\n    imagests = join(out_base, \"imagesTs\")\n    labelstr = join(out_base, \"labelsTr\")\n    maybe_mkdir_p(imagestr)\n    maybe_mkdir_p(imagests)\n    maybe_mkdir_p(labelstr)\n\n    dataset_json_source = load_json(join(amos_base_dir, 'dataset.json'))\n\n    training_identifiers = [i['image'].split('/')[-1][:-7] for i in dataset_json_source['training']]\n    for tr in training_identifiers:\n        shutil.copy(join(amos_base_dir, 'imagesTr', tr + '.nii.gz'), join(imagestr, f'{tr}_0000.nii.gz'))\n        shutil.copy(join(amos_base_dir, 'labelsTr', tr + '.nii.gz'), join(labelstr, f'{tr}.nii.gz'))\n\n    test_identifiers = [i['image'].split('/')[-1][:-7] for i in dataset_json_source['test']]\n    for ts in test_identifiers:\n        shutil.copy(join(amos_base_dir, 'imagesTs', ts + '.nii.gz'), join(imagests, f'{ts}_0000.nii.gz'))\n\n    val_identifiers = [i['image'].split('/')[-1][:-7] for i in dataset_json_source['validation']]\n    for vl in val_identifiers:\n        shutil.copy(join(amos_base_dir, 'imagesVa', vl + '.nii.gz'), join(imagestr, f'{vl}_0000.nii.gz'))\n        shutil.copy(join(amos_base_dir, 'labelsVa', vl + '.nii.gz'), join(labelstr, f'{vl}.nii.gz'))\n\n    
generate_dataset_json(out_base, {0: \"either_CT_or_MR\"}, labels={v: int(k) for k,v in dataset_json_source['labels'].items()},\n                          num_training_cases=len(training_identifiers) + len(val_identifiers), file_ending='.nii.gz',\n                          dataset_name=task_name, reference='https://amos22.grand-challenge.org/',\n                          release='https://zenodo.org/record/7262581',\n                          overwrite_image_reader_writer='NibabelIOWithReorient',\n                          description=\"This is the dataset as released AFTER the challenge event. It has the \"\n                                      \"validation set gt in it! We just use the validation images as additional \"\n                                      \"training cases because AMOS doesn't specify how they should be used. nnU-Net's\"\n                                      \" 5-fold CV is better than some random train:val split.\")\n\n\nif __name__ == '__main__':\n    import argparse\n    parser = argparse.ArgumentParser()\n    parser.add_argument('input_folder', type=str,\n                        help=\"The downloaded and extracted AMOS2022 (https://amos22.grand-challenge.org/) data. \"\n                             \"Use this link: https://zenodo.org/record/7262581.\"\n                             \"You need to specify the folder with the imagesTr, imagesVal, labelsTr etc subfolders here!\")\n    parser.add_argument('-d', required=False, type=int, default=219, help='nnU-Net Dataset ID, default: 219')\n    args = parser.parse_args()\n    amos_base = args.input_folder\n    convert_amos_task2(amos_base, args.d)\n\n    # /home/isensee/Downloads/amos22/amos22/\n\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/dataset_conversion/Dataset220_KiTS2023.py",
    "content": "from batchgenerators.utilities.file_and_folder_operations import *\nimport shutil\nfrom generate_dataset_json import generate_dataset_json\n#from nnunetv2.paths import nnUNet_raw\nnnUNet_raw = '/data/linshan/nnunet_data/nnUNet_raw'\n\ndef convert_kits2023(kits_base_dir: str, nnunet_dataset_id: int = 220):\n    task_name = \"KiTS2023\"\n\n    foldername = \"Dataset%03.0d_%s\" % (nnunet_dataset_id, task_name)\n\n    # setting up nnU-Net folders\n    out_base = join(nnUNet_raw, foldername)\n    imagestr = join(out_base, \"imagesTr\")\n    labelstr = join(out_base, \"labelsTr\")\n    maybe_mkdir_p(imagestr)\n    maybe_mkdir_p(labelstr)\n\n    cases = subdirs(kits_base_dir, prefix='case_', join=False)\n    for tr in cases:\n        shutil.copy(join(kits_base_dir, tr, 'imaging.nii.gz'), join(imagestr, f'{tr}_0000.nii.gz'))\n        shutil.copy(join(kits_base_dir, tr, 'segmentation.nii.gz'), join(labelstr, f'{tr}.nii.gz'))\n\n    generate_dataset_json(out_base, {0: \"CT\"},\n                          labels={\n                              \"background\": 0,\n                              \"kidney\": (1, 2, 3),\n                              \"masses\": (2, 3),\n                              \"tumor\": 2\n                          },\n                          regions_class_order=(1, 3, 2),\n                          num_training_cases=len(cases), file_ending='.nii.gz',\n                          dataset_name=task_name, reference='none',\n                          release='prerelease',\n                          overwrite_image_reader_writer='NibabelIOWithReorient',\n                          description=\"KiTS2023\")\n\n\nif __name__ == '__main__':\n    import argparse\n    parser = argparse.ArgumentParser()\n    parser.add_argument('input_folder', type=str,\n                        help=\"The downloaded and extracted KiTS2023 dataset (must have case_XXXXX subfolders)\")\n    parser.add_argument('-d', required=False, type=int, default=220, help='nnU-Net 
Dataset ID, default: 220')\n    args = parser.parse_args()\n    amos_base = args.input_folder\n    convert_kits2023(amos_base, args.d)\n\n    # /media/isensee/raw_data/raw_datasets/kits23/dataset\n\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/dataset_conversion/Dataset221_AutoPETII_2023.py",
    "content": "from batchgenerators.utilities.file_and_folder_operations import *\nimport shutil\nfrom nnunetv2.dataset_conversion.generate_dataset_json import generate_dataset_json\nfrom nnunetv2.paths import nnUNet_raw, nnUNet_preprocessed\n\n\ndef convert_autopet(autopet_base_dir:str = '/media/isensee/My Book1/AutoPET/nifti/FDG-PET-CT-Lesions',\n                     nnunet_dataset_id: int = 221):\n    task_name = \"AutoPETII_2023\"\n\n    foldername = \"Dataset%03.0d_%s\" % (nnunet_dataset_id, task_name)\n\n    # setting up nnU-Net folders\n    out_base = join(nnUNet_raw, foldername)\n    imagestr = join(out_base, \"imagesTr\")\n    labelstr = join(out_base, \"labelsTr\")\n    maybe_mkdir_p(imagestr)\n    maybe_mkdir_p(labelstr)\n\n    patients = subdirs(autopet_base_dir, prefix='PETCT', join=False)\n    n = 0\n    identifiers = []\n    for pat in patients:\n        patient_acquisitions = subdirs(join(autopet_base_dir, pat), join=False)\n        for pa in patient_acquisitions:\n            n += 1\n            identifier = f\"{pat}_{pa}\"\n            identifiers.append(identifier)\n            if not isfile(join(imagestr, f'{identifier}_0000.nii.gz')):\n                shutil.copy(join(autopet_base_dir, pat, pa, 'CTres.nii.gz'), join(imagestr, f'{identifier}_0000.nii.gz'))\n            if not isfile(join(imagestr, f'{identifier}_0001.nii.gz')):\n                shutil.copy(join(autopet_base_dir, pat, pa, 'SUV.nii.gz'), join(imagestr, f'{identifier}_0001.nii.gz'))\n            if not isfile(join(imagestr, f'{identifier}.nii.gz')):\n                shutil.copy(join(autopet_base_dir, pat, pa, 'SEG.nii.gz'), join(labelstr, f'{identifier}.nii.gz'))\n\n    generate_dataset_json(out_base, {0: \"CT\", 1:\"CT\"},\n                          labels={\n                              \"background\": 0,\n                              \"tumor\": 1\n                          },\n                          num_training_cases=n, file_ending='.nii.gz',\n                          
dataset_name=task_name, reference='https://autopet-ii.grand-challenge.org/',\n                          release='release',\n                          # overwrite_image_reader_writer='NibabelIOWithReorient',\n                          description=task_name)\n\n    # manual split\n    splits = []\n    for fold in range(5):\n        val_patients = patients[fold :: 5]\n        splits.append(\n            {\n                'train': [i for i in identifiers if not any([i.startswith(v) for v in val_patients])],\n                'val': [i for i in identifiers if any([i.startswith(v) for v in val_patients])],\n            }\n        )\n    pp_out_dir = join(nnUNet_preprocessed, foldername)\n    maybe_mkdir_p(pp_out_dir)\n    save_json(splits, join(pp_out_dir, 'splits_final.json'), sort_keys=False)\n\n\nif __name__ == '__main__':\n    import argparse\n    parser = argparse.ArgumentParser()\n    parser.add_argument('input_folder', type=str,\n                        help=\"The downloaded and extracted autopet dataset (must have PETCT_XXX subfolders)\")\n    parser.add_argument('-d', required=False, type=int, default=221, help='nnU-Net Dataset ID, default: 221')\n    args = parser.parse_args()\n    amos_base = args.input_folder\n    convert_autopet(amos_base, args.d)\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/dataset_conversion/Dataset988_dummyDataset4.py",
    "content": "import os\n\nfrom batchgenerators.utilities.file_and_folder_operations import *\n\nfrom nnunetv2.paths import nnUNet_raw\nfrom nnunetv2.utilities.utils import get_filenames_of_train_images_and_targets\n\nif __name__ == '__main__':\n    # creates a dummy dataset where there are no files in imagestr and labelstr\n    source_dataset = 'Dataset004_Hippocampus'\n\n    target_dataset = 'Dataset987_dummyDataset4'\n    target_dataset_dir = join(nnUNet_raw, target_dataset)\n    maybe_mkdir_p(target_dataset_dir)\n\n    dataset = get_filenames_of_train_images_and_targets(join(nnUNet_raw, source_dataset))\n\n    # the returned dataset will have absolute paths. We should use relative paths so that you can freely copy\n    # datasets around between systems. As long as the source dataset is there it will continue working even if\n    # nnUNet_raw is in different locations\n\n    # paths must be relative to target_dataset_dir!!!\n    for k in dataset.keys():\n        dataset[k]['label'] = os.path.relpath(dataset[k]['label'], target_dataset_dir)\n        dataset[k]['images'] = [os.path.relpath(i, target_dataset_dir) for i in dataset[k]['images']]\n\n    # load old dataset.json\n    dataset_json = load_json(join(nnUNet_raw, source_dataset, 'dataset.json'))\n    dataset_json['dataset'] = dataset\n\n    # save\n    save_json(dataset_json, join(target_dataset_dir, 'dataset.json'), sort_keys=False)\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/dataset_conversion/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/dataset_conversion/convert_MSD_dataset.py",
    "content": "import argparse\nimport multiprocessing\nimport shutil\nfrom multiprocessing import Pool\nfrom typing import Optional\nimport SimpleITK as sitk\nfrom batchgenerators.utilities.file_and_folder_operations import *\nfrom nnunetv2.paths import nnUNet_raw\nfrom nnunetv2.utilities.dataset_name_id_conversion import find_candidate_datasets\nfrom nnunetv2.configuration import default_num_processes\nimport numpy as np\n\n\ndef split_4d_nifti(filename, output_folder):\n    img_itk = sitk.ReadImage(filename)\n    dim = img_itk.GetDimension()\n    file_base = os.path.basename(filename)\n    if dim == 3:\n        shutil.copy(filename, join(output_folder, file_base[:-7] + \"_0000.nii.gz\"))\n        return\n    elif dim != 4:\n        raise RuntimeError(\"Unexpected dimensionality: %d of file %s, cannot split\" % (dim, filename))\n    else:\n        img_npy = sitk.GetArrayFromImage(img_itk)\n        spacing = img_itk.GetSpacing()\n        origin = img_itk.GetOrigin()\n        direction = np.array(img_itk.GetDirection()).reshape(4,4)\n        # now modify these to remove the fourth dimension\n        spacing = tuple(list(spacing[:-1]))\n        origin = tuple(list(origin[:-1]))\n        direction = tuple(direction[:-1, :-1].reshape(-1))\n        for i, t in enumerate(range(img_npy.shape[0])):\n            img = img_npy[t]\n            img_itk_new = sitk.GetImageFromArray(img)\n            img_itk_new.SetSpacing(spacing)\n            img_itk_new.SetOrigin(origin)\n            img_itk_new.SetDirection(direction)\n            sitk.WriteImage(img_itk_new, join(output_folder, file_base[:-7] + \"_%04.0d.nii.gz\" % i))\n\n\ndef convert_msd_dataset(source_folder: str, overwrite_target_id: Optional[int] = None,\n                        num_processes: int = default_num_processes) -> None:\n    if source_folder.endswith('/') or source_folder.endswith('\\\\'):\n        source_folder = source_folder[:-1]\n\n    labelsTr = join(source_folder, 'labelsTr')\n    imagesTs = 
join(source_folder, 'imagesTs')\n    imagesTr = join(source_folder, 'imagesTr')\n    assert isdir(labelsTr), f\"labelsTr subfolder missing in source folder\"\n    assert isdir(imagesTs), f\"imagesTs subfolder missing in source folder\"\n    assert isdir(imagesTr), f\"imagesTr subfolder missing in source folder\"\n    dataset_json = join(source_folder, 'dataset.json')\n    assert isfile(dataset_json), f\"dataset.json missing in source_folder\"\n\n    # infer source dataset id and name\n    task, dataset_name = os.path.basename(source_folder).split('_')\n    task_id = int(task[4:])\n\n    # check if target dataset id is taken\n    target_id = task_id if overwrite_target_id is None else overwrite_target_id\n    existing_datasets = find_candidate_datasets(target_id)\n    assert len(existing_datasets) == 0, f\"Target dataset id {target_id} is already taken, please consider changing \" \\\n                                        f\"it using overwrite_target_id. Conflicting dataset: {existing_datasets} (check nnUNet_results, nnUNet_preprocessed and nnUNet_raw!)\"\n\n    target_dataset_name = f\"Dataset{target_id:03d}_{dataset_name}\"\n    target_folder = join(nnUNet_raw, target_dataset_name)\n    target_imagesTr = join(target_folder, 'imagesTr')\n    target_imagesTs = join(target_folder, 'imagesTs')\n    target_labelsTr = join(target_folder, 'labelsTr')\n    maybe_mkdir_p(target_imagesTr)\n    maybe_mkdir_p(target_imagesTs)\n    maybe_mkdir_p(target_labelsTr)\n\n    with multiprocessing.get_context(\"spawn\").Pool(num_processes) as p:\n        results = []\n\n        # convert 4d train images\n        source_images = [i for i in subfiles(imagesTr, suffix='.nii.gz', join=False) if\n                         not i.startswith('.') and not i.startswith('_')]\n        source_images = [join(imagesTr, i) for i in source_images]\n\n        results.append(\n            p.starmap_async(\n                split_4d_nifti, zip(source_images, [target_imagesTr] * len(source_images))\n     
       )\n        )\n\n        # convert 4d test images\n        source_images = [i for i in subfiles(imagesTs, suffix='.nii.gz', join=False) if\n                         not i.startswith('.') and not i.startswith('_')]\n        source_images = [join(imagesTs, i) for i in source_images]\n\n        results.append(\n            p.starmap_async(\n                split_4d_nifti, zip(source_images, [target_imagesTs] * len(source_images))\n            )\n        )\n\n        # copy segmentations\n        source_images = [i for i in subfiles(labelsTr, suffix='.nii.gz', join=False) if\n                         not i.startswith('.') and not i.startswith('_')]\n        for s in source_images:\n            shutil.copy(join(labelsTr, s), join(target_labelsTr, s))\n\n        [i.get() for i in results]\n\n    dataset_json = load_json(dataset_json)\n    dataset_json['labels'] = {j: int(i) for i, j in dataset_json['labels'].items()}\n    dataset_json['file_ending'] = \".nii.gz\"\n    dataset_json[\"channel_names\"] = dataset_json[\"modality\"]\n    del dataset_json[\"modality\"]\n    del dataset_json[\"training\"]\n    del dataset_json[\"test\"]\n    save_json(dataset_json, join(nnUNet_raw, target_dataset_name, 'dataset.json'), sort_keys=False)\n\n\ndef entry_point():\n    parser = argparse.ArgumentParser()\n    parser.add_argument('-i', type=str, required=True,\n                        help='Downloaded and extracted MSD dataset folder. CANNOT be nnUNetv1 dataset! Example: '\n                             '/home/fabian/Downloads/Task05_Prostate')\n    parser.add_argument('-overwrite_id', type=int, required=False, default=None,\n                        help='Overwrite the dataset id. If not set we use the id of the MSD task (inferred from '\n                             'folder name). 
Only use this if you already have an equivalently numbered dataset!')\n    parser.add_argument('-np', type=int, required=False, default=default_num_processes,\n                        help=f'Number of processes used. Default: {default_num_processes}')\n    args = parser.parse_args()\n    convert_msd_dataset(args.i, args.overwrite_id, args.np)\n\n\nif __name__ == '__main__':\n    convert_msd_dataset('/data/jiaxin/data/10_Decathlon/Task01_BrainTumour', overwrite_target_id=201)\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/dataset_conversion/convert_raw_dataset_from_old_nnunet_format.py",
    "content": "import shutil\nfrom copy import deepcopy\n\nfrom batchgenerators.utilities.file_and_folder_operations import join, maybe_mkdir_p, isdir, load_json, save_json\nfrom nnunetv2.paths import nnUNet_raw\n\n\ndef convert(source_folder, target_dataset_name):\n    \"\"\"\n    remember that old tasks were called TaskXXX_YYY and new ones are called DatasetXXX_YYY\n    source_folder\n    \"\"\"\n    if isdir(join(nnUNet_raw, target_dataset_name)):\n        raise RuntimeError(f'Target dataset name {target_dataset_name} already exists. Aborting... '\n                           f'(we might break something). If you are sure you want to proceed, please manually '\n                           f'delete {join(nnUNet_raw, target_dataset_name)}')\n    maybe_mkdir_p(join(nnUNet_raw, target_dataset_name))\n    shutil.copytree(join(source_folder, 'imagesTr'), join(nnUNet_raw, target_dataset_name, 'imagesTr'))\n    shutil.copytree(join(source_folder, 'labelsTr'), join(nnUNet_raw, target_dataset_name, 'labelsTr'))\n    if isdir(join(source_folder, 'imagesTs')):\n        shutil.copytree(join(source_folder, 'imagesTs'), join(nnUNet_raw, target_dataset_name, 'imagesTs'))\n    if isdir(join(source_folder, 'labelsTs')):\n        shutil.copytree(join(source_folder, 'labelsTs'), join(nnUNet_raw, target_dataset_name, 'labelsTs'))\n    if isdir(join(source_folder, 'imagesVal')):\n        shutil.copytree(join(source_folder, 'imagesVal'), join(nnUNet_raw, target_dataset_name, 'imagesVal'))\n    if isdir(join(source_folder, 'labelsVal')):\n        shutil.copytree(join(source_folder, 'labelsVal'), join(nnUNet_raw, target_dataset_name, 'labelsVal'))\n    shutil.copy(join(source_folder, 'dataset.json'), join(nnUNet_raw, target_dataset_name))\n\n    dataset_json = load_json(join(nnUNet_raw, target_dataset_name, 'dataset.json'))\n    del dataset_json['tensorImageSize']\n    del dataset_json['numTest']\n    del dataset_json['training']\n    del dataset_json['test']\n    
dataset_json['channel_names'] = deepcopy(dataset_json['modality'])\n    del dataset_json['modality']\n\n    dataset_json['labels'] = {j: int(i) for i, j in dataset_json['labels'].items()}\n    dataset_json['file_ending'] = \".nii.gz\"\n    save_json(dataset_json, join(nnUNet_raw, target_dataset_name, 'dataset.json'), sort_keys=False)\n\n\ndef convert_entry_point():\n    import argparse\n    parser = argparse.ArgumentParser()\n    parser.add_argument(\"input_folder\", type=str,\n                        help='Raw old nnUNet dataset. This must be the folder with imagesTr,labelsTr etc subfolders! '\n                             'Please provide the PATH to the old Task, not just the task name. nnU-Net V2 does not '\n                             'know where v1 tasks are.')\n    parser.add_argument(\"output_dataset_name\", type=str,\n                        help='New dataset NAME (not path!). Must follow the DatasetXXX_NAME convention!')\n    args = parser.parse_args()\n    convert(args.input_folder, args.output_dataset_name)\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/dataset_conversion/datasets_for_integration_tests/Dataset996_IntegrationTest_Hippocampus_regions_ignore.py",
    "content": "import SimpleITK as sitk\nimport shutil\n\nimport numpy as np\nfrom batchgenerators.utilities.file_and_folder_operations import isdir, join, load_json, save_json, nifti_files\n\nfrom nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name\nfrom nnunetv2.paths import nnUNet_raw\nfrom nnunetv2.utilities.label_handling.label_handling import LabelManager\nfrom nnunetv2.utilities.plans_handling.plans_handler import PlansManager, ConfigurationManager\n\n\ndef sparsify_segmentation(seg: np.ndarray, label_manager: LabelManager, percent_of_slices: float) -> np.ndarray:\n        assert label_manager.has_ignore_label, \"This preprocessor only works with datasets that have an ignore label!\"\n        seg_new = np.ones_like(seg) * label_manager.ignore_label\n        x, y, z = seg.shape\n        # x\n        num_slices = max(1, round(x * percent_of_slices))\n        selected_slices = np.random.choice(x, num_slices, replace=False)\n        seg_new[selected_slices] = seg[selected_slices]\n        # y\n        num_slices = max(1, round(y * percent_of_slices))\n        selected_slices = np.random.choice(y, num_slices, replace=False)\n        seg_new[:, selected_slices] = seg[:, selected_slices]\n        # z\n        num_slices = max(1, round(z * percent_of_slices))\n        selected_slices = np.random.choice(z, num_slices, replace=False)\n        seg_new[:, :, selected_slices] = seg[:, :, selected_slices]\n        return seg_new\n\n\nif __name__ == '__main__':\n    dataset_name = 'IntegrationTest_Hippocampus_regions_ignore'\n    dataset_id = 996\n    dataset_name = f\"Dataset{dataset_id:03d}_{dataset_name}\"\n\n    try:\n        existing_dataset_name = maybe_convert_to_dataset_name(dataset_id)\n        if existing_dataset_name != dataset_name:\n            raise FileExistsError(f\"A different dataset with id {dataset_id} already exists :-(: {existing_dataset_name}. 
If \"\n                               f\"you intent to delete it, remember to also remove it in nnUNet_preprocessed and \"\n                               f\"nnUNet_results!\")\n    except RuntimeError:\n        pass\n\n    if isdir(join(nnUNet_raw, dataset_name)):\n        shutil.rmtree(join(nnUNet_raw, dataset_name))\n\n    source_dataset = maybe_convert_to_dataset_name(4)\n    shutil.copytree(join(nnUNet_raw, source_dataset), join(nnUNet_raw, dataset_name))\n\n    # additionally optimize entire hippocampus region, remove Posterior\n    dj = load_json(join(nnUNet_raw, dataset_name, 'dataset.json'))\n    dj['labels'] = {\n        'background': 0,\n        'hippocampus': (1, 2),\n        'anterior': 1,\n        'ignore': 3\n    }\n    dj['regions_class_order'] = (2, 1)\n    save_json(dj, join(nnUNet_raw, dataset_name, 'dataset.json'), sort_keys=False)\n\n    # now add ignore label to segmentation images\n    np.random.seed(1234)\n    lm = LabelManager(label_dict=dj['labels'], regions_class_order=dj.get('regions_class_order'))\n\n    segs = nifti_files(join(nnUNet_raw, dataset_name, 'labelsTr'))\n    for s in segs:\n        seg_itk = sitk.ReadImage(s)\n        seg_npy = sitk.GetArrayFromImage(seg_itk)\n        seg_npy = sparsify_segmentation(seg_npy, lm, 0.1 / 3)\n        seg_itk_new = sitk.GetImageFromArray(seg_npy)\n        seg_itk_new.CopyInformation(seg_itk)\n        sitk.WriteImage(seg_itk_new, s)\n\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/dataset_conversion/datasets_for_integration_tests/Dataset997_IntegrationTest_Hippocampus_regions.py",
    "content": "import shutil\n\nfrom batchgenerators.utilities.file_and_folder_operations import isdir, join, load_json, save_json\n\nfrom nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name\nfrom nnunetv2.paths import nnUNet_raw\n\nif __name__ == '__main__':\n    dataset_name = 'IntegrationTest_Hippocampus_regions'\n    dataset_id = 997\n    dataset_name = f\"Dataset{dataset_id:03d}_{dataset_name}\"\n\n    try:\n        existing_dataset_name = maybe_convert_to_dataset_name(dataset_id)\n        if existing_dataset_name != dataset_name:\n            raise FileExistsError(\n                f\"A different dataset with id {dataset_id} already exists :-(: {existing_dataset_name}. If \"\n                f\"you intent to delete it, remember to also remove it in nnUNet_preprocessed and \"\n                f\"nnUNet_results!\")\n    except RuntimeError:\n        pass\n\n    if isdir(join(nnUNet_raw, dataset_name)):\n        shutil.rmtree(join(nnUNet_raw, dataset_name))\n\n    source_dataset = maybe_convert_to_dataset_name(4)\n    shutil.copytree(join(nnUNet_raw, source_dataset), join(nnUNet_raw, dataset_name))\n\n    # additionally optimize entire hippocampus region, remove Posterior\n    dj = load_json(join(nnUNet_raw, dataset_name, 'dataset.json'))\n    dj['labels'] = {\n        'background': 0,\n        'hippocampus': (1, 2),\n        'anterior': 1\n    }\n    dj['regions_class_order'] = (2, 1)\n    save_json(dj, join(nnUNet_raw, dataset_name, 'dataset.json'), sort_keys=False)\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/dataset_conversion/datasets_for_integration_tests/Dataset998_IntegrationTest_Hippocampus_ignore.py",
    "content": "import shutil\n\nfrom batchgenerators.utilities.file_and_folder_operations import isdir, join, load_json, save_json\n\nfrom nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name\nfrom nnunetv2.paths import nnUNet_raw\n\n\nif __name__ == '__main__':\n    dataset_name = 'IntegrationTest_Hippocampus_ignore'\n    dataset_id = 998\n    dataset_name = f\"Dataset{dataset_id:03d}_{dataset_name}\"\n\n    try:\n        existing_dataset_name = maybe_convert_to_dataset_name(dataset_id)\n        if existing_dataset_name != dataset_name:\n            raise FileExistsError(f\"A different dataset with id {dataset_id} already exists :-(: {existing_dataset_name}. If \"\n                               f\"you intent to delete it, remember to also remove it in nnUNet_preprocessed and \"\n                               f\"nnUNet_results!\")\n    except RuntimeError:\n        pass\n\n    if isdir(join(nnUNet_raw, dataset_name)):\n        shutil.rmtree(join(nnUNet_raw, dataset_name))\n\n    source_dataset = maybe_convert_to_dataset_name(4)\n    shutil.copytree(join(nnUNet_raw, source_dataset), join(nnUNet_raw, dataset_name))\n\n    # set class 2 to ignore label\n    dj = load_json(join(nnUNet_raw, dataset_name, 'dataset.json'))\n    dj['labels']['ignore'] = 2\n    del dj['labels']['Posterior']\n    save_json(dj, join(nnUNet_raw, dataset_name, 'dataset.json'), sort_keys=False)\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/dataset_conversion/datasets_for_integration_tests/Dataset999_IntegrationTest_Hippocampus.py",
    "content": "import shutil\n\nfrom batchgenerators.utilities.file_and_folder_operations import isdir, join\n\nfrom nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name\nfrom nnunetv2.paths import nnUNet_raw\n\n\nif __name__ == '__main__':\n    dataset_name = 'IntegrationTest_Hippocampus'\n    dataset_id = 999\n    dataset_name = f\"Dataset{dataset_id:03d}_{dataset_name}\"\n\n    try:\n        existing_dataset_name = maybe_convert_to_dataset_name(dataset_id)\n        if existing_dataset_name != dataset_name:\n            raise FileExistsError(f\"A different dataset with id {dataset_id} already exists :-(: {existing_dataset_name}. If \"\n                               f\"you intent to delete it, remember to also remove it in nnUNet_preprocessed and \"\n                               f\"nnUNet_results!\")\n    except RuntimeError:\n        pass\n\n    if isdir(join(nnUNet_raw, dataset_name)):\n        shutil.rmtree(join(nnUNet_raw, dataset_name))\n\n    source_dataset = maybe_convert_to_dataset_name(4)\n    shutil.copytree(join(nnUNet_raw, source_dataset), join(nnUNet_raw, dataset_name))\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/dataset_conversion/datasets_for_integration_tests/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/dataset_conversion/generate_dataset_json.py",
    "content": "from typing import Tuple\n\nfrom batchgenerators.utilities.file_and_folder_operations import save_json, join\n\n\ndef generate_dataset_json(output_folder: str,\n                          channel_names: dict,\n                          labels: dict,\n                          num_training_cases: int,\n                          file_ending: str,\n                          regions_class_order: Tuple[int, ...] = None,\n                          dataset_name: str = None, reference: str = None, release: str = None, license: str = None,\n                          description: str = None,\n                          overwrite_image_reader_writer: str = None, **kwargs):\n    \"\"\"\n    Generates a dataset.json file in the output folder\n\n    channel_names:\n        Channel names must map the index to the name of the channel, example:\n        {\n            0: 'T1',\n            1: 'CT'\n        }\n        Note that the channel names may influence the normalization scheme!! Learn more in the documentation.\n\n    labels:\n        This will tell nnU-Net what labels to expect. Important: This will also determine whether you use region-based training or not.\n        Example regular labels:\n        {\n            'background': 0,\n            'left atrium': 1,\n            'some other label': 2\n        }\n        Example region-based training:\n        {\n            'background': 0,\n            'whole tumor': (1, 2, 3),\n            'tumor core': (2, 3),\n            'enhancing tumor': 3\n        }\n\n        Remember that nnU-Net expects consecutive values for labels! nnU-Net also expects 0 to be background!\n\n    num_training_cases: is used to double check all cases are there!\n\n    file_ending: needed for finding the files correctly. IMPORTANT! File endings must match between images and\n    segmentations!\n\n    dataset_name, reference, release, license, description: self-explanatory and not used by nnU-Net. 
Just for\n    completeness and as a reminder that these would be great!\n\n    overwrite_image_reader_writer: If you need a special IO class for your dataset you can derive it from\n    BaseReaderWriter, place it into nnunet.imageio and reference it here by name\n\n    kwargs: whatever you put here will be placed in the dataset.json as well\n\n    \"\"\"\n    has_regions: bool = any([isinstance(i, (tuple, list)) and len(i) > 1 for i in labels.values()])\n    if has_regions:\n        assert regions_class_order is not None, f\"You have defined regions but regions_class_order is not set. \" \\\n                                                f\"You need that.\"\n    # channel names need strings as keys\n    keys = list(channel_names.keys())\n    for k in keys:\n        if not isinstance(k, str):\n            channel_names[str(k)] = channel_names[k]\n            del channel_names[k]\n\n    # labels need ints as values\n    for l in labels.keys():\n        value = labels[l]\n        if isinstance(value, (tuple, list)):\n            value = tuple([int(i) for i in value])\n            labels[l] = value\n        else:\n            labels[l] = int(labels[l])\n\n    dataset_json = {\n        'channel_names': channel_names,  # previously this was called 'modality'. I didn't like this so this is\n        # channel_names now. 
Live with it.\n        'labels': labels,\n        'numTraining': num_training_cases,\n        'file_ending': file_ending,\n    }\n\n    if dataset_name is not None:\n        dataset_json['name'] = dataset_name\n    if reference is not None:\n        dataset_json['reference'] = reference\n    if release is not None:\n        dataset_json['release'] = release\n    if license is not None:\n        dataset_json['licence'] = license\n    if description is not None:\n        dataset_json['description'] = description\n    if overwrite_image_reader_writer is not None:\n        dataset_json['overwrite_image_reader_writer'] = overwrite_image_reader_writer\n    if regions_class_order is not None:\n        dataset_json['regions_class_order'] = regions_class_order\n\n    dataset_json.update(kwargs)\n\n    save_json(dataset_json, join(output_folder, 'dataset.json'), sort_keys=False)\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/ensembling/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/ensembling/ensemble.py",
    "content": "import argparse\nimport multiprocessing\nimport shutil\nfrom copy import deepcopy\nfrom multiprocessing import Pool\nfrom typing import List, Union, Tuple\n\nimport numpy as np\nfrom batchgenerators.utilities.file_and_folder_operations import load_json, join, subfiles, \\\n    maybe_mkdir_p, isdir, save_pickle, load_pickle, isfile\nfrom nnunetv2.configuration import default_num_processes\nfrom nnunetv2.imageio.base_reader_writer import BaseReaderWriter\nfrom nnunetv2.utilities.label_handling.label_handling import LabelManager\nfrom nnunetv2.utilities.plans_handling.plans_handler import PlansManager\n\n\ndef average_probabilities(list_of_files: List[str]) -> np.ndarray:\n    assert len(list_of_files), 'At least one file must be given in list_of_files'\n    avg = None\n    for f in list_of_files:\n        if avg is None:\n            avg = np.load(f)['probabilities']\n            # maybe increase precision to prevent rounding errors\n            if avg.dtype != np.float32:\n                avg = avg.astype(np.float32)\n        else:\n            avg += np.load(f)['probabilities']\n    avg /= len(list_of_files)\n    return avg\n\n\ndef merge_files(list_of_files,\n                output_filename_truncated: str,\n                output_file_ending: str,\n                image_reader_writer: BaseReaderWriter,\n                label_manager: LabelManager,\n                save_probabilities: bool = False):\n    # load the pkl file associated with the first file in list_of_files\n    properties = load_pickle(list_of_files[0][:-4] + '.pkl')\n    # load and average predictions\n    probabilities = average_probabilities(list_of_files)\n    segmentation = label_manager.convert_logits_to_segmentation(probabilities)\n    image_reader_writer.write_seg(segmentation, output_filename_truncated + output_file_ending, properties)\n    if save_probabilities:\n        np.savez_compressed(output_filename_truncated + '.npz', probabilities=probabilities)\n        
save_pickle(probabilities, output_filename_truncated + '.pkl')\n\n\ndef ensemble_folders(list_of_input_folders: List[str],\n                     output_folder: str,\n                     save_merged_probabilities: bool = False,\n                     num_processes: int = default_num_processes,\n                     dataset_json_file_or_dict: str = None,\n                     plans_json_file_or_dict: str = None):\n    \"\"\"we need too much shit for this function. Problem is that we now have to support region-based training plus\n    multiple input/output formats so there isn't really a way around this.\n\n    If plans and dataset json are not specified, we assume each of the folders has a corresponding plans.json\n    and/or dataset.json in it. These are usually copied into those folders by nnU-Net during prediction.\n    We just pick the dataset.json and plans.json from the first of the folders and we DONT check whether the 5\n    folders contain the same plans etc! This can be a feature if results from different datasets are to be merged (only\n    works if label dict in dataset.json is the same between these datasets!!!)\"\"\"\n    if dataset_json_file_or_dict is not None:\n        if isinstance(dataset_json_file_or_dict, str):\n            dataset_json = load_json(dataset_json_file_or_dict)\n        else:\n            dataset_json = dataset_json_file_or_dict\n    else:\n        dataset_json = load_json(join(list_of_input_folders[0], 'dataset.json'))\n\n    if plans_json_file_or_dict is not None:\n        if isinstance(plans_json_file_or_dict, str):\n            plans = load_json(plans_json_file_or_dict)\n        else:\n            plans = plans_json_file_or_dict\n    else:\n        plans = load_json(join(list_of_input_folders[0], 'plans.json'))\n\n    plans_manager = PlansManager(plans)\n\n    # now collect the files in each of the folders and enforce that all files are present in all folders\n    files_per_folder = [set(subfiles(i, suffix='.npz', join=False)) 
for i in list_of_input_folders]\n    # first build a set with all files\n    s = deepcopy(files_per_folder[0])\n    for f in files_per_folder[1:]:\n        s.update(f)\n    for f in files_per_folder:\n        assert len(s.difference(f)) == 0, \"Not all folders contain the same files for ensembling. Please only \" \\\n                                          \"provide folders that contain the predictions\"\n    lists_of_lists_of_files = [[join(fl, fi) for fl in list_of_input_folders] for fi in s]\n    output_files_truncated = [join(output_folder, fi[:-4]) for fi in s]\n\n    image_reader_writer = plans_manager.image_reader_writer_class()\n    label_manager = plans_manager.get_label_manager(dataset_json)\n\n    maybe_mkdir_p(output_folder)\n    shutil.copy(join(list_of_input_folders[0], 'dataset.json'), output_folder)\n\n    with multiprocessing.get_context(\"spawn\").Pool(num_processes) as pool:\n        num_preds = len(s)\n        _ = pool.starmap(\n            merge_files,\n            zip(\n                lists_of_lists_of_files,\n                output_files_truncated,\n                [dataset_json['file_ending']] * num_preds,\n                [image_reader_writer] * num_preds,\n                [label_manager] * num_preds,\n                [save_merged_probabilities] * num_preds\n            )\n        )\n\n\ndef entry_point_ensemble_folders():\n    parser = argparse.ArgumentParser()\n    parser.add_argument('-i', nargs='+', type=str, required=True,\n                        help='list of input folders')\n    parser.add_argument('-o', type=str, required=True, help='output folder')\n    parser.add_argument('-np', type=int, required=False, default=default_num_processes,\n                        help=f\"Numbers of processes used for ensembling. 
Default: {default_num_processes}\")\n    parser.add_argument('--save_npz', action='store_true', required=False, help='Set this flag to store output '\n                                                                                'probabilities in separate .npz files')\n\n    args = parser.parse_args()\n    ensemble_folders(args.i, args.o, args.save_npz, args.np)\n\n\ndef ensemble_crossvalidations(list_of_trained_model_folders: List[str],\n                              output_folder: str,\n                              folds: Union[Tuple[int, ...], List[int]] = (0, 1, 2, 3, 4),\n                              num_processes: int = default_num_processes,\n                              overwrite: bool = True) -> None:\n    \"\"\"\n    Feature: different configurations can now have different splits\n    \"\"\"\n    dataset_json = load_json(join(list_of_trained_model_folders[0], 'dataset.json'))\n    plans_manager = PlansManager(join(list_of_trained_model_folders[0], 'plans.json'))\n\n    # first collect all unique filenames\n    files_per_folder = {}\n    unique_filenames = set()\n    for tr in list_of_trained_model_folders:\n        files_per_folder[tr] = {}\n        for f in folds:\n            if not isdir(join(tr, f'fold_{f}', 'validation')):\n                raise RuntimeError(f'Expected model output directory does not exist. You must train all requested '\n                                   f'folds of the specified model.\\nModel: {tr}\\nFold: {f}')\n            files_here = subfiles(join(tr, f'fold_{f}', 'validation'), suffix='.npz', join=False)\n            if len(files_here) == 0:\n                raise RuntimeError(f\"No .npz files found in folder {join(tr, f'fold_{f}', 'validation')}. Rerun your \"\n                                   f\"validation with the --npz flag. Use nnUNetv2_train [...] 
--val --npz.\")\n            files_per_folder[tr][f] = subfiles(join(tr, f'fold_{f}', 'validation'), suffix='.npz', join=False)\n            unique_filenames.update(files_per_folder[tr][f])\n\n    # verify that all trained_model_folders have all predictions\n    ok = True\n    for tr, fi in files_per_folder.items():\n        all_files_here = set()\n        for f in folds:\n            all_files_here.update(fi[f])\n        diff = unique_filenames.difference(all_files_here)\n        if len(diff) > 0:\n            ok = False\n            print(f'model {tr} does not seem to contain all predictions. Missing: {diff}')\n        if not ok:\n            raise RuntimeError('There were missing files, see print statements above this one')\n\n    # now we need to collect where these files are\n    file_mapping = []\n    for tr in list_of_trained_model_folders:\n        file_mapping.append({})\n        for f in folds:\n            for fi in files_per_folder[tr][f]:\n                # check for duplicates\n                assert fi not in file_mapping[-1].keys(), f\"Duplicate detected. 
Case {fi} is present in more than \" \\\n                                                          f\"one fold of model {tr}.\"\n                file_mapping[-1][fi] = join(tr, f'fold_{f}', 'validation', fi)\n\n    lists_of_lists_of_files = [[fm[i] for fm in file_mapping] for i in unique_filenames]\n    output_files_truncated = [join(output_folder, fi[:-4]) for fi in unique_filenames]\n\n    image_reader_writer = plans_manager.image_reader_writer_class()\n    maybe_mkdir_p(output_folder)\n    label_manager = plans_manager.get_label_manager(dataset_json)\n\n    if not overwrite:\n        tmp = [isfile(i + dataset_json['file_ending']) for i in output_files_truncated]\n        lists_of_lists_of_files = [lists_of_lists_of_files[i] for i in range(len(tmp)) if not tmp[i]]\n        output_files_truncated = [output_files_truncated[i] for i in range(len(tmp)) if not tmp[i]]\n\n    with multiprocessing.get_context(\"spawn\").Pool(num_processes) as pool:\n        num_preds = len(lists_of_lists_of_files)\n        _ = pool.starmap(\n            merge_files,\n            zip(\n                lists_of_lists_of_files,\n                output_files_truncated,\n                [dataset_json['file_ending']] * num_preds,\n                [image_reader_writer] * num_preds,\n                [label_manager] * num_preds,\n                [False] * num_preds\n            )\n        )\n\n    shutil.copy(join(list_of_trained_model_folders[0], 'plans.json'), join(output_folder, 'plans.json'))\n    shutil.copy(join(list_of_trained_model_folders[0], 'dataset.json'), join(output_folder, 'dataset.json'))\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/evaluation/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/evaluation/accumulate_cv_results.py",
    "content": "import shutil\nfrom typing import Union, List, Tuple\n\nfrom batchgenerators.utilities.file_and_folder_operations import load_json, join, isdir, maybe_mkdir_p, subfiles, isfile\n\nfrom nnunetv2.configuration import default_num_processes\nfrom nnunetv2.evaluation.evaluate_predictions import compute_metrics_on_folder\nfrom nnunetv2.paths import nnUNet_raw, nnUNet_preprocessed\nfrom nnunetv2.utilities.plans_handling.plans_handler import PlansManager\n\n\ndef accumulate_cv_results(trained_model_folder,\n                          merged_output_folder: str,\n                          folds: Union[List[int], Tuple[int, ...]],\n                          num_processes: int = default_num_processes,\n                          overwrite: bool = True):\n    \"\"\"\n    There are a lot of things that can get fucked up, so the simplest way to deal with potential problems is to\n    collect the cv results into a separate folder and then evaluate them again. No messing with summary_json files!\n    \"\"\"\n\n    if overwrite and isdir(merged_output_folder):\n        shutil.rmtree(merged_output_folder)\n    maybe_mkdir_p(merged_output_folder)\n\n    dataset_json = load_json(join(trained_model_folder, 'dataset.json'))\n    plans_manager = PlansManager(join(trained_model_folder, 'plans.json'))\n    rw = plans_manager.image_reader_writer_class()\n    shutil.copy(join(trained_model_folder, 'dataset.json'), join(merged_output_folder, 'dataset.json'))\n    shutil.copy(join(trained_model_folder, 'plans.json'), join(merged_output_folder, 'plans.json'))\n\n    did_we_copy_something = False\n    for f in folds:\n        expected_validation_folder = join(trained_model_folder, f'fold_{f}', 'validation')\n        if not isdir(expected_validation_folder):\n            raise RuntimeError(f\"fold {f} of model {trained_model_folder} is missing. 
Please train it!\")\n        predicted_files = subfiles(expected_validation_folder, suffix=dataset_json['file_ending'], join=False)\n        for pf in predicted_files:\n            if overwrite and isfile(join(merged_output_folder, pf)):\n                raise RuntimeError(f'More than one of your folds has a prediction for case {pf}')\n            if overwrite or not isfile(join(merged_output_folder, pf)):\n                shutil.copy(join(expected_validation_folder, pf), join(merged_output_folder, pf))\n                did_we_copy_something = True\n\n    if did_we_copy_something or not isfile(join(merged_output_folder, 'summary.json')):\n        label_manager = plans_manager.get_label_manager(dataset_json)\n        gt_folder = join(nnUNet_raw, plans_manager.dataset_name, 'labelsTr')\n        if not isdir(gt_folder):\n            gt_folder = join(nnUNet_preprocessed, plans_manager.dataset_name, 'gt_segmentations')\n        compute_metrics_on_folder(gt_folder,\n                                  merged_output_folder,\n                                  join(merged_output_folder, 'summary.json'),\n                                  rw,\n                                  dataset_json['file_ending'],\n                                  label_manager.foreground_regions if label_manager.has_regions else\n                                  label_manager.foreground_labels,\n                                  label_manager.ignore_label,\n                                  num_processes)\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/evaluation/evaluate_predictions.py",
    "content": "import multiprocessing\nimport os\nfrom copy import deepcopy\nfrom multiprocessing import Pool\nfrom typing import Tuple, List, Union, Optional\n\nimport numpy as np\nfrom batchgenerators.utilities.file_and_folder_operations import subfiles, join, save_json, load_json, \\\n    isfile\nfrom nnunetv2.configuration import default_num_processes\nfrom nnunetv2.imageio.base_reader_writer import BaseReaderWriter\nfrom nnunetv2.imageio.reader_writer_registry import determine_reader_writer_from_dataset_json, \\\n    determine_reader_writer_from_file_ending\nfrom nnunetv2.imageio.simpleitk_reader_writer import SimpleITKIO\n# the Evaluator class of the previous nnU-Net was great and all but man was it overengineered. Keep it simple\nfrom nnunetv2.utilities.json_export import recursive_fix_for_json_export\nfrom nnunetv2.utilities.plans_handling.plans_handler import PlansManager\n\n\ndef label_or_region_to_key(label_or_region: Union[int, Tuple[int]]):\n    return str(label_or_region)\n\n\ndef key_to_label_or_region(key: str):\n    try:\n        return int(key)\n    except ValueError:\n        key = key.replace('(', '')\n        key = key.replace(')', '')\n        split = key.split(',')\n        return tuple([int(i) for i in split if len(i) > 0])\n\n\ndef save_summary_json(results: dict, output_file: str):\n    \"\"\"\n    stupid json does not support tuples as keys (why does it have to be so shitty) so we need to convert that shit\n    ourselves\n    \"\"\"\n    results_converted = deepcopy(results)\n    # convert keys in mean metrics\n    results_converted['mean'] = {label_or_region_to_key(k): results['mean'][k] for k in results['mean'].keys()}\n    # convert metric_per_case\n    for i in range(len(results_converted[\"metric_per_case\"])):\n        results_converted[\"metric_per_case\"][i]['metrics'] = \\\n            {label_or_region_to_key(k): results[\"metric_per_case\"][i]['metrics'][k]\n             for k in 
results[\"metric_per_case\"][i]['metrics'].keys()}\n    # sort_keys=True will make foreground_mean the first entry and thus easy to spot\n    save_json(results_converted, output_file, sort_keys=True)\n\n\ndef load_summary_json(filename: str):\n    results = load_json(filename)\n    # convert keys in mean metrics\n    results['mean'] = {key_to_label_or_region(k): results['mean'][k] for k in results['mean'].keys()}\n    # convert metric_per_case\n    for i in range(len(results[\"metric_per_case\"])):\n        results[\"metric_per_case\"][i]['metrics'] = \\\n            {key_to_label_or_region(k): results[\"metric_per_case\"][i]['metrics'][k]\n             for k in results[\"metric_per_case\"][i]['metrics'].keys()}\n    return results\n\n\ndef labels_to_list_of_regions(labels: List[int]):\n    return [(i,) for i in labels]\n\n\ndef region_or_label_to_mask(segmentation: np.ndarray, region_or_label: Union[int, Tuple[int, ...]]) -> np.ndarray:\n    if np.isscalar(region_or_label):\n        return segmentation == region_or_label\n    else:\n        mask = np.zeros_like(segmentation, dtype=bool)\n        for r in region_or_label:\n            mask[segmentation == r] = True\n    return mask\n\n\ndef compute_tp_fp_fn_tn(mask_ref: np.ndarray, mask_pred: np.ndarray, ignore_mask: np.ndarray = None):\n    if ignore_mask is None:\n        use_mask = np.ones_like(mask_ref, dtype=bool)\n    else:\n        use_mask = ~ignore_mask\n    tp = np.sum((mask_ref & mask_pred) & use_mask)\n    fp = np.sum(((~mask_ref) & mask_pred) & use_mask)\n    fn = np.sum((mask_ref & (~mask_pred)) & use_mask)\n    tn = np.sum(((~mask_ref) & (~mask_pred)) & use_mask)\n    return tp, fp, fn, tn\n\n\ndef compute_metrics(reference_file: str, prediction_file: str, image_reader_writer: BaseReaderWriter,\n                    labels_or_regions: Union[List[int], List[Union[int, Tuple[int, ...]]]],\n                    ignore_label: int = None) -> dict:\n    # load images\n    seg_ref, seg_ref_dict = 
image_reader_writer.read_seg(reference_file)\n    seg_pred, seg_pred_dict = image_reader_writer.read_seg(prediction_file)\n    # spacing = seg_ref_dict['spacing']\n\n    ignore_mask = seg_ref == ignore_label if ignore_label is not None else None\n\n    results = {}\n    results['reference_file'] = reference_file\n    results['prediction_file'] = prediction_file\n    results['metrics'] = {}\n    for r in labels_or_regions:\n        results['metrics'][r] = {}\n        mask_ref = region_or_label_to_mask(seg_ref, r)\n        mask_pred = region_or_label_to_mask(seg_pred, r)\n        tp, fp, fn, tn = compute_tp_fp_fn_tn(mask_ref, mask_pred, ignore_mask)\n        if tp + fp + fn == 0:\n            results['metrics'][r]['Dice'] = np.nan\n            results['metrics'][r]['IoU'] = np.nan\n        else:\n            results['metrics'][r]['Dice'] = 2 * tp / (2 * tp + fp + fn)\n            results['metrics'][r]['IoU'] = tp / (tp + fp + fn)\n        results['metrics'][r]['FP'] = fp\n        results['metrics'][r]['TP'] = tp\n        results['metrics'][r]['FN'] = fn\n        results['metrics'][r]['TN'] = tn\n        results['metrics'][r]['n_pred'] = fp + tp\n        results['metrics'][r]['n_ref'] = fn + tp\n    return results\n\n\ndef compute_metrics_on_folder(folder_ref: str, folder_pred: str, output_file: str,\n                              image_reader_writer: BaseReaderWriter,\n                              file_ending: str,\n                              regions_or_labels: Union[List[int], List[Union[int, Tuple[int, ...]]]],\n                              ignore_label: int = None,\n                              num_processes: int = default_num_processes,\n                              chill: bool = True) -> dict:\n    \"\"\"\n    output_file must end with .json; can be None\n    \"\"\"\n    if output_file is not None:\n        assert output_file.endswith('.json'), 'output_file should end with .json'\n    files_pred = subfiles(folder_pred, suffix=file_ending, join=False)\n    
files_ref = subfiles(folder_ref, suffix=file_ending, join=False)\n    if not chill:\n        present = [isfile(join(folder_pred, i)) for i in files_ref]\n        assert all(present), \"Not all files in folder_pred exist in folder_ref\"\n    files_ref = [join(folder_ref, i) for i in files_pred]\n    files_pred = [join(folder_pred, i) for i in files_pred]\n    with multiprocessing.get_context(\"spawn\").Pool(num_processes) as pool:\n        # for i in list(zip(files_ref, files_pred, [image_reader_writer] * len(files_pred), [regions_or_labels] * len(files_pred), [ignore_label] * len(files_pred))):\n        #     compute_metrics(*i)\n        results = pool.starmap(\n            compute_metrics,\n            list(zip(files_ref, files_pred, [image_reader_writer] * len(files_pred), [regions_or_labels] * len(files_pred),\n                     [ignore_label] * len(files_pred)))\n        )\n\n    # mean metric per class\n    metric_list = list(results[0]['metrics'][regions_or_labels[0]].keys())\n    means = {}\n    for r in regions_or_labels:\n        means[r] = {}\n        for m in metric_list:\n            means[r][m] = np.nanmean([i['metrics'][r][m] for i in results])\n\n    # foreground mean\n    foreground_mean = {}\n    for m in metric_list:\n        values = []\n        for k in means.keys():\n            if k == 0 or k == '0':\n                continue\n            values.append(means[k][m])\n        foreground_mean[m] = np.mean(values)\n\n    [recursive_fix_for_json_export(i) for i in results]\n    recursive_fix_for_json_export(means)\n    recursive_fix_for_json_export(foreground_mean)\n    result = {'metric_per_case': results, 'mean': means, 'foreground_mean': foreground_mean}\n    if output_file is not None:\n        save_summary_json(result, output_file)\n    return result\n    # print('DONE')\n\n\ndef compute_metrics_on_folder2(folder_ref: str, folder_pred: str, dataset_json_file: str, plans_file: str,\n                               output_file: str = None,\n   
                            num_processes: int = default_num_processes,\n                               chill: bool = False):\n    dataset_json = load_json(dataset_json_file)\n    # get file ending\n    file_ending = dataset_json['file_ending']\n\n    # get reader writer class\n    example_file = subfiles(folder_ref, suffix=file_ending, join=True)[0]\n    rw = determine_reader_writer_from_dataset_json(dataset_json, example_file)()\n\n    # maybe auto set output file\n    if output_file is None:\n        output_file = join(folder_pred, 'summary.json')\n\n    lm = PlansManager(plans_file).get_label_manager(dataset_json)\n    compute_metrics_on_folder(folder_ref, folder_pred, output_file, rw, file_ending,\n                              lm.foreground_regions if lm.has_regions else lm.foreground_labels, lm.ignore_label,\n                              num_processes, chill=chill)\n\n\ndef compute_metrics_on_folder_simple(folder_ref: str, folder_pred: str, labels: Union[Tuple[int, ...], List[int]],\n                                     output_file: str = None,\n                                     num_processes: int = default_num_processes,\n                                     ignore_label: int = None,\n                                     chill: bool = False):\n    example_file = subfiles(folder_ref, join=True)[0]\n    file_ending = os.path.splitext(example_file)[-1]\n    rw = determine_reader_writer_from_file_ending(file_ending, example_file, allow_nonmatching_filename=True,\n                                                  verbose=False)()\n    # maybe auto set output file\n    if output_file is None:\n        output_file = join(folder_pred, 'summary.json')\n    compute_metrics_on_folder(folder_ref, folder_pred, output_file, rw, file_ending,\n                              labels, ignore_label=ignore_label, num_processes=num_processes, chill=chill)\n\n\ndef evaluate_folder_entry_point():\n    import argparse\n    parser = argparse.ArgumentParser()\n    
parser.add_argument('gt_folder', type=str, help='folder with gt segmentations')\n    parser.add_argument('pred_folder', type=str, help='folder with predicted segmentations')\n    parser.add_argument('-djfile', type=str, required=True,\n                        help='dataset.json file')\n    parser.add_argument('-pfile', type=str, required=True,\n                        help='plans.json file')\n    parser.add_argument('-o', type=str, required=False, default=None,\n                        help='Output file. Optional. Default: pred_folder/summary.json')\n    parser.add_argument('-np', type=int, required=False, default=default_num_processes,\n                        help=f'number of processes used. Optional. Default: {default_num_processes}')\n    parser.add_argument('--chill', action='store_true', help='dont crash if folder_pred does not have all files that are present in folder_gt')\n    args = parser.parse_args()\n    compute_metrics_on_folder2(args.gt_folder, args.pred_folder, args.djfile, args.pfile, args.o, args.np, chill=args.chill)\n\n\ndef evaluate_simple_entry_point():\n    import argparse\n    parser = argparse.ArgumentParser()\n    parser.add_argument('gt_folder', type=str, help='folder with gt segmentations')\n    parser.add_argument('pred_folder', type=str, help='folder with predicted segmentations')\n    parser.add_argument('-l', type=int, nargs='+', required=True,\n                        help='list of labels')\n    parser.add_argument('-il', type=int, required=False, default=None,\n                        help='ignore label')\n    parser.add_argument('-o', type=str, required=False, default=None,\n                        help='Output file. Optional. Default: pred_folder/summary.json')\n    parser.add_argument('-np', type=int, required=False, default=default_num_processes,\n                        help=f'number of processes used. Optional. 
Default: {default_num_processes}')\n    parser.add_argument('--chill', action='store_true', help='dont crash if folder_pred does not have all files that are present in folder_gt')\n\n    args = parser.parse_args()\n    compute_metrics_on_folder_simple(args.gt_folder, args.pred_folder, args.l, args.o, args.np, args.il, chill=args.chill)\n\n\nif __name__ == '__main__':\n    folder_ref = '/media/fabian/data/nnUNet_raw/Dataset004_Hippocampus/labelsTr'\n    folder_pred = '/home/fabian/results/nnUNet_remake/Dataset004_Hippocampus/nnUNetModule__nnUNetPlans__3d_fullres/fold_0/validation'\n    output_file = '/home/fabian/results/nnUNet_remake/Dataset004_Hippocampus/nnUNetModule__nnUNetPlans__3d_fullres/fold_0/validation/summary.json'\n    image_reader_writer = SimpleITKIO()\n    file_ending = '.nii.gz'\n    regions = labels_to_list_of_regions([1, 2])\n    ignore_label = None\n    num_processes = 12\n    compute_metrics_on_folder(folder_ref, folder_pred, output_file, image_reader_writer, file_ending, regions, ignore_label,\n                              num_processes)\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/evaluation/find_best_configuration.py",
    "content": "import argparse\nimport os.path\nfrom copy import deepcopy\nfrom typing import Union, List, Tuple\n\nfrom batchgenerators.utilities.file_and_folder_operations import load_json, join, isdir, save_json\n\nfrom nnunetv2.configuration import default_num_processes\nfrom nnunetv2.ensembling.ensemble import ensemble_crossvalidations\nfrom nnunetv2.evaluation.accumulate_cv_results import accumulate_cv_results\nfrom nnunetv2.evaluation.evaluate_predictions import compute_metrics_on_folder, load_summary_json\nfrom nnunetv2.paths import nnUNet_preprocessed, nnUNet_raw, nnUNet_results\nfrom nnunetv2.postprocessing.remove_connected_components import determine_postprocessing\nfrom nnunetv2.utilities.file_path_utilities import maybe_convert_to_dataset_name, get_output_folder, \\\n    convert_identifier_to_trainer_plans_config, get_ensemble_name, folds_tuple_to_string\nfrom nnunetv2.utilities.plans_handling.plans_handler import PlansManager\n\ndefault_trained_models = tuple([\n    {'plans': 'nnUNetPlans', 'configuration': '2d', 'trainer': 'nnUNetTrainer'},\n    {'plans': 'nnUNetPlans', 'configuration': '3d_fullres', 'trainer': 'nnUNetTrainer'},\n    {'plans': 'nnUNetPlans', 'configuration': '3d_lowres', 'trainer': 'nnUNetTrainer'},\n    {'plans': 'nnUNetPlans', 'configuration': '3d_cascade_fullres', 'trainer': 'nnUNetTrainer'},\n])\n\n\ndef filter_available_models(model_dict: Union[List[dict], Tuple[dict, ...]], dataset_name_or_id: Union[str, int]):\n    valid = []\n    for trained_model in model_dict:\n        plans_manager = PlansManager(join(nnUNet_preprocessed, maybe_convert_to_dataset_name(dataset_name_or_id),\n                               trained_model['plans'] + '.json'))\n        # check if configuration exists\n        # 3d_cascade_fullres and 3d_lowres do not exist for each dataset so we allow them to be absent IF they are not\n        # specified in the plans file\n        if trained_model['configuration'] not in 
plans_manager.available_configurations:\n            print(f\"Configuration {trained_model['configuration']} not found in plans {trained_model['plans']}.\\n\"\n                  f\"Inferred plans file: {join(nnUNet_preprocessed, maybe_convert_to_dataset_name(dataset_name_or_id), trained_model['plans'] + '.json')}.\")\n            continue\n\n        # check if trained model output folder exists. This is a requirement. No mercy here.\n        expected_output_folder = get_output_folder(dataset_name_or_id, trained_model['trainer'], trained_model['plans'],\n                                                   trained_model['configuration'], fold=None)\n        if not isdir(expected_output_folder):\n            raise RuntimeError(f\"Trained model {trained_model} does not have an output folder. \"\n                  f\"Expected: {expected_output_folder}. Please run the training for this model! (don't forget \"\n                  f\"the --npz flag if you want to ensemble multiple configurations)\")\n\n        valid.append(trained_model)\n    return valid\n\n\ndef generate_inference_command(dataset_name_or_id: Union[int, str], configuration_name: str,\n                               plans_identifier: str = 'nnUNetPlans', trainer_name: str = 'nnUNetTrainer',\n                               folds: Union[List[int], Tuple[int, ...]] = (0, 1, 2, 3, 4),\n                               folder_with_segs_from_prev_stage: str = None,\n                               input_folder: str = 'INPUT_FOLDER',\n                               output_folder: str = 'OUTPUT_FOLDER',\n                               save_npz: bool = False):\n    fold_str = ''\n    for f in folds:\n        fold_str += f' {f}'\n\n    predict_command = ''\n    trained_model_folder = get_output_folder(dataset_name_or_id, trainer_name, plans_identifier, configuration_name, fold=None)\n    plans_manager = PlansManager(join(trained_model_folder, 'plans.json'))\n    configuration_manager = 
plans_manager.get_configuration(configuration_name)\n    if 'previous_stage' in plans_manager.available_configurations:\n        prev_stage = configuration_manager.previous_stage_name\n        predict_command += generate_inference_command(dataset_name_or_id, prev_stage, plans_identifier, trainer_name,\n                                                      folds, None, output_folder='OUTPUT_FOLDER_PREV_STAGE') + '\\n'\n        folder_with_segs_from_prev_stage = 'OUTPUT_FOLDER_PREV_STAGE'\n\n    predict_command = f'nnUNetv2_predict -d {dataset_name_or_id} -i {input_folder} -o {output_folder} -f {fold_str} ' \\\n                      f'-tr {trainer_name} -c {configuration_name} -p {plans_identifier}'\n    if folder_with_segs_from_prev_stage is not None:\n        predict_command += f' -prev_stage_predictions {folder_with_segs_from_prev_stage}'\n    if save_npz:\n        predict_command += ' --save_probabilities'\n    return predict_command\n\n\ndef find_best_configuration(dataset_name_or_id,\n                            allowed_trained_models: Union[List[dict], Tuple[dict, ...]] = default_trained_models,\n                            allow_ensembling: bool = True,\n                            num_processes: int = default_num_processes,\n                            overwrite: bool = True,\n                            folds: Union[List[int], Tuple[int, ...]] = (0, 1, 2, 3, 4),\n                            strict: bool = False):\n    dataset_name = maybe_convert_to_dataset_name(dataset_name_or_id)\n    all_results = {}\n\n    allowed_trained_models = filter_available_models(deepcopy(allowed_trained_models), dataset_name_or_id)\n\n    for m in allowed_trained_models:\n        output_folder = get_output_folder(dataset_name_or_id, m['trainer'], m['plans'], m['configuration'], fold=None)\n        if not isdir(output_folder) and strict:\n            raise RuntimeError(f'{dataset_name}: The output folder of plans {m[\"plans\"]} configuration '\n                               
f'{m[\"configuration\"]} is missing. Please train the model (all requested folds!) first!')\n        identifier = os.path.basename(output_folder)\n        merged_output_folder = join(output_folder, f'crossval_results_folds_{folds_tuple_to_string(folds)}')\n        accumulate_cv_results(output_folder, merged_output_folder, folds, num_processes, overwrite)\n        all_results[identifier] = {\n            'source': merged_output_folder,\n            'result': load_summary_json(join(merged_output_folder, 'summary.json'))['foreground_mean']['Dice']\n        }\n\n    if allow_ensembling:\n        for i in range(len(allowed_trained_models)):\n            for j in range(i + 1, len(allowed_trained_models)):\n                m1, m2 = allowed_trained_models[i], allowed_trained_models[j]\n\n                output_folder_1 = get_output_folder(dataset_name_or_id, m1['trainer'], m1['plans'], m1['configuration'], fold=None)\n                output_folder_2 = get_output_folder(dataset_name_or_id, m2['trainer'], m2['plans'], m2['configuration'], fold=None)\n                identifier = get_ensemble_name(output_folder_1, output_folder_2, folds)\n\n                output_folder_ensemble = join(nnUNet_results, dataset_name, 'ensembles', identifier)\n\n                ensemble_crossvalidations([output_folder_1, output_folder_2], output_folder_ensemble, folds,\n                                          num_processes, overwrite=overwrite)\n\n                # evaluate ensembled predictions\n                plans_manager = PlansManager(join(output_folder_1, 'plans.json'))\n                dataset_json = load_json(join(output_folder_1, 'dataset.json'))\n                label_manager = plans_manager.get_label_manager(dataset_json)\n                rw = plans_manager.image_reader_writer_class()\n\n                compute_metrics_on_folder(join(nnUNet_preprocessed, dataset_name, 'gt_segmentations'),\n                                          output_folder_ensemble,\n                           
               join(output_folder_ensemble, 'summary.json'),\n                                          rw,\n                                          dataset_json['file_ending'],\n                                          label_manager.foreground_regions if label_manager.has_regions else\n                                          label_manager.foreground_labels,\n                                          label_manager.ignore_label,\n                                          num_processes)\n                all_results[identifier] = \\\n                    {\n                    'source': output_folder_ensemble,\n                    'result': load_summary_json(join(output_folder_ensemble, 'summary.json'))['foreground_mean']['Dice']\n                    }\n\n    # pick best and report inference command\n    best_score = max([i['result'] for i in all_results.values()])\n    best_keys = [k for k in all_results.keys() if all_results[k]['result'] == best_score]  # may never happen but theoretically\n    # there can be a tie. 
Let's pick the first model in this case because it's going to be the simpler one (ensembles\n    # come after single configs)\n    best_key = best_keys[0]\n\n    print()\n    print('***All results:***')\n    for k, v in all_results.items():\n        print(f'{k}: {v[\"result\"]}')\n    print(f'\\n*Best*: {best_key}: {all_results[best_key][\"result\"]}')\n    print()\n\n    print('***Determining postprocessing for best model/ensemble***')\n    determine_postprocessing(all_results[best_key]['source'], join(nnUNet_preprocessed, dataset_name, 'gt_segmentations'),\n                             plans_file_or_dict=join(all_results[best_key]['source'], 'plans.json'),\n                             dataset_json_file_or_dict=join(all_results[best_key]['source'], 'dataset.json'),\n                             num_processes=num_processes, keep_postprocessed_files=True)\n\n    # in addition to just reading the console output (how it was previously) we should return the information\n    # needed to run the full inference via API\n    return_dict = {\n        'folds': folds,\n        'dataset_name_or_id': dataset_name_or_id,\n        'considered_models': allowed_trained_models,\n        'ensembling_allowed': allow_ensembling,\n        'all_results': {i: j['result'] for i, j in all_results.items()},\n        'best_model_or_ensemble': {\n            'result_on_crossval_pre_pp': all_results[best_key][\"result\"],\n            'result_on_crossval_post_pp': load_json(join(all_results[best_key]['source'], 'postprocessed', 'summary.json'))['foreground_mean']['Dice'],\n            'postprocessing_file': join(all_results[best_key]['source'], 'postprocessing.pkl'),\n            'some_plans_file': join(all_results[best_key]['source'], 'plans.json'),\n            # just needed for label handling, can\n            # come from any of the ensemble members (if any)\n            'selected_model_or_models': []\n        }\n    }\n    # convert best key to inference command:\n    if 
best_key.startswith('ensemble___'):\n        prefix, m1, m2, folds_string = best_key.split('___')\n        tr1, pl1, c1 = convert_identifier_to_trainer_plans_config(m1)\n        tr2, pl2, c2 = convert_identifier_to_trainer_plans_config(m2)\n        return_dict['best_model_or_ensemble']['selected_model_or_models'].append(\n            {\n                'configuration': c1,\n                'trainer': tr1,\n                'plans_identifier': pl1,\n            })\n        return_dict['best_model_or_ensemble']['selected_model_or_models'].append(\n            {\n                'configuration': c2,\n                'trainer': tr2,\n                'plans_identifier': pl2,\n            })\n    else:\n        tr, pl, c = convert_identifier_to_trainer_plans_config(best_key)\n        return_dict['best_model_or_ensemble']['selected_model_or_models'].append(\n            {\n                'configuration': c,\n                'trainer': tr,\n                'plans_identifier': pl,\n            })\n\n    save_json(return_dict, join(nnUNet_results, dataset_name, 'inference_information.json'))  # save this so that we don't have to run this\n    # everything someone wants to be reminded of the inference commands. 
They can just load this and give it to\n    # print_inference_instructions\n\n    # print it\n    print_inference_instructions(return_dict, instructions_file=join(nnUNet_results, dataset_name, 'inference_instructions.txt'))\n    return return_dict\n\n\ndef print_inference_instructions(inference_info_dict: dict, instructions_file: str = None):\n    def _print_and_maybe_write_to_file(string):\n        print(string)\n        if f_handle is not None:\n            f_handle.write(f'{string}\\n')\n\n    f_handle = open(instructions_file, 'w') if instructions_file is not None else None\n    print()\n    _print_and_maybe_write_to_file('***Run inference like this:***\\n')\n    output_folders = []\n\n    dataset_name_or_id = inference_info_dict['dataset_name_or_id']\n    if len(inference_info_dict['best_model_or_ensemble']['selected_model_or_models']) > 1:\n        is_ensemble = True\n        _print_and_maybe_write_to_file('An ensemble won! What a surprise! Run the following commands to run predictions with the ensemble members:\\n')\n    else:\n        is_ensemble = False\n\n    for j, i in enumerate(inference_info_dict['best_model_or_ensemble']['selected_model_or_models']):\n        tr, c, pl = i['trainer'], i['configuration'], i['plans_identifier']\n        if is_ensemble:\n            output_folder_name = f\"OUTPUT_FOLDER_MODEL_{j+1}\"\n        else:\n            output_folder_name = f\"OUTPUT_FOLDER\"\n        output_folders.append(output_folder_name)\n\n        _print_and_maybe_write_to_file(generate_inference_command(dataset_name_or_id, c, pl, tr, inference_info_dict['folds'],\n                                         save_npz=is_ensemble, output_folder=output_folder_name))\n\n    if is_ensemble:\n        output_folder_str = output_folders[0]\n        for o in output_folders[1:]:\n            output_folder_str += f' {o}'\n        output_ensemble = f\"OUTPUT_FOLDER\"\n        _print_and_maybe_write_to_file('\\nThe run ensembling with:\\n')\n        
_print_and_maybe_write_to_file(f\"nnUNetv2_ensemble -i {output_folder_str} -o {output_ensemble} -np {default_num_processes}\")\n\n    _print_and_maybe_write_to_file(\"\\n***Once inference is completed, run postprocessing like this:***\\n\")\n    _print_and_maybe_write_to_file(f\"nnUNetv2_apply_postprocessing -i OUTPUT_FOLDER -o OUTPUT_FOLDER_PP \"\n          f\"-pp_pkl_file {inference_info_dict['best_model_or_ensemble']['postprocessing_file']} -np {default_num_processes} \"\n          f\"-plans_json {inference_info_dict['best_model_or_ensemble']['some_plans_file']}\")\n\n\ndef dumb_trainer_config_plans_to_trained_models_dict(trainers: List[str], configs: List[str], plans: List[str]):\n    \"\"\"\n    function is called dumb because it's dumb\n    \"\"\"\n    ret = []\n    for t in trainers:\n        for c in configs:\n            for p in plans:\n                ret.append(\n                    {'plans': p, 'configuration': c, 'trainer': t}\n                )\n    return tuple(ret)\n\n\ndef find_best_configuration_entry_point():\n    parser = argparse.ArgumentParser()\n    parser.add_argument('dataset_name_or_id', type=str, help='Dataset Name or id')\n    parser.add_argument('-p', nargs='+', required=False, default=['nnUNetPlans'],\n                        help='List of plan identifiers. Default: nnUNetPlans')\n    parser.add_argument('-c', nargs='+', required=False, default=['2d', '3d_fullres', '3d_lowres', '3d_cascade_fullres'],\n                        help=\"List of configurations. Default: ['2d', '3d_fullres', '3d_lowres', '3d_cascade_fullres']\")\n    parser.add_argument('-tr', nargs='+', required=False, default=['nnUNetTrainer'],\n                        help='List of trainers. 
Default: nnUNetTrainer')\n    parser.add_argument('-np', required=False, default=default_num_processes, type=int,\n                        help='Number of processes to use for ensembling, postprocessing etc')\n    parser.add_argument('-f', nargs='+', type=int, default=(0, 1, 2, 3, 4),\n                        help='Folds to use. Default: 0 1 2 3 4')\n    parser.add_argument('--disable_ensembling', action='store_true', required=False,\n                        help='Set this flag to disable ensembling')\n    parser.add_argument('--no_overwrite', action='store_true',\n                        help='If set we will not overwrite already ensembled files etc. May speed up concecutive '\n                             'runs of this command (why would you want to do that?) at the risk of not updating '\n                             'outdated results.')\n    args = parser.parse_args()\n\n    model_dict = dumb_trainer_config_plans_to_trained_models_dict(args.tr, args.c, args.p)\n    dataset_name = maybe_convert_to_dataset_name(args.dataset_name_or_id)\n\n    find_best_configuration(dataset_name, model_dict, allow_ensembling=not args.disable_ensembling,\n                            num_processes=args.np, overwrite=not args.no_overwrite, folds=args.f,\n                            strict=False)\n\n\ndef accumulate_crossval_results_entry_point():\n    parser = argparse.ArgumentParser('Copies all predicted segmentations from the individual folds into one joint '\n                                     'folder and evaluates them')\n    parser.add_argument('dataset_name_or_id', type=str, help='Dataset Name or id')\n    parser.add_argument('-c', type=str, required=True,\n                        default='3d_fullres',\n                        help=\"Configuration\")\n    parser.add_argument('-o', type=str, required=False, default=None,\n                        help=\"Output folder. 
If not specified, the output folder will be located in the trained \" \\\n                             \"model directory (named crossval_results_folds_XXX).\")\n    parser.add_argument('-f', nargs='+', type=int, default=(0, 1, 2, 3, 4),\n                        help='Folds to use. Default: 0 1 2 3 4')\n    parser.add_argument('-p', type=str, required=False, default='nnUNetPlans',\n                        help='Plan identifier in which to search for the specified configuration. Default: nnUNetPlans')\n    parser.add_argument('-tr', type=str, required=False, default='nnUNetTrainer',\n                        help='Trainer class. Default: nnUNetTrainer')\n    args = parser.parse_args()\n    trained_model_folder = get_output_folder(args.dataset_name_or_id, args.tr, args.p, args.c)\n\n    if args.o is None:\n        merged_output_folder = join(trained_model_folder, f'crossval_results_folds_{folds_tuple_to_string(args.f)}')\n    else:\n        merged_output_folder = args.o\n\n    accumulate_cv_results(trained_model_folder, merged_output_folder, args.f)\n\n\nif __name__ == '__main__':\n    find_best_configuration(4,\n                            default_trained_models,\n                            True,\n                            8,\n                            False,\n                            (0, 1, 2, 3, 4))\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/experiment_planning/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/experiment_planning/dataset_fingerprint/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/experiment_planning/dataset_fingerprint/fingerprint_extractor.py",
    "content": "import multiprocessing\nimport os\nfrom time import sleep\nfrom typing import List, Type, Union\n\nimport numpy as np\nfrom batchgenerators.utilities.file_and_folder_operations import load_json, join, save_json, isfile, maybe_mkdir_p\nfrom tqdm import tqdm\n\nfrom nnunetv2.imageio.base_reader_writer import BaseReaderWriter\nfrom nnunetv2.imageio.reader_writer_registry import determine_reader_writer_from_dataset_json\nfrom nnunetv2.paths import nnUNet_raw, nnUNet_preprocessed\nfrom nnunetv2.preprocessing.cropping.cropping import crop_to_nonzero\nfrom nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name\nfrom nnunetv2.utilities.utils import get_filenames_of_train_images_and_targets\n\n\nclass DatasetFingerprintExtractor(object):\n    def __init__(self, dataset_name_or_id: Union[str, int], num_processes: int = 8, verbose: bool = False):\n        \"\"\"\n        extracts the dataset fingerprint used for experiment planning. The dataset fingerprint will be saved as a\n        json file in the input_folder\n\n        Philosophy here is to do only what we really need. Don't store stuff that we can easily read from somewhere\n        else. Don't compute stuff we don't need (except for intensity_statistics_per_channel)\n        \"\"\"\n        dataset_name = maybe_convert_to_dataset_name(dataset_name_or_id)\n        self.verbose = verbose\n\n        self.dataset_name = dataset_name\n        self.input_folder = join(nnUNet_raw, dataset_name)\n        self.num_processes = num_processes\n        self.dataset_json = load_json(join(self.input_folder, 'dataset.json'))\n        self.dataset = get_filenames_of_train_images_and_targets(self.input_folder, self.dataset_json)\n\n        # We don't want to use all foreground voxels because that can accumulate a lot of data (out of memory). It is\n        # also not critically important to get all pixels as long as there are enough. 
Let's use 10e7 voxels in total\n        # (for the entire dataset)\n        self.num_foreground_voxels_for_intensitystats = 10e7\n\n    @staticmethod\n    def collect_foreground_intensities(segmentation: np.ndarray, images: np.ndarray, seed: int = 1234,\n                                       num_samples: int = 10000):\n        \"\"\"\n        images=image with multiple channels = shape (c, x, y(, z))\n        \"\"\"\n        assert images.ndim == 4\n        assert segmentation.ndim == 4\n\n        assert not np.any(np.isnan(segmentation)), \"Segmentation contains NaN values. grrrr.... :-(\"\n        assert not np.any(np.isnan(images)), \"Images contains NaN values. grrrr.... :-(\"\n\n        rs = np.random.RandomState(seed)\n\n        intensities_per_channel = []\n        # we don't use the intensity_statistics_per_channel at all, it's just something that might be nice to have\n        intensity_statistics_per_channel = []\n\n        # segmentation is 4d: 1,x,y,z. We need to remove the empty dimension for the following code to work\n        foreground_mask = segmentation[0] > 0\n\n        for i in range(len(images)):\n            foreground_pixels = images[i][foreground_mask]\n            num_fg = len(foreground_pixels)\n            # sample with replacement so that we don't get issues with cases that have less than num_samples\n            # foreground_pixels. 
We could also just sample less in those cases but that would than cause these\n            # training cases to be underrepresented\n            intensities_per_channel.append(\n                rs.choice(foreground_pixels, num_samples, replace=True) if num_fg > 0 else [])\n            intensity_statistics_per_channel.append({\n                'mean': np.mean(foreground_pixels) if num_fg > 0 else np.nan,\n                'median': np.median(foreground_pixels) if num_fg > 0 else np.nan,\n                'min': np.min(foreground_pixels) if num_fg > 0 else np.nan,\n                'max': np.max(foreground_pixels) if num_fg > 0 else np.nan,\n                'percentile_99_5': np.percentile(foreground_pixels, 99.5) if num_fg > 0 else np.nan,\n                'percentile_00_5': np.percentile(foreground_pixels, 0.5) if num_fg > 0 else np.nan,\n\n            })\n\n        return intensities_per_channel, intensity_statistics_per_channel\n\n    @staticmethod\n    def analyze_case(image_files: List[str], segmentation_file: str, reader_writer_class: Type[BaseReaderWriter],\n                     num_samples: int = 10000):\n        rw = reader_writer_class()\n        images, properties_images = rw.read_images(image_files)\n        segmentation, properties_seg = rw.read_seg(segmentation_file)\n\n        # we no longer crop and save the cropped images before this is run. Instead we run the cropping on the fly.\n        # Downside is that we need to do this twice (once here and once during preprocessing). Upside is that we don't\n        # need to save the cropped data anymore. Given that cropping is not too expensive it makes sense to do it this\n        # way. 
This is only possible because we are now using our new input/output interface.\n        data_cropped, seg_cropped, bbox = crop_to_nonzero(images, segmentation)\n\n        foreground_intensities_per_channel, foreground_intensity_stats_per_channel = \\\n            DatasetFingerprintExtractor.collect_foreground_intensities(seg_cropped, data_cropped,\n                                                                       num_samples=num_samples)\n\n        spacing = properties_images['spacing']\n\n        shape_before_crop = images.shape[1:]\n        shape_after_crop = data_cropped.shape[1:]\n        relative_size_after_cropping = np.prod(shape_after_crop) / np.prod(shape_before_crop)\n        return shape_after_crop, spacing, foreground_intensities_per_channel, foreground_intensity_stats_per_channel, \\\n               relative_size_after_cropping\n\n    def run(self, overwrite_existing: bool = False) -> dict:\n        # we do not save the properties file in self.input_folder because that folder might be read-only. We can only\n        # reliably write in nnUNet_preprocessed and nnUNet_results, so nnUNet_preprocessed it is\n        preprocessed_output_folder = join(nnUNet_preprocessed, self.dataset_name)\n        maybe_mkdir_p(preprocessed_output_folder)\n        properties_file = join(preprocessed_output_folder, 'dataset_fingerprint.json')\n\n        if not isfile(properties_file) or overwrite_existing:\n            reader_writer_class = determine_reader_writer_from_dataset_json(self.dataset_json,\n                                                                            # yikes. 
Rip the following line\n                                                                            self.dataset[self.dataset.keys().__iter__().__next__()]['images'][0])\n\n            # determine how many foreground voxels we need to sample per training case\n            num_foreground_samples_per_case = int(self.num_foreground_voxels_for_intensitystats //\n                                                  len(self.dataset))\n\n            r = []\n            with multiprocessing.get_context(\"spawn\").Pool(self.num_processes) as p:\n                for k in self.dataset.keys():\n                    r.append(p.starmap_async(DatasetFingerprintExtractor.analyze_case,\n                                             ((self.dataset[k]['images'], self.dataset[k]['label'], reader_writer_class,\n                                               num_foreground_samples_per_case),)))\n                remaining = list(range(len(self.dataset)))\n                # p is pretty nifti. If we kill workers they just respawn but don't do any work.\n                # So we need to store the original pool of workers.\n                workers = [j for j in p._pool]\n                with tqdm(desc=None, total=len(self.dataset), disable=self.verbose) as pbar:\n                    while len(remaining) > 0:\n                        all_alive = all([j.is_alive() for j in workers])\n                        if not all_alive:\n                            raise RuntimeError('Some background worker is 6 feet under. Yuck. \\n'\n                                               'OK jokes aside.\\n'\n                                               'One of your background processes is missing. This could be because of '\n                                               'an error (look for an error message) or because it was killed '\n                                               'by your OS due to running out of RAM. 
If you don\\'t see '\n                                               'an error message, out of RAM is likely the problem. In that case '\n                                               'reducing the number of workers might help')\n                        done = [i for i in remaining if r[i].ready()]\n                        for _ in done:\n                            pbar.update()\n                        remaining = [i for i in remaining if i not in done]\n                        sleep(0.1)\n\n            # results = ptqdm(DatasetFingerprintExtractor.analyze_case,\n            #                 (training_images_per_case, training_labels_per_case),\n            #                 processes=self.num_processes, zipped=True, reader_writer_class=reader_writer_class,\n            #                 num_samples=num_foreground_samples_per_case, disable=self.verbose)\n            results = [i.get()[0] for i in r]\n\n            shapes_after_crop = [r[0] for r in results]\n            spacings = [r[1] for r in results]\n            foreground_intensities_per_channel = [np.concatenate([r[2][i] for r in results]) for i in\n                                                  range(len(results[0][2]))]\n            # we drop this so that the json file is somewhat human readable\n            # foreground_intensity_stats_by_case_and_modality = [r[3] for r in results]\n            median_relative_size_after_cropping = np.median([r[4] for r in results], 0)\n\n            num_channels = len(self.dataset_json['channel_names'].keys()\n                                 if 'channel_names' in self.dataset_json.keys()\n                                 else self.dataset_json['modality'].keys())\n            intensity_statistics_per_channel = {}\n            for i in range(num_channels):\n                intensity_statistics_per_channel[i] = {\n                    'mean': float(np.mean(foreground_intensities_per_channel[i])),\n                    'median': 
float(np.median(foreground_intensities_per_channel[i])),\n                    'std': float(np.std(foreground_intensities_per_channel[i])),\n                    'min': float(np.min(foreground_intensities_per_channel[i])),\n                    'max': float(np.max(foreground_intensities_per_channel[i])),\n                    'percentile_99_5': float(np.percentile(foreground_intensities_per_channel[i], 99.5)),\n                    'percentile_00_5': float(np.percentile(foreground_intensities_per_channel[i], 0.5)),\n                }\n\n            fingerprint = {\n                    \"spacings\": spacings,\n                    \"shapes_after_crop\": shapes_after_crop,\n                    'foreground_intensity_properties_per_channel': intensity_statistics_per_channel,\n                    \"median_relative_size_after_cropping\": median_relative_size_after_cropping\n                }\n\n            try:\n                save_json(fingerprint, properties_file)\n            except Exception as e:\n                if isfile(properties_file):\n                    os.remove(properties_file)\n                raise e\n        else:\n            fingerprint = load_json(properties_file)\n        return fingerprint\n\n\nif __name__ == '__main__':\n    dfe = DatasetFingerprintExtractor(2, 8)\n    dfe.run(overwrite_existing=False)\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/experiment_planning/experiment_planners/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/experiment_planning/experiment_planners/default_experiment_planner.py",
    "content": "import os.path\nimport shutil\nfrom copy import deepcopy\nfrom functools import lru_cache\nfrom typing import List, Union, Tuple, Type\n\nimport numpy as np\nfrom batchgenerators.utilities.file_and_folder_operations import load_json, join, save_json, isfile, maybe_mkdir_p\nfrom dynamic_network_architectures.architectures.unet import PlainConvUNet, ResidualEncoderUNet\nfrom dynamic_network_architectures.building_blocks.helper import convert_dim_to_conv_op, get_matching_instancenorm\n\nfrom nnunetv2.configuration import ANISO_THRESHOLD\nfrom nnunetv2.experiment_planning.experiment_planners.network_topology import get_pool_and_conv_props\nfrom nnunetv2.imageio.reader_writer_registry import determine_reader_writer_from_dataset_json\nfrom nnunetv2.paths import nnUNet_raw, nnUNet_preprocessed\nfrom nnunetv2.preprocessing.normalization.map_channel_name_to_normalization import get_normalization_scheme\nfrom nnunetv2.preprocessing.resampling.default_resampling import resample_data_or_seg_to_shape, compute_new_shape\nfrom nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name\nfrom nnunetv2.utilities.json_export import recursive_fix_for_json_export\nfrom nnunetv2.utilities.utils import get_identifiers_from_splitted_dataset_folder, \\\n    get_filenames_of_train_images_and_targets\n\n\nclass ExperimentPlanner(object):\n    def __init__(self, dataset_name_or_id: Union[str, int],\n                 gpu_memory_target_in_gb: float = 8,\n                 preprocessor_name: str = 'DefaultPreprocessor', plans_name: str = 'nnUNetPlans',\n                 overwrite_target_spacing: Union[List[float], Tuple[float, ...]] = None,\n                 suppress_transpose: bool = False):\n        \"\"\"\n        overwrite_target_spacing only affects 3d_fullres! 
(but by extension 3d_lowres which starts with fullres may\n        also be affected\n        \"\"\"\n\n        self.dataset_name = maybe_convert_to_dataset_name(dataset_name_or_id)\n        self.suppress_transpose = suppress_transpose\n        self.raw_dataset_folder = join(nnUNet_raw, self.dataset_name)\n        preprocessed_folder = join(nnUNet_preprocessed, self.dataset_name)\n        self.dataset_json = load_json(join(self.raw_dataset_folder, 'dataset.json'))\n        self.dataset = get_filenames_of_train_images_and_targets(self.raw_dataset_folder, self.dataset_json)\n\n        # load dataset fingerprint\n        if not isfile(join(preprocessed_folder, 'dataset_fingerprint.json')):\n            raise RuntimeError('Fingerprint missing for this dataset. Please run nnUNet_extract_dataset_fingerprint')\n\n        self.dataset_fingerprint = load_json(join(preprocessed_folder, 'dataset_fingerprint.json'))\n\n        self.anisotropy_threshold = ANISO_THRESHOLD\n\n        self.UNet_base_num_features = 32\n        self.UNet_class = PlainConvUNet\n        # the following two numbers are really arbitrary and were set to reproduce nnU-Net v1's configurations as\n        # much as possible\n        self.UNet_reference_val_3d = 560000000  # 455600128  550000000\n        self.UNet_reference_val_2d = 85000000  # 83252480\n        self.UNet_reference_com_nfeatures = 32\n        self.UNet_reference_val_corresp_GB = 8\n        self.UNet_reference_val_corresp_bs_2d = 12\n        self.UNet_reference_val_corresp_bs_3d = 2\n        self.UNet_vram_target_GB = gpu_memory_target_in_gb\n        self.UNet_featuremap_min_edge_length = 4\n        self.UNet_blocks_per_stage_encoder = (2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2)\n        self.UNet_blocks_per_stage_decoder = (2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2)\n        self.UNet_min_batch_size = 2\n        self.UNet_max_features_2d = 512\n        self.UNet_max_features_3d = 320\n\n        self.lowres_creation_threshold = 0.25  # if the patch 
size of fullres is less than 25% of the voxels in the\n        # median shape then we need a lowres config as well\n\n        self.preprocessor_name = preprocessor_name\n        self.plans_identifier = plans_name\n        self.overwrite_target_spacing = overwrite_target_spacing\n        assert overwrite_target_spacing is None or len(overwrite_target_spacing), 'if overwrite_target_spacing is ' \\\n                                                                                  'used then three floats must be ' \\\n                                                                                  'given (as list or tuple)'\n        assert overwrite_target_spacing is None or all([isinstance(i, float) for i in overwrite_target_spacing]), \\\n            'if overwrite_target_spacing is used then three floats must be given (as list or tuple)'\n\n        self.plans = None\n\n    def determine_reader_writer(self):\n        example_image = self.dataset[self.dataset.keys().__iter__().__next__()]['images'][0]\n        return determine_reader_writer_from_dataset_json(self.dataset_json, example_image)\n\n    @staticmethod\n    @lru_cache(maxsize=None)\n    def static_estimate_VRAM_usage(patch_size: Tuple[int],\n                                   n_stages: int,\n                                   strides: Union[int, List[int], Tuple[int, ...]],\n                                   UNet_class: Union[Type[PlainConvUNet], Type[ResidualEncoderUNet]],\n                                   num_input_channels: int,\n                                   features_per_stage: Tuple[int],\n                                   blocks_per_stage_encoder: Union[int, Tuple[int]],\n                                   blocks_per_stage_decoder: Union[int, Tuple[int]],\n                                   num_labels: int):\n        \"\"\"\n        Works for PlainConvUNet, ResidualEncoderUNet\n        \"\"\"\n        dim = len(patch_size)\n        conv_op = convert_dim_to_conv_op(dim)\n        norm_op = 
get_matching_instancenorm(conv_op)\n        net = UNet_class(num_input_channels, n_stages,\n                         features_per_stage,\n                         conv_op,\n                         3,\n                         strides,\n                         blocks_per_stage_encoder,\n                         num_labels,\n                         blocks_per_stage_decoder,\n                         norm_op=norm_op)\n        return net.compute_conv_feature_map_size(patch_size)\n\n    def determine_resampling(self, *args, **kwargs):\n        \"\"\"\n        returns what functions to use for resampling data and seg, respectively. Also returns kwargs\n        resampling function must be callable(data, current_spacing, new_spacing, **kwargs)\n\n        determine_resampling is called within get_plans_for_configuration to allow for different functions for each\n        configuration\n        \"\"\"\n        resampling_data = resample_data_or_seg_to_shape\n        resampling_data_kwargs = {\n            \"is_seg\": False,\n            \"order\": 3,\n            \"order_z\": 0,\n            \"force_separate_z\": None,\n        }\n        resampling_seg = resample_data_or_seg_to_shape\n        resampling_seg_kwargs = {\n            \"is_seg\": True,\n            \"order\": 1,\n            \"order_z\": 0,\n            \"force_separate_z\": None,\n        }\n        return resampling_data, resampling_data_kwargs, resampling_seg, resampling_seg_kwargs\n\n    def determine_segmentation_softmax_export_fn(self, *args, **kwargs):\n        \"\"\"\n        function must be callable(data, new_shape, current_spacing, new_spacing, **kwargs). The new_shape should be\n        used as target. 
current_spacing and new_spacing are merely there in case we want to use it somehow\n\n        determine_segmentation_softmax_export_fn is called within get_plans_for_configuration to allow for different\n        functions for each configuration\n\n        \"\"\"\n        resampling_fn = resample_data_or_seg_to_shape\n        resampling_fn_kwargs = {\n            \"is_seg\": False,\n            \"order\": 1,\n            \"order_z\": 0,\n            \"force_separate_z\": None,\n        }\n        return resampling_fn, resampling_fn_kwargs\n\n    def determine_fullres_target_spacing(self) -> np.ndarray:\n        \"\"\"\n        per default we use the 50th percentile=median for the target spacing. Higher spacing results in smaller data\n        and thus faster and easier training. Smaller spacing results in larger data and thus longer and harder training\n\n        For some datasets the median is not a good choice. Those are the datasets where the spacing is very anisotropic\n        (for example ACDC with (10, 1.5, 1.5)). These datasets still have examples with a spacing of 5 or 6 mm in the low\n        resolution axis. Choosing the median here will result in bad interpolation artifacts that can substantially\n        impact performance (due to the low number of slices).\n        \"\"\"\n        if self.overwrite_target_spacing is not None:\n            return np.array(self.overwrite_target_spacing)\n\n        spacings = self.dataset_fingerprint['spacings']\n        sizes = self.dataset_fingerprint['shapes_after_crop']\n\n        target = np.percentile(np.vstack(spacings), 50, 0)\n\n        # todo sizes_after_resampling = [compute_new_shape(j, i, target) for i, j in zip(spacings, sizes)]\n\n        target_size = np.percentile(np.vstack(sizes), 50, 0)\n        # we need to identify datasets for which a different target spacing could be beneficial. 
These datasets have\n        # the following properties:\n        # - one axis which much lower resolution than the others\n        # - the lowres axis has much less voxels than the others\n        # - (the size in mm of the lowres axis is also reduced)\n        worst_spacing_axis = np.argmax(target)\n        other_axes = [i for i in range(len(target)) if i != worst_spacing_axis]\n        other_spacings = [target[i] for i in other_axes]\n        other_sizes = [target_size[i] for i in other_axes]\n\n        has_aniso_spacing = target[worst_spacing_axis] > (self.anisotropy_threshold * max(other_spacings))\n        has_aniso_voxels = target_size[worst_spacing_axis] * self.anisotropy_threshold < min(other_sizes)\n\n        if has_aniso_spacing and has_aniso_voxels:\n            spacings_of_that_axis = np.vstack(spacings)[:, worst_spacing_axis]\n            target_spacing_of_that_axis = np.percentile(spacings_of_that_axis, 10)\n            # don't let the spacing of that axis get higher than the other axes\n            if target_spacing_of_that_axis < max(other_spacings):\n                target_spacing_of_that_axis = max(max(other_spacings), target_spacing_of_that_axis) + 1e-5\n            target[worst_spacing_axis] = target_spacing_of_that_axis\n        return target\n\n    def determine_normalization_scheme_and_whether_mask_is_used_for_norm(self) -> Tuple[List[str], List[bool]]:\n        if 'channel_names' not in self.dataset_json.keys():\n            print('WARNING: \"modalities\" should be renamed to \"channel_names\" in dataset.json. 
This will be '\n                  'enforced soon!')\n        modalities = self.dataset_json['channel_names'] if 'channel_names' in self.dataset_json.keys() else \\\n            self.dataset_json['modality']\n        normalization_schemes = [get_normalization_scheme(m) for m in modalities.values()]\n        if self.dataset_fingerprint['median_relative_size_after_cropping'] < (3 / 4.):\n            use_nonzero_mask_for_norm = [i.leaves_pixels_outside_mask_at_zero_if_use_mask_for_norm_is_true for i in\n                                         normalization_schemes]\n        else:\n            use_nonzero_mask_for_norm = [False] * len(normalization_schemes)\n            assert all([i in (True, False) for i in use_nonzero_mask_for_norm]), 'use_nonzero_mask_for_norm must be ' \\\n                                                                                 'True or False and cannot be None'\n        normalization_schemes = [i.__name__ for i in normalization_schemes]\n        return normalization_schemes, use_nonzero_mask_for_norm\n\n    def determine_transpose(self):\n        if self.suppress_transpose:\n            return [0, 1, 2], [0, 1, 2]\n\n        # todo we should use shapes for that as well. 
Not quite sure how yet\n        target_spacing = self.determine_fullres_target_spacing()\n\n        max_spacing_axis = np.argmax(target_spacing)\n        remaining_axes = [i for i in list(range(3)) if i != max_spacing_axis]\n        transpose_forward = [max_spacing_axis] + remaining_axes\n        transpose_backward = [np.argwhere(np.array(transpose_forward) == i)[0][0] for i in range(3)]\n        return transpose_forward, transpose_backward\n\n    def get_plans_for_configuration(self,\n                                    spacing: Union[np.ndarray, Tuple[float, ...], List[float]],\n                                    median_shape: Union[np.ndarray, Tuple[int, ...], List[int]],\n                                    data_identifier: str,\n                                    approximate_n_voxels_dataset: float) -> dict:\n        assert all([i > 0 for i in spacing]), f\"Spacing must be > 0! Spacing: {spacing}\"\n        # print(spacing, median_shape, approximate_n_voxels_dataset)\n        # find an initial patch size\n        # we first use the spacing to get an aspect ratio\n        tmp = 1 / np.array(spacing)\n\n        # we then upscale it so that it initially is certainly larger than what we need (rescale to have the same\n        # volume as a patch of size 256 ** 3)\n        # this may need to be adapted when using absurdly large GPU memory targets. Increasing this now would not be\n        # ideal because large initial patch sizes increase computation time because more iterations in the while loop\n        # further down may be required.\n        if len(spacing) == 3:\n            initial_patch_size = [round(i) for i in tmp * (256 ** 3 / np.prod(tmp)) ** (1 / 3)]\n        elif len(spacing) == 2:\n            initial_patch_size = [round(i) for i in tmp * (2048 ** 2 / np.prod(tmp)) ** (1 / 2)]\n        else:\n            raise RuntimeError()\n\n        # clip initial patch size to median_shape. It makes little sense to have it be larger than that. 
Note that\n        # this is different from how nnU-Net v1 does it!\n        # todo patch size can still get too large because we pad the patch size to a multiple of 2**n\n        initial_patch_size = np.array([min(i, j) for i, j in zip(initial_patch_size, median_shape[:len(spacing)])])\n\n        # use that to get the network topology. Note that this changes the patch_size depending on the number of\n        # pooling operations (must be divisible by 2**num_pool in each axis)\n        network_num_pool_per_axis, pool_op_kernel_sizes, conv_kernel_sizes, patch_size, \\\n        shape_must_be_divisible_by = get_pool_and_conv_props(spacing, initial_patch_size,\n                                                             self.UNet_featuremap_min_edge_length,\n                                                             999999)\n\n        # now estimate vram consumption\n        num_stages = len(pool_op_kernel_sizes)\n        estimate = self.static_estimate_VRAM_usage(tuple(patch_size),\n                                                   num_stages,\n                                                   tuple([tuple(i) for i in pool_op_kernel_sizes]),\n                                                   self.UNet_class,\n                                                   len(self.dataset_json['channel_names'].keys()\n                                                       if 'channel_names' in self.dataset_json.keys()\n                                                       else self.dataset_json['modality'].keys()),\n                                                   tuple([min(self.UNet_max_features_2d if len(patch_size) == 2 else\n                                                              self.UNet_max_features_3d,\n                                                              self.UNet_reference_com_nfeatures * 2 ** i) for\n                                                          i in range(len(pool_op_kernel_sizes))]),\n                                                
   self.UNet_blocks_per_stage_encoder[:num_stages],\n                                                   self.UNet_blocks_per_stage_decoder[:num_stages - 1],\n                                                   len(self.dataset_json['labels'].keys()))\n\n        # how large is the reference for us here (batch size etc)?\n        # adapt for our vram target\n        reference = (self.UNet_reference_val_2d if len(spacing) == 2 else self.UNet_reference_val_3d) * \\\n                    (self.UNet_vram_target_GB / self.UNet_reference_val_corresp_GB)\n\n        while estimate > reference:\n            # print(patch_size)\n            # patch size seems to be too large, so we need to reduce it. Reduce the axis that currently violates the\n            # aspect ratio the most (that is the largest relative to median shape)\n            axis_to_be_reduced = np.argsort(patch_size / median_shape[:len(spacing)])[-1]\n\n            # we cannot simply reduce that axis by shape_must_be_divisible_by[axis_to_be_reduced] because this\n            # may cause us to skip some valid sizes, for example shape_must_be_divisible_by is 64 for a shape of 256.\n            # If we subtracted that we would end up with 192, skipping 224 which is also a valid patch size\n            # (224 / 2**5 = 7; 7 < 2 * self.UNet_featuremap_min_edge_length(4) so it's valid). So we need to first\n            # subtract shape_must_be_divisible_by, then recompute it and then subtract the\n            # recomputed shape_must_be_divisible_by. 
Annoying.\n            tmp = deepcopy(patch_size)\n            tmp[axis_to_be_reduced] -= shape_must_be_divisible_by[axis_to_be_reduced]\n            _, _, _, _, shape_must_be_divisible_by = \\\n                get_pool_and_conv_props(spacing, tmp,\n                                        self.UNet_featuremap_min_edge_length,\n                                        999999)\n            patch_size[axis_to_be_reduced] -= shape_must_be_divisible_by[axis_to_be_reduced]\n\n            # now recompute topology\n            network_num_pool_per_axis, pool_op_kernel_sizes, conv_kernel_sizes, patch_size, \\\n            shape_must_be_divisible_by = get_pool_and_conv_props(spacing, patch_size,\n                                                                 self.UNet_featuremap_min_edge_length,\n                                                                 999999)\n\n            num_stages = len(pool_op_kernel_sizes)\n            estimate = self.static_estimate_VRAM_usage(tuple(patch_size),\n                                                       num_stages,\n                                                       tuple([tuple(i) for i in pool_op_kernel_sizes]),\n                                                       self.UNet_class,\n                                                       len(self.dataset_json['channel_names'].keys()\n                                                           if 'channel_names' in self.dataset_json.keys()\n                                                           else self.dataset_json['modality'].keys()),\n                                                       tuple([min(self.UNet_max_features_2d if len(patch_size) == 2 else\n                                                                  self.UNet_max_features_3d,\n                                                                  self.UNet_reference_com_nfeatures * 2 ** i) for\n                                                              i in range(len(pool_op_kernel_sizes))]),\n    
                                                   self.UNet_blocks_per_stage_encoder[:num_stages],\n                                                       self.UNet_blocks_per_stage_decoder[:num_stages - 1],\n                                                       len(self.dataset_json['labels'].keys()))\n\n        # alright now let's determine the batch size. This will give self.UNet_min_batch_size if the while loop was\n        # executed. If not, additional vram headroom is used to increase batch size\n        ref_bs = self.UNet_reference_val_corresp_bs_2d if len(spacing) == 2 else self.UNet_reference_val_corresp_bs_3d\n        batch_size = round((reference / estimate) * ref_bs)\n\n        # we need to cap the batch size to cover at most 5% of the entire dataset. Overfitting precaution. We cannot\n        # go smaller than self.UNet_min_batch_size though\n        bs_corresponding_to_5_percent = round(\n            approximate_n_voxels_dataset * 0.05 / np.prod(patch_size, dtype=np.float64))\n        batch_size = max(min(batch_size, bs_corresponding_to_5_percent), self.UNet_min_batch_size)\n\n        resampling_data, resampling_data_kwargs, resampling_seg, resampling_seg_kwargs = self.determine_resampling()\n        resampling_softmax, resampling_softmax_kwargs = self.determine_segmentation_softmax_export_fn()\n\n        normalization_schemes, mask_is_used_for_norm = \\\n            self.determine_normalization_scheme_and_whether_mask_is_used_for_norm()\n        num_stages = len(pool_op_kernel_sizes)\n        plan = {\n            'data_identifier': data_identifier,\n            'preprocessor_name': self.preprocessor_name,\n            'batch_size': batch_size,\n            'patch_size': patch_size,\n            'median_image_size_in_voxels': median_shape,\n            'spacing': spacing,\n            'normalization_schemes': normalization_schemes,\n            'use_mask_for_norm': mask_is_used_for_norm,\n            'UNet_class_name': self.UNet_class.__name__,\n  
          'UNet_base_num_features': self.UNet_base_num_features,\n            'n_conv_per_stage_encoder': self.UNet_blocks_per_stage_encoder[:num_stages],\n            'n_conv_per_stage_decoder': self.UNet_blocks_per_stage_decoder[:num_stages - 1],\n            'num_pool_per_axis': network_num_pool_per_axis,\n            'pool_op_kernel_sizes': pool_op_kernel_sizes,\n            'conv_kernel_sizes': conv_kernel_sizes,\n            'unet_max_num_features': self.UNet_max_features_3d if len(spacing) == 3 else self.UNet_max_features_2d,\n            'resampling_fn_data': resampling_data.__name__,\n            'resampling_fn_seg': resampling_seg.__name__,\n            'resampling_fn_data_kwargs': resampling_data_kwargs,\n            'resampling_fn_seg_kwargs': resampling_seg_kwargs,\n            'resampling_fn_probabilities': resampling_softmax.__name__,\n            'resampling_fn_probabilities_kwargs': resampling_softmax_kwargs,\n        }\n        return plan\n\n    def plan_experiment(self):\n        \"\"\"\n        MOVE EVERYTHING INTO THE PLANS. MAXIMUM FLEXIBILITY\n\n        Ideally I would like to move transpose_forward/backward into the configurations so that this can also be done\n        differently for each configuration but this would cause problems with identifying the correct axes for 2d. There\n        surely is a way around that but eh. I'm feeling lazy and featuritis must also not be pushed to the extremes.\n\n        So for now if you want a different transpose_forward/backward you need to create a new planner. 
Also not too\n        hard.\n        \"\"\"\n\n        # first get transpose\n        transpose_forward, transpose_backward = self.determine_transpose()\n\n        # get fullres spacing and transpose it\n        fullres_spacing = self.determine_fullres_target_spacing()\n        fullres_spacing_transposed = fullres_spacing[transpose_forward]\n\n        # get transposed new median shape (what we would have after resampling)\n        new_shapes = [compute_new_shape(j, i, fullres_spacing) for i, j in\n                      zip(self.dataset_fingerprint['spacings'], self.dataset_fingerprint['shapes_after_crop'])]\n        new_median_shape = np.median(new_shapes, 0)\n        new_median_shape_transposed = new_median_shape[transpose_forward]\n\n        approximate_n_voxels_dataset = float(np.prod(new_median_shape_transposed, dtype=np.float64) *\n                                             self.dataset_json['numTraining'])\n        # only run 3d if this is a 3d dataset\n        if new_median_shape_transposed[0] != 1:\n            plan_3d_fullres = self.get_plans_for_configuration(fullres_spacing_transposed,\n                                                               new_median_shape_transposed,\n                                                               self.generate_data_identifier('3d_fullres'),\n                                                               approximate_n_voxels_dataset)\n            # maybe add 3d_lowres as well\n            patch_size_fullres = plan_3d_fullres['patch_size']\n            median_num_voxels = np.prod(new_median_shape_transposed, dtype=np.float64)\n            num_voxels_in_patch = np.prod(patch_size_fullres, dtype=np.float64)\n\n            plan_3d_lowres = None\n            lowres_spacing = deepcopy(plan_3d_fullres['spacing'])\n\n            spacing_increase_factor = 1.03  # used to be 1.01 but that is slow with new GPU memory estimation!\n\n            while num_voxels_in_patch / median_num_voxels < 
self.lowres_creation_threshold:\n                # we incrementally increase the target spacing. We start with the anisotropic axis/axes until it/they\n                # is/are similar (factor 2) to the other ax(i/e)s.\n                max_spacing = max(lowres_spacing)\n                if np.any((max_spacing / lowres_spacing) > 2):\n                    lowres_spacing[(max_spacing / lowres_spacing) > 2] *= spacing_increase_factor\n                else:\n                    lowres_spacing *= spacing_increase_factor\n                median_num_voxels = np.prod(plan_3d_fullres['spacing'] / lowres_spacing * new_median_shape_transposed,\n                                            dtype=np.float64)\n                # print(lowres_spacing)\n                plan_3d_lowres = self.get_plans_for_configuration(lowres_spacing,\n                                                                  [round(i) for i in plan_3d_fullres['spacing'] /\n                                                                   lowres_spacing * new_median_shape_transposed],\n                                                                  self.generate_data_identifier('3d_lowres'),\n                                                                  float(np.prod(median_num_voxels) *\n                                                                        self.dataset_json['numTraining']))\n                num_voxels_in_patch = np.prod(plan_3d_lowres['patch_size'], dtype=np.int64)\n                print(f'Attempting to find 3d_lowres config. '\n                      f'\\nCurrent spacing: {lowres_spacing}. '\n                      f'\\nCurrent patch size: {plan_3d_lowres[\"patch_size\"]}. 
'\n                      f'\\nCurrent median shape: {plan_3d_fullres[\"spacing\"] / lowres_spacing * new_median_shape_transposed}')\n            if plan_3d_lowres is not None:\n                plan_3d_lowres['batch_dice'] = False\n                plan_3d_fullres['batch_dice'] = True\n            else:\n                plan_3d_fullres['batch_dice'] = False\n        else:\n            plan_3d_fullres = None\n            plan_3d_lowres = None\n\n        # 2D configuration\n        plan_2d = self.get_plans_for_configuration(fullres_spacing_transposed[1:],\n                                                   new_median_shape_transposed[1:],\n                                                   self.generate_data_identifier('2d'), approximate_n_voxels_dataset)\n        plan_2d['batch_dice'] = True\n\n        print('2D U-Net configuration:')\n        print(plan_2d)\n        print()\n\n        # median spacing and shape, just for reference when printing the plans\n        median_spacing = np.median(self.dataset_fingerprint['spacings'], 0)[transpose_forward]\n        median_shape = np.median(self.dataset_fingerprint['shapes_after_crop'], 0)[transpose_forward]\n\n        # instead of writing all that into the plans we just copy the original file. More files, but less crowded\n        # per file.\n        shutil.copy(join(self.raw_dataset_folder, 'dataset.json'),\n                    join(nnUNet_preprocessed, self.dataset_name, 'dataset.json'))\n\n        # json is stupid and I hate it... 
\"Object of type int64 is not JSON serializable\" -> my ass\n        plans = {\n            'dataset_name': self.dataset_name,\n            'plans_name': self.plans_identifier,\n            'original_median_spacing_after_transp': [float(i) for i in median_spacing],\n            'original_median_shape_after_transp': [int(round(i)) for i in median_shape],\n            'image_reader_writer': self.determine_reader_writer().__name__,\n            'transpose_forward': [int(i) for i in transpose_forward],\n            'transpose_backward': [int(i) for i in transpose_backward],\n            'configurations': {'2d': plan_2d},\n            'experiment_planner_used': self.__class__.__name__,\n            'label_manager': 'LabelManager',\n            'foreground_intensity_properties_per_channel': self.dataset_fingerprint[\n                'foreground_intensity_properties_per_channel']\n        }\n\n        if plan_3d_lowres is not None:\n            plans['configurations']['3d_lowres'] = plan_3d_lowres\n            if plan_3d_fullres is not None:\n                plans['configurations']['3d_lowres']['next_stage'] = '3d_cascade_fullres'\n            print('3D lowres U-Net configuration:')\n            print(plan_3d_lowres)\n            print()\n        if plan_3d_fullres is not None:\n            plans['configurations']['3d_fullres'] = plan_3d_fullres\n            print('3D fullres U-Net configuration:')\n            print(plan_3d_fullres)\n            print()\n            if plan_3d_lowres is not None:\n                plans['configurations']['3d_cascade_fullres'] = {\n                    'inherits_from': '3d_fullres',\n                    'previous_stage': '3d_lowres'\n                }\n\n        self.plans = plans\n        self.save_plans(plans)\n        return plans\n\n    def save_plans(self, plans):\n        recursive_fix_for_json_export(plans)\n\n        plans_file = join(nnUNet_preprocessed, self.dataset_name, self.plans_identifier + '.json')\n\n        # we don't want 
to overwrite potentially existing custom configurations every time this is executed. So let's\n        # read the plans file if it already exists and keep any non-default configurations\n        if isfile(plans_file):\n            old_plans = load_json(plans_file)\n            old_configurations = old_plans['configurations']\n            for c in plans['configurations'].keys():\n                if c in old_configurations.keys():\n                    del (old_configurations[c])\n            plans['configurations'].update(old_configurations)\n\n        maybe_mkdir_p(join(nnUNet_preprocessed, self.dataset_name))\n        save_json(plans, plans_file, sort_keys=False)\n        print(f\"Plans were saved to {join(nnUNet_preprocessed, self.dataset_name, self.plans_identifier + '.json')}\")\n\n    def generate_data_identifier(self, configuration_name: str) -> str:\n        \"\"\"\n        configurations are unique within each plans file but different plans file can have configurations with the\n        same name. In order to distinguish the associated data we need a data identifier that reflects not just the\n        config but also the plans it originates from\n        \"\"\"\n        return self.plans_identifier + '_' + configuration_name\n\n    def load_plans(self, fname: str):\n        self.plans = load_json(fname)\n\n\nif __name__ == '__main__':\n    ExperimentPlanner(2, 8).plan_experiment()\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/experiment_planning/experiment_planners/network_topology.py",
    "content": "from copy import deepcopy\nimport numpy as np\n\n\ndef get_shape_must_be_divisible_by(net_numpool_per_axis):\n    return 2 ** np.array(net_numpool_per_axis)\n\n\ndef pad_shape(shape, must_be_divisible_by):\n    \"\"\"\n    pads shape so that it is divisible by must_be_divisible_by\n    :param shape:\n    :param must_be_divisible_by:\n    :return:\n    \"\"\"\n    if not isinstance(must_be_divisible_by, (tuple, list, np.ndarray)):\n        must_be_divisible_by = [must_be_divisible_by] * len(shape)\n    else:\n        assert len(must_be_divisible_by) == len(shape)\n\n    new_shp = [shape[i] + must_be_divisible_by[i] - shape[i] % must_be_divisible_by[i] for i in range(len(shape))]\n\n    for i in range(len(shape)):\n        if shape[i] % must_be_divisible_by[i] == 0:\n            new_shp[i] -= must_be_divisible_by[i]\n    new_shp = np.array(new_shp).astype(int)\n    return new_shp\n\n\ndef get_pool_and_conv_props(spacing, patch_size, min_feature_map_size, max_numpool):\n    \"\"\"\n    this is the same as get_pool_and_conv_props_v2 from old nnunet\n\n    :param spacing:\n    :param patch_size:\n    :param min_feature_map_size: min edge length of feature maps in bottleneck\n    :param max_numpool:\n    :return:\n    \"\"\"\n    # todo review this code\n    dim = len(spacing)\n\n    current_spacing = deepcopy(list(spacing))\n    current_size = deepcopy(list(patch_size))\n\n    pool_op_kernel_sizes = [[1] * len(spacing)]\n    conv_kernel_sizes = []\n\n    num_pool_per_axis = [0] * dim\n    kernel_size = [1] * dim\n\n    while True:\n        # exclude axes that we cannot pool further because of min_feature_map_size constraint\n        valid_axes_for_pool = [i for i in range(dim) if current_size[i] >= 2*min_feature_map_size]\n        if len(valid_axes_for_pool) < 1:\n            break\n\n        spacings_of_axes = [current_spacing[i] for i in valid_axes_for_pool]\n\n        # find axis that are within factor of 2 within smallest spacing\n        
min_spacing_of_valid = min(spacings_of_axes)\n        valid_axes_for_pool = [i for i in valid_axes_for_pool if current_spacing[i] / min_spacing_of_valid < 2]\n\n        # max_numpool constraint\n        valid_axes_for_pool = [i for i in valid_axes_for_pool if num_pool_per_axis[i] < max_numpool]\n\n        if len(valid_axes_for_pool) == 1:\n            if current_size[valid_axes_for_pool[0]] >= 3 * min_feature_map_size:\n                pass\n            else:\n                break\n        if len(valid_axes_for_pool) < 1:\n            break\n\n        # now we need to find kernel sizes\n        # kernel sizes are initialized to 1. They are successively set to 3 when their associated axis becomes within\n        # factor 2 of min_spacing. Once they are 3 they remain 3\n        for d in range(dim):\n            if kernel_size[d] == 3:\n                continue\n            else:\n                if current_spacing[d] / min(current_spacing) < 2:\n                    kernel_size[d] = 3\n\n        other_axes = [i for i in range(dim) if i not in valid_axes_for_pool]\n\n        pool_kernel_sizes = [0] * dim\n        for v in valid_axes_for_pool:\n            pool_kernel_sizes[v] = 2\n            num_pool_per_axis[v] += 1\n            current_spacing[v] *= 2\n            current_size[v] = np.ceil(current_size[v] / 2)\n        for nv in other_axes:\n            pool_kernel_sizes[nv] = 1\n\n        pool_op_kernel_sizes.append(pool_kernel_sizes)\n        conv_kernel_sizes.append(deepcopy(kernel_size))\n        #print(conv_kernel_sizes)\n\n    must_be_divisible_by = get_shape_must_be_divisible_by(num_pool_per_axis)\n    patch_size = pad_shape(patch_size, must_be_divisible_by)\n\n    # we need to add one more conv_kernel_size for the bottleneck. We always use 3x3(x3) conv here\n    conv_kernel_sizes.append([3]*dim)\n    return num_pool_per_axis, pool_op_kernel_sizes, conv_kernel_sizes, patch_size, must_be_divisible_by\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/experiment_planning/experiment_planners/readme.md",
    "content": "What do experiment planners need to do (these are notes for myself while rewriting nnU-Net, they are provided as is \nwithout further explanations. These notes also include new features):\n- (done) preprocessor name should be configurable via cli\n- (done) gpu memory target should be configurable via cli\n- (done) plans name should be configurable via cli\n- (done) data name should be specified in plans (plans specify the data they want to use, this will allow us to manually \n  edit plans files without having to copy the data folders)\n- plans must contain:\n    - (done) transpose forward/backward\n    - (done) preprocessor name (can differ for each config)\n    - (done) spacing\n    - (done) normalization scheme\n    - (done) target spacing\n    - (done) conv and pool op kernel sizes\n    - (done) base num features for architecture\n    - (done) data identifier\n    - num conv per stage?\n    - (done) use mask for norm\n    - [NO. Handled by LabelManager & dataset.json] num segmentation outputs\n    - [NO. Handled by LabelManager & dataset.json] ignore class\n    - [NO. Handled by LabelManager & dataset.json] list of regions or classes\n    - [NO. Handled by LabelManager & dataset.json] regions class order, if applicable\n    - (done) resampling function to be used\n    - (done) the image reader writer class that should be used\n\n\ndataset.json\nmandatory:\n- numTraining\n- labels (value 'ignore' has special meaning. Cannot have more than one ignore_label)\n- modalities\n- file_ending\n\noptional\n- overwrite_image_reader_writer (if absent, auto)\n- regions\n- region_class_order\n- "
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/experiment_planning/experiment_planners/resencUNet_planner.py",
    "content": "from typing import Union, List, Tuple\n\nfrom torch import nn\n\nfrom nnunetv2.experiment_planning.experiment_planners.default_experiment_planner import ExperimentPlanner\nfrom dynamic_network_architectures.architectures.unet import ResidualEncoderUNet\n\n\nclass ResEncUNetPlanner(ExperimentPlanner):\n    def __init__(self, dataset_name_or_id: Union[str, int],\n                 gpu_memory_target_in_gb: float = 8,\n                 preprocessor_name: str = 'DefaultPreprocessor', plans_name: str = 'nnUNetResEncUNetPlans',\n                 overwrite_target_spacing: Union[List[float], Tuple[float, ...]] = None,\n                 suppress_transpose: bool = False):\n        super().__init__(dataset_name_or_id, gpu_memory_target_in_gb, preprocessor_name, plans_name,\n                         overwrite_target_spacing, suppress_transpose)\n\n        self.UNet_base_num_features = 32\n        self.UNet_class = ResidualEncoderUNet\n        # the following two numbers are really arbitrary and were set to reproduce default nnU-Net's configurations as\n        # much as possible\n        self.UNet_reference_val_3d = 680000000\n        self.UNet_reference_val_2d = 135000000\n        self.UNet_reference_com_nfeatures = 32\n        self.UNet_reference_val_corresp_GB = 8\n        self.UNet_reference_val_corresp_bs_2d = 12\n        self.UNet_reference_val_corresp_bs_3d = 2\n        self.UNet_featuremap_min_edge_length = 4\n        self.UNet_blocks_per_stage_encoder = (1, 3, 4, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6)\n        self.UNet_blocks_per_stage_decoder = (1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1)\n        self.UNet_min_batch_size = 2\n        self.UNet_max_features_2d = 512\n        self.UNet_max_features_3d = 320\n\n\nif __name__ == '__main__':\n    # we know both of these networks run with batch size 2 and 12 on ~8-10GB, respectively\n    net = ResidualEncoderUNet(input_channels=1, n_stages=6, features_per_stage=(32, 64, 128, 256, 320, 320),\n                              
conv_op=nn.Conv3d, kernel_sizes=3, strides=(1, 2, 2, 2, 2, 2),\n                              n_blocks_per_stage=(1, 3, 4, 6, 6, 6), num_classes=3,\n                              n_conv_per_stage_decoder=(1, 1, 1, 1, 1),\n                              conv_bias=True, norm_op=nn.InstanceNorm3d, norm_op_kwargs={}, dropout_op=None,\n                              nonlin=nn.LeakyReLU, nonlin_kwargs={'inplace': True}, deep_supervision=True)\n    print(net.compute_conv_feature_map_size((128, 128, 128)))  # -> 558319104. The value you see above was finetuned\n    # from this one to match the regular nnunetplans more closely\n\n    net = ResidualEncoderUNet(input_channels=1, n_stages=7, features_per_stage=(32, 64, 128, 256, 512, 512, 512),\n                              conv_op=nn.Conv2d, kernel_sizes=3, strides=(1, 2, 2, 2, 2, 2, 2),\n                              n_blocks_per_stage=(1, 3, 4, 6, 6, 6, 6), num_classes=3,\n                              n_conv_per_stage_decoder=(1, 1, 1, 1, 1, 1),\n                              conv_bias=True, norm_op=nn.InstanceNorm2d, norm_op_kwargs={}, dropout_op=None,\n                              nonlin=nn.LeakyReLU, nonlin_kwargs={'inplace': True}, deep_supervision=True)\n    print(net.compute_conv_feature_map_size((512, 512)))  # -> 129793792\n\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/experiment_planning/plan_and_preprocess_api.py",
    "content": "import shutil\nfrom typing import List, Type, Optional, Tuple, Union\n\nimport nnunetv2\nfrom batchgenerators.utilities.file_and_folder_operations import join, maybe_mkdir_p, subfiles, load_json\n\nfrom nnunetv2.experiment_planning.dataset_fingerprint.fingerprint_extractor import DatasetFingerprintExtractor\nfrom nnunetv2.experiment_planning.experiment_planners.default_experiment_planner import ExperimentPlanner\nfrom nnunetv2.experiment_planning.verify_dataset_integrity import verify_dataset_integrity\nfrom nnunetv2.paths import nnUNet_raw, nnUNet_preprocessed\nfrom nnunetv2.utilities.dataset_name_id_conversion import convert_id_to_dataset_name, maybe_convert_to_dataset_name\nfrom nnunetv2.utilities.find_class_by_name import recursive_find_python_class\nfrom nnunetv2.utilities.plans_handling.plans_handler import PlansManager\nfrom nnunetv2.configuration import default_num_processes\nfrom nnunetv2.utilities.utils import get_filenames_of_train_images_and_targets\n\n\ndef extract_fingerprint_dataset(dataset_id: int,\n                                fingerprint_extractor_class: Type[\n                                    DatasetFingerprintExtractor] = DatasetFingerprintExtractor,\n                                num_processes: int = default_num_processes, check_dataset_integrity: bool = False,\n                                clean: bool = True, verbose: bool = True):\n    \"\"\"\n    Returns the fingerprint as a dictionary (additionally to saving it)\n    \"\"\"\n    dataset_name = convert_id_to_dataset_name(dataset_id)\n    print(dataset_name)\n\n    if check_dataset_integrity:\n        verify_dataset_integrity(join(nnUNet_raw, dataset_name), num_processes)\n\n    fpe = fingerprint_extractor_class(dataset_id, num_processes, verbose=verbose)\n    return fpe.run(overwrite_existing=clean)\n\n\ndef extract_fingerprints(dataset_ids: List[int], fingerprint_extractor_class_name: str = 'DatasetFingerprintExtractor',\n                         num_processes: 
int = default_num_processes, check_dataset_integrity: bool = False,\n                         clean: bool = True, verbose: bool = True):\n    \"\"\"\n    clean = False will not actually run this. This is just a switch for use with nnUNetv2_plan_and_preprocess where\n    we don't want to rerun fingerprint extraction every time.\n    \"\"\"\n    fingerprint_extractor_class = recursive_find_python_class(join(nnunetv2.__path__[0], \"experiment_planning\"),\n                                                              fingerprint_extractor_class_name,\n                                                              current_module=\"nnunetv2.experiment_planning\")\n    for d in dataset_ids:\n        extract_fingerprint_dataset(d, fingerprint_extractor_class, num_processes, check_dataset_integrity, clean,\n                                    verbose)\n\n\ndef plan_experiment_dataset(dataset_id: int,\n                            experiment_planner_class: Type[ExperimentPlanner] = ExperimentPlanner,\n                            gpu_memory_target_in_gb: float = 8, preprocess_class_name: str = 'DefaultPreprocessor',\n                            overwrite_target_spacing: Optional[Tuple[float, ...]] = None,\n                            overwrite_plans_name: Optional[str] = None) -> dict:\n    \"\"\"\n    overwrite_target_spacing ONLY applies to 3d_fullres and 3d_cascade fullres!\n    \"\"\"\n    kwargs = {}\n    if overwrite_plans_name is not None:\n        kwargs['plans_name'] = overwrite_plans_name\n    return experiment_planner_class(dataset_id,\n                                    gpu_memory_target_in_gb=gpu_memory_target_in_gb,\n                                    preprocessor_name=preprocess_class_name,\n                                    overwrite_target_spacing=[float(i) for i in overwrite_target_spacing] if\n                                    overwrite_target_spacing is not None else overwrite_target_spacing,\n                                    
suppress_transpose=False,  # might expose this later,\n                                    **kwargs\n                                    ).plan_experiment()\n\n\ndef plan_experiments(dataset_ids: List[int], experiment_planner_class_name: str = 'ExperimentPlanner',\n                     gpu_memory_target_in_gb: float = 8, preprocess_class_name: str = 'DefaultPreprocessor',\n                     overwrite_target_spacing: Optional[Tuple[float, ...]] = None,\n                     overwrite_plans_name: Optional[str] = None):\n    \"\"\"\n    overwrite_target_spacing ONLY applies to 3d_fullres and 3d_cascade fullres!\n    \"\"\"\n    experiment_planner = recursive_find_python_class(join(nnunetv2.__path__[0], \"experiment_planning\"),\n                                                     experiment_planner_class_name,\n                                                     current_module=\"nnunetv2.experiment_planning\")\n    for d in dataset_ids:\n        plan_experiment_dataset(d, experiment_planner, gpu_memory_target_in_gb, preprocess_class_name,\n                                overwrite_target_spacing, overwrite_plans_name)\n\n\ndef preprocess_dataset(dataset_id: int,\n                       plans_identifier: str = 'nnUNetPlans',\n                       configurations: Union[Tuple[str], List[str]] = ('2d', '3d_fullres', '3d_lowres'),\n                       num_processes: Union[int, Tuple[int, ...], List[int]] = (8, 4, 8),\n                       verbose: bool = False) -> None:\n    if not isinstance(num_processes, list):\n        num_processes = list(num_processes)\n    if len(num_processes) == 1:\n        num_processes = num_processes * len(configurations)\n    if len(num_processes) != len(configurations):\n        raise RuntimeError(\n            f'The list provided with num_processes must either have len 1 or as many elements as there are '\n            f'configurations (see --help). 
Number of configurations: {len(configurations)}, length '\n            f'of num_processes: '\n            f'{len(num_processes)}')\n\n    dataset_name = convert_id_to_dataset_name(dataset_id)\n    print(f'Preprocessing dataset {dataset_name}')\n    plans_file = join(nnUNet_preprocessed, dataset_name, plans_identifier + '.json')\n    plans_manager = PlansManager(plans_file)\n    for n, c in zip(num_processes, configurations):\n        print(f'Configuration: {c}...')\n        if c not in plans_manager.available_configurations:\n            print(\n                f\"INFO: Configuration {c} not found in plans file {plans_identifier + '.json'} of \"\n                f\"dataset {dataset_name}. Skipping.\")\n            continue\n        configuration_manager = plans_manager.get_configuration(c)\n        preprocessor = configuration_manager.preprocessor_class(verbose=verbose)\n        preprocessor.run(dataset_id, c, plans_identifier, num_processes=n)\n\n    # copy the gt to a folder in the nnUNet_preprocessed so that we can do validation even if the raw data is no\n    # longer there (useful for compute cluster where only the preprocessed data is available)\n    from distutils.file_util import copy_file\n    maybe_mkdir_p(join(nnUNet_preprocessed, dataset_name, 'gt_segmentations'))\n    dataset_json = load_json(join(nnUNet_raw, dataset_name, 'dataset.json'))\n    dataset = get_filenames_of_train_images_and_targets(join(nnUNet_raw, dataset_name), dataset_json)\n    # only copy files that are newer than the ones already present\n    for k in dataset:\n        copy_file(dataset[k]['label'],\n                  join(nnUNet_preprocessed, dataset_name, 'gt_segmentations', k + dataset_json['file_ending']),\n                  update=True)\n\n\n\ndef preprocess(dataset_ids: List[int],\n               plans_identifier: str = 'nnUNetPlans',\n               configurations: Union[Tuple[str], List[str]] = ('2d', '3d_fullres', '3d_lowres'),\n               num_processes: Union[int, 
Tuple[int, ...], List[int]] = (8, 4, 8),\n               verbose: bool = False):\n    for d in dataset_ids:\n        preprocess_dataset(d, plans_identifier, configurations, num_processes, verbose)\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/experiment_planning/plan_and_preprocess_entrypoints.py",
    "content": "from nnunetv2.configuration import default_num_processes\nfrom nnunetv2.experiment_planning.plan_and_preprocess_api import extract_fingerprints, plan_experiments, preprocess\n\n\ndef extract_fingerprint_entry():\n    import argparse\n    parser = argparse.ArgumentParser()\n    parser.add_argument('-d', nargs='+', type=int,\n                        help=\"[REQUIRED] List of dataset IDs. Example: 2 4 5. This will run fingerprint extraction, experiment \"\n                             \"planning and preprocessing for these datasets. Can of course also be just one dataset\")\n    parser.add_argument('-fpe', type=str, required=False, default='DatasetFingerprintExtractor',\n                        help='[OPTIONAL] Name of the Dataset Fingerprint Extractor class that should be used. Default is '\n                             '\\'DatasetFingerprintExtractor\\'.')\n    parser.add_argument('-np', type=int, default=default_num_processes, required=False,\n                        help=f'[OPTIONAL] Number of processes used for fingerprint extraction. '\n                             f'Default: {default_num_processes}')\n    parser.add_argument(\"--verify_dataset_integrity\", required=False, default=False, action=\"store_true\",\n                        help=\"[RECOMMENDED] set this flag to check the dataset integrity. This is useful and should be done once for \"\n                             \"each dataset!\")\n    parser.add_argument(\"--clean\", required=False, default=False, action=\"store_true\",\n                        help='[OPTIONAL] Set this flag to overwrite existing fingerprints. If this flag is not set and a '\n                             'fingerprint already exists, the fingerprint extractor will not run.')\n    parser.add_argument('--verbose', required=False, action='store_true',\n                        help='Set this to print a lot of stuff. Useful for debugging. Will disable progress bar! 
'\n                             'Recommended for cluster environments')\n    args, unrecognized_args = parser.parse_known_args()\n    extract_fingerprints(args.d, args.fpe, args.np, args.verify_dataset_integrity, args.clean, args.verbose)\n\n\ndef plan_experiment_entry():\n    import argparse\n    parser = argparse.ArgumentParser()\n    parser.add_argument('-d', nargs='+', type=int,\n                        help=\"[REQUIRED] List of dataset IDs. Example: 2 4 5. This will run fingerprint extraction, experiment \"\n                             \"planning and preprocessing for these datasets. Can of course also be just one dataset\")\n    parser.add_argument('-pl', type=str, default='ExperimentPlanner', required=False,\n                        help='[OPTIONAL] Name of the Experiment Planner class that should be used. Default is '\n                             '\\'ExperimentPlanner\\'. Note: There is no longer a distinction between 2d and 3d planner. '\n                             'It\\'s an all in one solution now. Wuch. Such amazing.')\n    parser.add_argument('-gpu_memory_target', default=8, type=float, required=False,\n                        help='[OPTIONAL] DANGER ZONE! Sets a custom GPU memory target. Default: 8 [GB]. Changing this will '\n                             'affect patch and batch size and will '\n                             'definitely affect your models performance! Only use this if you really know what you '\n                             'are doing and NEVER use this without running the default nnU-Net first (as a baseline).')\n    parser.add_argument('-preprocessor_name', default='DefaultPreprocessor', type=str, required=False,\n                        help='[OPTIONAL] DANGER ZONE! Sets a custom preprocessor class. This class must be located in '\n                             'nnunetv2.preprocessing. Default: \\'DefaultPreprocessor\\'. Changing this may affect your '\n                             'models performance! 
Only use this if you really know what you '\n                             'are doing and NEVER use this without running the default nnU-Net first (as a baseline).')\n    parser.add_argument('-overwrite_target_spacing', default=None, nargs='+', required=False,\n                        help='[OPTIONAL] DANGER ZONE! Sets a custom target spacing for the 3d_fullres and 3d_cascade_fullres '\n                             'configurations. Default: None [no changes]. Changing this will affect image size and '\n                             'potentially patch and batch '\n                             'size. This will definitely affect your models performance! Only use this if you really '\n                             'know what you are doing and NEVER use this without running the default nnU-Net first '\n                             '(as a baseline). Changing the target spacing for the other configurations is currently '\n                             'not implemented. New target spacing must be a list of three numbers!')\n    parser.add_argument('-overwrite_plans_name', default=None, required=False,\n                        help='[OPTIONAL] DANGER ZONE! If you used -gpu_memory_target, -preprocessor_name or '\n                             '-overwrite_target_spacing it is best practice to use -overwrite_plans_name to generate a '\n                             'differently named plans file such that the nnunet default plans are not '\n                             'overwritten. 
You will then need to specify your custom plans file with -p whenever '\n                             'running other nnunet commands (training, inference etc)')\n    args, unrecognized_args = parser.parse_known_args()\n    plan_experiments(args.d, args.pl, args.gpu_memory_target, args.preprocessor_name, args.overwrite_target_spacing,\n                     args.overwrite_plans_name)\n\n\ndef preprocess_entry():\n    import argparse\n    parser = argparse.ArgumentParser()\n    parser.add_argument('-d', nargs='+', type=int,\n                        help=\"[REQUIRED] List of dataset IDs. Example: 2 4 5. This will run fingerprint extraction, experiment \"\n                             \"planning and preprocessing for these datasets. Can of course also be just one dataset\")\n    parser.add_argument('-plans_name', default='nnUNetPlans', required=False,\n                        help='[OPTIONAL] You can use this to specify a custom plans file that you may have generated')\n    parser.add_argument('-c', required=False, default=['2d', '3d_fullres', '3d_lowres'], nargs='+',\n                        help='[OPTIONAL] Configurations for which the preprocessing should be run. Default: 2d 3d_fullres '\n                             '3d_lowres. 3d_cascade_fullres does not need to be specified because it uses the data '\n                             'from 3d_fullres. Configurations that do not exist for some dataset will be skipped.')\n    parser.add_argument('-np', type=int, nargs='+', default=[8, 4, 8], required=False,\n                        help=\"[OPTIONAL] Use this to define how many processes are to be used. If this is just one number then \"\n                             \"this number of processes is used for all configurations specified with -c. If it's a \"\n                             \"list of numbers this list must have as many elements as there are configurations. 
We \"\n                             \"then iterate over zip(configs, num_processes) to determine the number of processes \"\n                             \"used for each configuration. More processes is always faster (up to the number of \"\n                             \"threads your PC can support, so 8 for a 4 core CPU with hyperthreading. If you don't \"\n                             \"know what that is then dont touch it, or at least don't increase it!). DANGER: More \"\n                             \"often than not the number of processes that can be used is limited by the amount of \"\n                             \"RAM available. Image resampling takes up a lot of RAM. MONITOR RAM USAGE AND \"\n                             \"DECREASE -np IF YOUR RAM FILLS UP TOO MUCH!. Default: 8 processes for 2d, 4 \"\n                             \"for 3d_fullres, 8 for 3d_lowres and 4 for everything else\")\n    parser.add_argument('--verbose', required=False, action='store_true',\n                        help='Set this to print a lot of stuff. Useful for debugging. Will disable progress bar! '\n                             'Recommended for cluster environments')\n    args, unrecognized_args = parser.parse_known_args()\n    if args.np is None:\n        default_np = {\n            '2d': 4,\n            '3d_lowres': 8,\n            '3d_fullres': 4\n        }\n        np = {default_np[c] if c in default_np.keys() else 4 for c in args.c}\n    else:\n        np = args.np\n    preprocess(args.d, args.plans_name, configurations=args.c, num_processes=np, verbose=args.verbose)\n\n\ndef plan_and_preprocess_entry():\n    import argparse\n    parser = argparse.ArgumentParser()\n    parser.add_argument('-d', nargs='+', type=int,\n                        help=\"[REQUIRED] List of dataset IDs. Example: 2 4 5. This will run fingerprint extraction, experiment \"\n                             \"planning and preprocessing for these datasets. 
Can of course also be just one dataset\")\n    parser.add_argument('-fpe', type=str, required=False, default='DatasetFingerprintExtractor',\n                        help='[OPTIONAL] Name of the Dataset Fingerprint Extractor class that should be used. Default is '\n                             '\\'DatasetFingerprintExtractor\\'.')\n    parser.add_argument('-npfp', type=int, default=8, required=False,\n                        help='[OPTIONAL] Number of processes used for fingerprint extraction. Default: 8')\n    parser.add_argument(\"--verify_dataset_integrity\", required=False, default=False, action=\"store_true\",\n                        help=\"[RECOMMENDED] set this flag to check the dataset integrity. This is useful and should be done once for \"\n                             \"each dataset!\")\n    parser.add_argument('--no_pp', default=False, action='store_true', required=False,\n                        help='[OPTIONAL] Set this to only run fingerprint extraction and experiment planning (no '\n                             'preprocessing). Useful for debugging.')\n    parser.add_argument(\"--clean\", required=False, default=False, action=\"store_true\",\n                        help='[OPTIONAL] Set this flag to overwrite existing fingerprints. If this flag is not set and a '\n                             'fingerprint already exists, the fingerprint extractor will not run. REQUIRED IF YOU '\n                             'CHANGE THE DATASET FINGERPRINT EXTRACTOR OR MAKE CHANGES TO THE DATASET!')\n    parser.add_argument('-pl', type=str, default='ExperimentPlanner', required=False,\n                        help='[OPTIONAL] Name of the Experiment Planner class that should be used. Default is '\n                             '\\'ExperimentPlanner\\'. Note: There is no longer a distinction between 2d and 3d planner. '\n                             'It\\'s an all in one solution now. Wuch. 
Such amazing.')\n    parser.add_argument('-gpu_memory_target', default=8, type=int, required=False,\n                        help='[OPTIONAL] DANGER ZONE! Sets a custom GPU memory target. Default: 8 [GB]. Changing this will '\n                             'affect patch and batch size and will '\n                             'definitely affect your models performance! Only use this if you really know what you '\n                             'are doing and NEVER use this without running the default nnU-Net first (as a baseline).')\n    parser.add_argument('-preprocessor_name', default='DefaultPreprocessor', type=str, required=False,\n                        help='[OPTIONAL] DANGER ZONE! Sets a custom preprocessor class. This class must be located in '\n                             'nnunetv2.preprocessing. Default: \\'DefaultPreprocessor\\'. Changing this may affect your '\n                             'models performance! Only use this if you really know what you '\n                             'are doing and NEVER use this without running the default nnU-Net first (as a baseline).')\n    parser.add_argument('-overwrite_target_spacing', default=None, nargs='+', required=False,\n                        help='[OPTIONAL] DANGER ZONE! Sets a custom target spacing for the 3d_fullres and 3d_cascade_fullres '\n                             'configurations. Default: None [no changes]. Changing this will affect image size and '\n                             'potentially patch and batch '\n                             'size. This will definitely affect your models performance! Only use this if you really '\n                             'know what you are doing and NEVER use this without running the default nnU-Net first '\n                             '(as a baseline). Changing the target spacing for the other configurations is currently '\n                             'not implemented. 
New target spacing must be a list of three numbers!')\n    parser.add_argument('-overwrite_plans_name', default='nnUNetPlans', required=False,\n                        help='[OPTIONAL] uSE A CUSTOM PLANS IDENTIFIER. If you used -gpu_memory_target, '\n                             '-preprocessor_name or '\n                             '-overwrite_target_spacing it is best practice to use -overwrite_plans_name to generate a '\n                             'differently named plans file such that the nnunet default plans are not '\n                             'overwritten. You will then need to specify your custom plans file with -p whenever '\n                             'running other nnunet commands (training, inference etc)')\n    parser.add_argument('-c', required=False, default=['2d', '3d_fullres', '3d_lowres'], nargs='+',\n                        help='[OPTIONAL] Configurations for which the preprocessing should be run. Default: 2d 3d_fullres '\n                             '3d_lowres. 3d_cascade_fullres does not need to be specified because it uses the data '\n                             'from 3d_fullres. Configurations that do not exist for some dataset will be skipped.')\n    parser.add_argument('-np', type=int, nargs='+', default=None, required=False,\n                        help=\"[OPTIONAL] Use this to define how many processes are to be used. If this is just one number then \"\n                             \"this number of processes is used for all configurations specified with -c. If it's a \"\n                             \"list of numbers this list must have as many elements as there are configurations. We \"\n                             \"then iterate over zip(configs, num_processes) to determine then umber of processes \"\n                             \"used for each configuration. More processes is always faster (up to the number of \"\n                             \"threads your PC can support, so 8 for a 4 core CPU with hyperthreading. 
If you don't \"\n                             \"know what that is then dont touch it, or at least don't increase it!). DANGER: More \"\n                             \"often than not the number of processes that can be used is limited by the amount of \"\n                             \"RAM available. Image resampling takes up a lot of RAM. MONITOR RAM USAGE AND \"\n                             \"DECREASE -np IF YOUR RAM FILLS UP TOO MUCH!. Default: 8 processes for 2d, 4 \"\n                             \"for 3d_fullres, 8 for 3d_lowres and 4 for everything else\")\n    parser.add_argument('--verbose', required=False, action='store_true',\n                        help='Set this to print a lot of stuff. Useful for debugging. Will disable progress bar! '\n                             'Recommended for cluster environments')\n    args = parser.parse_args()\n\n    # fingerprint extraction\n    print(\"Fingerprint extraction...\")\n    extract_fingerprints(args.d, args.fpe, args.npfp, args.verify_dataset_integrity, args.clean, args.verbose)\n\n    # experiment planning\n    print('Experiment planning...')\n    plan_experiments(args.d, args.pl, args.gpu_memory_target, args.preprocessor_name, args.overwrite_target_spacing, args.overwrite_plans_name)\n\n    # manage default np\n    if args.np is None:\n        default_np = {\"2d\": 8, \"3d_fullres\": 4, \"3d_lowres\": 8}\n        np = [default_np[c] if c in default_np.keys() else 4 for c in args.c]\n    else:\n        np = args.np\n    # preprocessing\n    if not args.no_pp:\n        print('Preprocessing...')\n        preprocess(args.d, args.overwrite_plans_name, args.c, np, args.verbose)\n\n\nif __name__ == '__main__':\n    plan_and_preprocess_entry()\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/experiment_planning/plans_for_pretraining/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/experiment_planning/plans_for_pretraining/move_plans_between_datasets.py",
    "content": "import argparse\nfrom typing import Union\n\nfrom batchgenerators.utilities.file_and_folder_operations import join, isdir, isfile, load_json, subfiles, save_json\n\nfrom nnunetv2.imageio.reader_writer_registry import determine_reader_writer_from_dataset_json\nfrom nnunetv2.paths import nnUNet_preprocessed, nnUNet_raw\nfrom nnunetv2.utilities.file_path_utilities import maybe_convert_to_dataset_name\nfrom nnunetv2.utilities.plans_handling.plans_handler import PlansManager\nfrom nnunetv2.utilities.utils import get_filenames_of_train_images_and_targets\n\n\ndef move_plans_between_datasets(\n        source_dataset_name_or_id: Union[int, str],\n        target_dataset_name_or_id: Union[int, str],\n        source_plans_identifier: str,\n        target_plans_identifier: str = None):\n    source_dataset_name = maybe_convert_to_dataset_name(source_dataset_name_or_id)\n    target_dataset_name = maybe_convert_to_dataset_name(target_dataset_name_or_id)\n\n    if target_plans_identifier is None:\n        target_plans_identifier = source_plans_identifier\n\n    source_folder = join(nnUNet_preprocessed, source_dataset_name)\n    assert isdir(source_folder), f\"Cannot move plans because preprocessed directory of source dataset is missing. \" \\\n                                 f\"Run nnUNetv2_plan_and_preprocess for source dataset first!\"\n\n    source_plans_file = join(source_folder, source_plans_identifier + '.json')\n    assert isfile(source_plans_file), f\"Source plans are missing. Run the corresponding experiment planning first! 
\" \\\n                                      f\"Expected file: {source_plans_file}\"\n\n    source_plans = load_json(source_plans_file)\n    source_plans['dataset_name'] = target_dataset_name\n\n    # we need to change data_identifier to use target_plans_identifier\n    if target_plans_identifier != source_plans_identifier:\n        for c in source_plans['configurations'].keys():\n            if 'data_identifier' in source_plans['configurations'][c].keys():\n                old_identifier = source_plans['configurations'][c][\"data_identifier\"]\n                if old_identifier.startswith(source_plans_identifier):\n                    new_identifier = target_plans_identifier + old_identifier[len(source_plans_identifier):]\n                else:\n                    new_identifier = target_plans_identifier + '_' + old_identifier\n                source_plans['configurations'][c][\"data_identifier\"] = new_identifier\n\n    # we need to change the reader writer class!\n    target_raw_data_dir = join(nnUNet_raw, target_dataset_name)\n    target_dataset_json = load_json(join(target_raw_data_dir, 'dataset.json'))\n\n    # we may need to change the reader/writer\n    # pick any file from the source dataset\n    dataset = get_filenames_of_train_images_and_targets(target_raw_data_dir, target_dataset_json)\n    example_image = dataset[dataset.keys().__iter__().__next__()]['images'][0]\n    rw = determine_reader_writer_from_dataset_json(target_dataset_json, example_image, allow_nonmatching_filename=True,\n                                                   verbose=False)\n\n    source_plans[\"image_reader_writer\"] = rw.__name__\n    if target_plans_identifier is not None:\n        source_plans[\"plans_name\"] = target_plans_identifier\n\n    save_json(source_plans, join(nnUNet_preprocessed, target_dataset_name, target_plans_identifier + '.json'),\n              sort_keys=False)\n\n\ndef entry_point_move_plans_between_datasets():\n    parser = argparse.ArgumentParser()\n    
parser.add_argument('-s', type=str, required=True,\n                        help='Source dataset name or id')\n    parser.add_argument('-t', type=str, required=True,\n                        help='Target dataset name or id')\n    parser.add_argument('-sp', type=str, required=True,\n                        help='Source plans identifier. If your plans are named \"nnUNetPlans.json\" then the '\n                             'identifier would be nnUNetPlans')\n    parser.add_argument('-tp', type=str, required=False, default=None,\n                        help='Target plans identifier. Default is None meaning the source plans identifier will '\n                             'be kept. Not recommended if the source plans identifier is a default nnU-Net identifier '\n                             'such as nnUNetPlans!!!')\n    args = parser.parse_args()\n    move_plans_between_datasets(args.s, args.t, args.sp, args.tp)\n\n\nif __name__ == '__main__':\n    move_plans_between_datasets(2, 4, 'nnUNetPlans', 'nnUNetPlansFrom2')\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/experiment_planning/verify_dataset_integrity.py",
    "content": "#    Copyright 2021 HIP Applied Computer Vision Lab, Division of Medical Image Computing, German Cancer Research Center\n#    (DKFZ), Heidelberg, Germany\n#\n#    Licensed under the Apache License, Version 2.0 (the \"License\");\n#    you may not use this file except in compliance with the License.\n#    You may obtain a copy of the License at\n#\n#        http://www.apache.org/licenses/LICENSE-2.0\n#\n#    Unless required by applicable law or agreed to in writing, software\n#    distributed under the License is distributed on an \"AS IS\" BASIS,\n#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n#    See the License for the specific language governing permissions and\n#    limitations under the License.\nimport multiprocessing\nimport re\nfrom multiprocessing import Pool\nfrom typing import Type\n\nimport numpy as np\nimport pandas as pd\nfrom batchgenerators.utilities.file_and_folder_operations import *\n\nfrom nnunetv2.imageio.base_reader_writer import BaseReaderWriter\nfrom nnunetv2.imageio.reader_writer_registry import determine_reader_writer_from_dataset_json\nfrom nnunetv2.paths import nnUNet_raw\nfrom nnunetv2.utilities.label_handling.label_handling import LabelManager\nfrom nnunetv2.utilities.utils import get_identifiers_from_splitted_dataset_folder, \\\n    get_filenames_of_train_images_and_targets\n\n\ndef verify_labels(label_file: str, readerclass: Type[BaseReaderWriter], expected_labels: List[int]) -> bool:\n    rw = readerclass()\n    seg, properties = rw.read_seg(label_file)\n    found_labels = np.sort(pd.unique(seg.ravel()))  # np.unique(seg)\n    unexpected_labels = [i for i in found_labels if i not in expected_labels]\n    if len(found_labels) == 1 and found_labels[0] == 0:\n        print('WARNING: File %s only has label 0 (which should be background). This may be intentional or not, '\n              'up to you.' 
% label_file)\n    if len(unexpected_labels) > 0:\n        print(\"Error: Unexpected labels found in file %s.\\nExpected: %s\\nFound: %s\" % (label_file, expected_labels,\n                                                                                       found_labels))\n        return False\n    return True\n\n\ndef check_cases(image_files: List[str], label_file: str, expected_num_channels: int,\n                readerclass: Type[BaseReaderWriter]) -> bool:\n    rw = readerclass()\n    ret = True\n\n    images, properties_image = rw.read_images(image_files)\n    segmentation, properties_seg = rw.read_seg(label_file)\n\n    # check for nans\n    if np.any(np.isnan(images)):\n        print(f'Images contain NaN pixel values. You need to fix that by '\n              f'replacing NaN values with something that makes sense for your images!\\nImages:\\n{image_files}')\n        ret = False\n    if np.any(np.isnan(segmentation)):\n        print(f'Segmentation contains NaN pixel values. You need to fix that.\\nSegmentation:\\n{label_file}')\n        ret = False\n\n    # check shapes\n    shape_image = images.shape[1:]\n    shape_seg = segmentation.shape[1:]\n    if shape_image != shape_seg:\n        print('Error: Shape mismatch between segmentation and corresponding images. \\nShape images: %s. '\n              '\\nShape seg: %s. \\nImage files: %s. \\nSeg file: %s\\n' %\n              (shape_image, shape_seg, image_files, label_file))\n        ret = False\n\n    # check spacings\n    spacing_images = properties_image['spacing']\n    spacing_seg = properties_seg['spacing']\n    if not np.allclose(spacing_seg, spacing_images):\n        print('Error: Spacing mismatch between segmentation and corresponding images. \\nSpacing images: %s. '\n              '\\nSpacing seg: %s. \\nImage files: %s. 
\\nSeg file: %s\\n' %\n              (spacing_images, spacing_seg, image_files, label_file))\n        ret = False\n\n    # check modalities\n    if not len(images) == expected_num_channels:\n        print('Error: Unexpected number of modalities. \\nExpected: %d. \\nGot: %d. \\nImages: %s\\n'\n              % (expected_num_channels, len(images), image_files))\n        ret = False\n\n    # nibabel checks\n    if 'nibabel_stuff' in properties_image.keys():\n        # this image was read with NibabelIO\n        affine_image = properties_image['nibabel_stuff']['original_affine']\n        affine_seg = properties_seg['nibabel_stuff']['original_affine']\n        if not np.allclose(affine_image, affine_seg):\n            print('WARNING: Affine is not the same for image and seg! \\nAffine image: %s \\nAffine seg: %s\\n'\n                  'Image files: %s. \\nSeg file: %s.\\nThis can be a problem but doesn\\'t have to be. Please run '\n                  'nnUNet_plot_dataset_pngs to verify if everything is OK!\\n'\n                  % (affine_image, affine_seg, image_files, label_file))\n\n    # sitk checks\n    if 'sitk_stuff' in properties_image.keys():\n        # this image was read with SimpleITKIO\n        # spacing has already been checked, only check direction and origin\n        origin_image = properties_image['sitk_stuff']['origin']\n        origin_seg = properties_seg['sitk_stuff']['origin']\n        if not np.allclose(origin_image, origin_seg):\n            print('Warning: Origin mismatch between segmentation and corresponding images. \\nOrigin images: %s. '\n                  '\\nOrigin seg: %s. \\nImage files: %s. 
\\nSeg file: %s\\n' %\n                  (origin_image, origin_seg, image_files, label_file))\n        direction_image = properties_image['sitk_stuff']['direction']\n        direction_seg = properties_seg['sitk_stuff']['direction']\n        if not np.allclose(direction_image, direction_seg):\n            print('Warning: Direction mismatch between segmentation and corresponding images. \\nDirection images: %s. '\n                  '\\nDirection seg: %s. \\nImage files: %s. \\nSeg file: %s\\n' %\n                  (direction_image, direction_seg, image_files, label_file))\n\n    return ret\n\n\ndef verify_dataset_integrity(folder: str, num_processes: int = 8) -> None:\n    \"\"\"\n    folder needs the imagesTr, imagesTs and labelsTr subfolders. There also needs to be a dataset.json\n    checks if the expected number of training cases and labels are present\n    for each case, if possible, checks whether the pixel grids are aligned\n    checks whether the labels really only contain values they should\n    :param folder:\n    :return:\n    \"\"\"\n    assert isfile(join(folder, \"dataset.json\")), f\"There needs to be a dataset.json file in folder, folder={folder}\"\n    dataset_json = load_json(join(folder, \"dataset.json\"))\n\n    if not 'dataset' in dataset_json.keys():\n        assert isdir(join(folder, \"imagesTr\")), f\"There needs to be a imagesTr subfolder in folder, folder={folder}\"\n        assert isdir(join(folder, \"labelsTr\")), f\"There needs to be a labelsTr subfolder in folder, folder={folder}\"\n\n    # make sure all required keys are there\n    dataset_keys = list(dataset_json.keys())\n    required_keys = ['labels', \"channel_names\", \"numTraining\", \"file_ending\"]\n    assert all([i in dataset_keys for i in required_keys]), 'not all required keys are present in dataset.json.' 
\\\n                                                            '\\n\\nRequired: \\n%s\\n\\nPresent: \\n%s\\n\\nMissing: ' \\\n                                                            '\\n%s\\n\\nUnused by nnU-Net:\\n%s' % \\\n                                                            (str(required_keys),\n                                                             str(dataset_keys),\n                                                             str([i for i in required_keys if i not in dataset_keys]),\n                                                             str([i for i in dataset_keys if i not in required_keys]))\n\n    expected_num_training = dataset_json['numTraining']\n    num_modalities = len(dataset_json['channel_names'].keys()\n                         if 'channel_names' in dataset_json.keys()\n                         else dataset_json['modality'].keys())\n    file_ending = dataset_json['file_ending']\n\n    dataset = get_filenames_of_train_images_and_targets(folder, dataset_json)\n\n    # check if the right number of training cases is present\n    assert len(dataset) == expected_num_training, 'Did not find the expected number of training cases ' \\\n                                                               '(%d). 
Found %d instead.\\nExamples: %s' % \\\n                                                               (expected_num_training, len(dataset),\n                                                                list(dataset.keys())[:5])\n\n    # check if corresponding labels are present\n    if 'dataset' in dataset_json.keys():\n        # just check if everything is there\n        ok = True\n        missing_images = []\n        missing_labels = []\n        for k in dataset:\n            for i in dataset[k]['images']:\n                if not isfile(i):\n                    missing_images.append(i)\n                    ok = False\n            if not isfile(dataset[k]['label']):\n                missing_labels.append(dataset[k]['label'])\n                ok = False\n        if not ok:\n            raise FileNotFoundError(f\"Some expected files were missing. Make sure you are properly referencing them \"\n                                    f\"in the dataset.json. Or use imagesTr & labelsTr folders!\\nMissing images:\"\n                                    f\"\\n{missing_images}\\n\\nMissing labels:\\n{missing_labels}\")\n    else:\n        # old code that uses imagestr and labelstr folders\n        labelfiles = subfiles(join(folder, 'labelsTr'), suffix=file_ending, join=False)\n        label_identifiers = [i[:-len(file_ending)] for i in labelfiles]\n        labels_present = [i in label_identifiers for i in dataset.keys()]\n        missing = [i for j, i in enumerate(dataset.keys()) if not labels_present[j]]\n        assert all(labels_present), f'not all training cases have a label file in labelsTr. Fix that. Missing: {missing}'\n\n    labelfiles = [v['label'] for v in dataset.values()]\n    image_files = [v['images'] for v in dataset.values()]\n\n    # no plans exist yet, so we can't use PlansManager and gotta roll with the default. 
It's unlikely to cause\n    # problems anyway\n    label_manager = LabelManager(dataset_json['labels'], regions_class_order=dataset_json.get('regions_class_order'))\n    expected_labels = label_manager.all_labels\n    if label_manager.has_ignore_label:\n        expected_labels.append(label_manager.ignore_label)\n    labels_valid_consecutive = np.ediff1d(expected_labels) == 1\n    assert all(\n        labels_valid_consecutive), f'Labels must be in consecutive order (0, 1, 2, ...). The labels {np.array(expected_labels)[1:][~labels_valid_consecutive]} do not satisfy this restriction'\n\n    # determine reader/writer class\n    reader_writer_class = determine_reader_writer_from_dataset_json(dataset_json, dataset[dataset.keys().__iter__().__next__()]['images'][0])\n\n    # check whether only the desired labels are present\n    with multiprocessing.get_context(\"spawn\").Pool(num_processes) as p:\n        result = p.starmap(\n            verify_labels,\n            zip([join(folder, 'labelsTr', i) for i in labelfiles], [reader_writer_class] * len(labelfiles),\n                [expected_labels] * len(labelfiles))\n        )\n        if not all(result):\n            raise RuntimeError(\n                'Some segmentation images contained unexpected labels. Please check text output above to see which one(s).')\n\n        # check whether shapes and spacings match between images and labels\n        result = p.starmap(\n            check_cases,\n            zip(image_files, labelfiles, [num_modalities] * expected_num_training,\n                [reader_writer_class] * expected_num_training)\n        )\n        if not all(result):\n            raise RuntimeError(\n                'Some images have errors. Please check text output above to see which one(s) and what\\'s going on.')\n\n    # check for nans\n    # check all same orientation nibabel\n    print('\\n####################')\n    print('verify_dataset_integrity Done. 
\\nIf you didn\\'t see any error messages then your dataset is most likely OK!')\n    print('####################\\n')\n\n\nif __name__ == \"__main__\":\n    # investigate geometry issues\n    example_folder = join(nnUNet_raw, 'Dataset250_COMPUTING_it0')\n    num_processes = 6\n    verify_dataset_integrity(example_folder, num_processes)\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/imageio/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/imageio/base_reader_writer.py",
    "content": "#    Copyright 2021 HIP Applied Computer Vision Lab, Division of Medical Image Computing, German Cancer Research Center\n#    (DKFZ), Heidelberg, Germany\n#\n#    Licensed under the Apache License, Version 2.0 (the \"License\");\n#    you may not use this file except in compliance with the License.\n#    You may obtain a copy of the License at\n#\n#        http://www.apache.org/licenses/LICENSE-2.0\n#\n#    Unless required by applicable law or agreed to in writing, software\n#    distributed under the License is distributed on an \"AS IS\" BASIS,\n#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n#    See the License for the specific language governing permissions and\n#    limitations under the License.\n\nfrom abc import ABC, abstractmethod\nfrom typing import Tuple, Union, List\nimport numpy as np\n\n\nclass BaseReaderWriter(ABC):\n    @staticmethod\n    def _check_all_same(input_list):\n        # compare all entries to the first\n        for i in input_list[1:]:\n            if i != input_list[0]:\n                return False\n        return True\n\n    @staticmethod\n    def _check_all_same_array(input_list):\n        # compare all entries to the first\n        for i in input_list[1:]:\n            if i.shape != input_list[0].shape or not np.allclose(i, input_list[0]):\n                return False\n        return True\n\n    @abstractmethod\n    def read_images(self, image_fnames: Union[List[str], Tuple[str, ...]]) -> Tuple[np.ndarray, dict]:\n        \"\"\"\n        Reads a sequence of images and returns a 4d (!) np.ndarray along with a dictionary. 
The 4d array must have the\n        modalities (or color channels, or however you would like to call them) in its first axis, followed by the\n        spatial dimensions (so shape must be c,x,y,z where c is the number of modalities (can be 1)).\n        Use the dictionary to store necessary meta information that is lost when converting to numpy arrays, for\n        example the Spacing, Orientation and Direction of the image. This dictionary will be handed over to write_seg\n        for exporting the predicted segmentations, so make sure you have everything you need in there!\n\n        IMPORTANT: dict MUST have a 'spacing' key with a tuple/list of length 3 with the voxel spacing of the np.ndarray.\n        Example: my_dict = {'spacing': (3, 0.5, 0.5), ...}. This is needed for planning and\n        preprocessing. The ordering of the numbers must correspond to the axis ordering in the returned numpy array. So\n        if the array has shape c,x,y,z and the spacing is (a,b,c) then a must be the spacing of x, b the spacing of y\n        and c the spacing of z.\n\n        In the case of 2D images, the returned array should have shape (c, 1, x, y) and the spacing should be\n        (999, sp_x, sp_y). Make sure 999 is larger than sp_x and sp_y! Example: shape=(3, 1, 224, 224),\n        spacing=(999, 1, 1)\n\n        For images that don't have a spacing, set the spacing to 1 (2d exception with 999 for the first axis still applies!)\n\n        :param image_fnames:\n        :return:\n            1) a np.ndarray of shape (c, x, y, z) where c is the number of image channels (can be 1) and x, y, z are\n            the spatial dimensions (set x=1 for 2D! Example: (3, 1, 224, 224) for RGB image).\n            2) a dictionary with metadata. This can be anything. BUT it HAS to include a {'spacing': (a, b, c)} where a\n            is the spacing of x, b of y and c of z! If an image doesn't have spacing, just set this to 1. For 2D, set\n            a=999 (largest spacing value! 
Make it larger than b and c)\n\n        \"\"\"\n        pass\n\n    @abstractmethod\n    def read_seg(self, seg_fname: str) -> Tuple[np.ndarray, dict]:\n        \"\"\"\n        Same requirements as BaseReaderWriter.read_image. Returned segmentations must have shape 1,x,y,z. Multiple\n        segmentations are not (yet?) allowed\n\n        If images and segmentations can be read the same way you can just `return self.read_image((image_fname,))`\n        :param seg_fname:\n        :return:\n            1) a np.ndarray of shape (1, x, y, z) where x, y, z are\n            the spatial dimensions (set x=1 for 2D! Example: (1, 1, 224, 224) for 2D segmentation).\n            2) a dictionary with metadata. This can be anything. BUT it HAS to include a {'spacing': (a, b, c)} where a\n            is the spacing of x, b of y and c of z! If an image doesn't have spacing, just set this to 1. For 2D, set\n            a=999 (largest spacing value! Make it larger than b and c)\n        \"\"\"\n        pass\n\n    @abstractmethod\n    def write_seg(self, seg: np.ndarray, output_fname: str, properties: dict) -> None:\n        \"\"\"\n        Export the predicted segmentation to the desired file format. The given seg array will have the same shape and\n        orientation as the corresponding image data, so you don't need to do any resampling or whatever. Just save :-)\n\n        properties is the same dictionary you created during read_images/read_seg so you can use the information here\n        to restore metadata\n\n        IMPORTANT: Segmentations are always 3D! If your input images were 2d then the segmentation will have shape\n        1,x,y. You need to catch that and export accordingly (for 2d images you need to convert the 3d segmentation\n        to 2d via seg = seg[0])!\n\n        :param seg: A segmentation (np.ndarray, integer) of shape (x, y, z). 
For 2D segmentations this will be (1, y, z)!\n        :param output_fname:\n        :param properties: the dictionary that you created in read_images (the ones this segmentation is based on).\n        Use this to restore metadata\n        :return:\n        \"\"\"\n        pass"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/imageio/natural_image_reader_writer.py",
    "content": "#    Copyright 2021 HIP Applied Computer Vision Lab, Division of Medical Image Computing, German Cancer Research Center\n#    (DKFZ), Heidelberg, Germany\n#\n#    Licensed under the Apache License, Version 2.0 (the \"License\");\n#    you may not use this file except in compliance with the License.\n#    You may obtain a copy of the License at\n#\n#        http://www.apache.org/licenses/LICENSE-2.0\n#\n#    Unless required by applicable law or agreed to in writing, software\n#    distributed under the License is distributed on an \"AS IS\" BASIS,\n#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n#    See the License for the specific language governing permissions and\n#    limitations under the License.\n\nfrom typing import Tuple, Union, List\nimport numpy as np\nfrom nnunetv2.imageio.base_reader_writer import BaseReaderWriter\nfrom skimage import io\n\n\nclass NaturalImage2DIO(BaseReaderWriter):\n    \"\"\"\n    ONLY SUPPORTS 2D IMAGES!!!\n    \"\"\"\n\n    # there are surely more we could add here. Everything that can be read by skimage.io should be supported\n    supported_file_endings = [\n        '.png',\n        # '.jpg',\n        # '.jpeg', # jpg not supported because we cannot allow lossy compression! 
segmentation maps!\n        '.bmp',\n        '.tif'\n    ]\n\n    def read_images(self, image_fnames: Union[List[str], Tuple[str, ...]]) -> Tuple[np.ndarray, dict]:\n        images = []\n        for f in image_fnames:\n            npy_img = io.imread(f)\n            if npy_img.ndim == 3:\n                # rgb image, last dimension should be the color channel and the size of that channel should be 3\n                # (or 4 if we have alpha)\n                assert npy_img.shape[-1] == 3 or npy_img.shape[-1] == 4, \"If image has three dimensions then the last \" \\\n                                                                         \"dimension must have shape 3 or 4 \" \\\n                                                                         f\"(RGB or RGBA). Image shape here is {npy_img.shape}\"\n                # move RGB(A) to front, add additional dim so that we have shape (1, c, X, Y), where c is either 3 or 4\n                images.append(npy_img.transpose((2, 0, 1))[:, None])\n            elif npy_img.ndim == 2:\n                # grayscale image\n                images.append(npy_img[None, None])\n\n        if not self._check_all_same([i.shape for i in images]):\n            print('ERROR! 
Not all input images have the same shape!')\n            print('Shapes:')\n            print([i.shape for i in images])\n            print('Image files:')\n            print(image_fnames)\n            raise RuntimeError()\n        return np.vstack(images).astype(np.float32), {'spacing': (999, 1, 1)}\n\n    def read_seg(self, seg_fname: str) -> Tuple[np.ndarray, dict]:\n        return self.read_images((seg_fname, ))\n\n    def write_seg(self, seg: np.ndarray, output_fname: str, properties: dict) -> None:\n        io.imsave(output_fname, seg[0].astype(np.uint8), check_contrast=False)\n\n\nif __name__ == '__main__':\n    images = ('/media/fabian/data/nnUNet_raw/Dataset120_RoadSegmentation/imagesTr/img-11_0000.png',)\n    segmentation = '/media/fabian/data/nnUNet_raw/Dataset120_RoadSegmentation/labelsTr/img-11.png'\n    imgio = NaturalImage2DIO()\n    img, props = imgio.read_images(images)\n    seg, segprops = imgio.read_seg(segmentation)"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/imageio/nibabel_reader_writer.py",
    "content": "#    Copyright 2021 HIP Applied Computer Vision Lab, Division of Medical Image Computing, German Cancer Research Center\n#    (DKFZ), Heidelberg, Germany\n#\n#    Licensed under the Apache License, Version 2.0 (the \"License\");\n#    you may not use this file except in compliance with the License.\n#    You may obtain a copy of the License at\n#\n#        http://www.apache.org/licenses/LICENSE-2.0\n#\n#    Unless required by applicable law or agreed to in writing, software\n#    distributed under the License is distributed on an \"AS IS\" BASIS,\n#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n#    See the License for the specific language governing permissions and\n#    limitations under the License.\n\nfrom typing import Tuple, Union, List\nimport numpy as np\nfrom nibabel import io_orientation\n\nfrom nnunetv2.imageio.base_reader_writer import BaseReaderWriter\nimport nibabel\n\n\nclass NibabelIO(BaseReaderWriter):\n    \"\"\"\n    Nibabel loads the images in a different order than sitk. We convert the axes to the sitk order to be\n    consistent. 
This is of course considered properly in segmentation export as well.\n\n    IMPORTANT: Run nnUNet_plot_dataset_pngs to verify that this did not destroy the alignment of data and seg!\n    \"\"\"\n    supported_file_endings = [\n        '.nii.gz',\n        '.nrrd',\n        '.mha'\n    ]\n\n    def read_images(self, image_fnames: Union[List[str], Tuple[str, ...]]) -> Tuple[np.ndarray, dict]:\n        images = []\n        original_affines = []\n\n        spacings_for_nnunet = []\n        for f in image_fnames:\n            nib_image = nibabel.load(f)\n            assert nib_image.ndim == 3, 'only 3d images are supported by NibabelIO'\n            original_affine = nib_image.affine\n\n            original_affines.append(original_affine)\n\n            # spacing is taken in reverse order to be consistent with SimpleITK axis ordering (confusing, I know...)\n            spacings_for_nnunet.append(\n                    [float(i) for i in nib_image.header.get_zooms()[::-1]]\n            )\n\n            # transpose image to be consistent with the way SimpleITk reads images. Yeah. Annoying.\n            images.append(nib_image.get_fdata().transpose((2, 1, 0))[None])\n\n        if not self._check_all_same([i.shape for i in images]):\n            print('ERROR! Not all input images have the same shape!')\n            print('Shapes:')\n            print([i.shape for i in images])\n            print('Image files:')\n            print(image_fnames)\n            raise RuntimeError()\n        if not self._check_all_same_array(original_affines):\n            print('WARNING! Not all input images have the same original_affines!')\n            print('Affines:')\n            print(original_affines)\n            print('Image files:')\n            print(image_fnames)\n            print('It is up to you to decide whether that\\'s a problem. 
You should run nnUNet_plot_dataset_pngs to verify '\n                  'that segmentations and data overlap.')\n        if not self._check_all_same(spacings_for_nnunet):\n            print('ERROR! Not all input images have the same spacing_for_nnunet! This might be caused by them not '\n                  'having the same affine')\n            print('spacings_for_nnunet:')\n            print(spacings_for_nnunet)\n            print('Image files:')\n            print(image_fnames)\n            raise RuntimeError()\n\n        stacked_images = np.vstack(images)\n        dict = {\n            'nibabel_stuff': {\n                'original_affine': original_affines[0],\n            },\n            'spacing': spacings_for_nnunet[0]\n        }\n        return stacked_images.astype(np.float32), dict\n\n    def read_seg(self, seg_fname: str) -> Tuple[np.ndarray, dict]:\n        return self.read_images((seg_fname, ))\n\n    def write_seg(self, seg: np.ndarray, output_fname: str, properties: dict) -> None:\n        # revert transpose\n        seg = seg.transpose((2, 1, 0)).astype(np.uint8)\n        seg_nib = nibabel.Nifti1Image(seg, affine=properties['nibabel_stuff']['original_affine'])\n        nibabel.save(seg_nib, output_fname)\n\n\nclass NibabelIOWithReorient(BaseReaderWriter):\n    \"\"\"\n    Reorients images to RAS\n\n    Nibabel loads the images in a different order than sitk. We convert the axes to the sitk order to be\n    consistent. 
This is of course considered properly in segmentation export as well.\n\n    IMPORTANT: Run nnUNet_plot_dataset_pngs to verify that this did not destroy the alignment of data and seg!\n    \"\"\"\n    supported_file_endings = [\n        '.nii.gz',\n        '.nrrd',\n        '.mha'\n    ]\n\n    def read_images(self, image_fnames: Union[List[str], Tuple[str, ...]]) -> Tuple[np.ndarray, dict]:\n        images = []\n        original_affines = []\n        reoriented_affines = []\n\n        spacings_for_nnunet = []\n        for f in image_fnames:\n            nib_image = nibabel.load(f)\n            assert nib_image.ndim == 3, 'only 3d images are supported by NibabelIO'\n            original_affine = nib_image.affine\n            reoriented_image = nib_image.as_reoriented(io_orientation(original_affine))\n            reoriented_affine = reoriented_image.affine\n\n            original_affines.append(original_affine)\n            reoriented_affines.append(reoriented_affine)\n\n            # spacing is taken in reverse order to be consistent with SimpleITK axis ordering (confusing, I know...)\n            spacings_for_nnunet.append(\n                    [float(i) for i in reoriented_image.header.get_zooms()[::-1]]\n            )\n\n            # transpose image to be consistent with the way SimpleITk reads images. Yeah. Annoying.\n            images.append(reoriented_image.get_fdata().transpose((2, 1, 0))[None])\n\n        if not self._check_all_same([i.shape for i in images]):\n            print('ERROR! Not all input images have the same shape!')\n            print('Shapes:')\n            print([i.shape for i in images])\n            print('Image files:')\n            print(image_fnames)\n            raise RuntimeError()\n        if not self._check_all_same_array(reoriented_affines):\n            print('WARNING! 
Not all input images have the same reoriented_affines!')\n            print('Affines:')\n            print(reoriented_affines)\n            print('Image files:')\n            print(image_fnames)\n            print('It is up to you to decide whether that\\'s a problem. You should run nnUNet_plot_dataset_pngs to verify '\n                  'that segmentations and data overlap.')\n        if not self._check_all_same(spacings_for_nnunet):\n            print('ERROR! Not all input images have the same spacing_for_nnunet! This might be caused by them not '\n                  'having the same affine')\n            print('spacings_for_nnunet:')\n            print(spacings_for_nnunet)\n            print('Image files:')\n            print(image_fnames)\n            raise RuntimeError()\n\n        stacked_images = np.vstack(images)\n        dict = {\n            'nibabel_stuff': {\n                'original_affine': original_affines[0],\n                'reoriented_affine': reoriented_affines[0],\n            },\n            'spacing': spacings_for_nnunet[0]\n        }\n        return stacked_images.astype(np.float32), dict\n\n    def read_seg(self, seg_fname: str) -> Tuple[np.ndarray, dict]:\n        return self.read_images((seg_fname, ))\n\n    def write_seg(self, seg: np.ndarray, output_fname: str, properties: dict) -> None:\n        # revert transpose\n        seg = seg.transpose((2, 1, 0)).astype(np.uint8)\n\n        seg_nib = nibabel.Nifti1Image(seg, affine=properties['nibabel_stuff']['reoriented_affine'])\n        seg_nib_reoriented = seg_nib.as_reoriented(io_orientation(properties['nibabel_stuff']['original_affine']))\n        assert np.allclose(properties['nibabel_stuff']['original_affine'], seg_nib_reoriented.affine), \\\n            'restored affine does not match original affine'\n        nibabel.save(seg_nib_reoriented, output_fname)\n\n\nif __name__ == '__main__':\n    img_file = 'patient028_frame01_0000.nii.gz'\n    seg_file = 'patient028_frame01.nii.gz'\n\n    
nibio = NibabelIO()\n    images, dct = nibio.read_images([img_file])\n    seg, dctseg = nibio.read_seg(seg_file)\n\n    nibio_r = NibabelIOWithReorient()\n    images_r, dct_r = nibio_r.read_images([img_file])\n    seg_r, dctseg_r = nibio_r.read_seg(seg_file)\n\n    nibio.write_seg(seg[0], '/home/isensee/seg_nibio.nii.gz', dctseg)\n    nibio_r.write_seg(seg_r[0], '/home/isensee/seg_nibio_r.nii.gz', dctseg_r)\n\n    s_orig = nibabel.load(seg_file).get_fdata()\n    s_nibio = nibabel.load('/home/isensee/seg_nibio.nii.gz').get_fdata()\n    s_nibio_r = nibabel.load('/home/isensee/seg_nibio_r.nii.gz').get_fdata()\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/imageio/reader_writer_registry.py",
    "content": "import traceback\nfrom typing import Type\n\nfrom batchgenerators.utilities.file_and_folder_operations import join\n\nimport nnunetv2\nfrom nnunetv2.imageio.natural_image_reader_writer import NaturalImage2DIO\nfrom nnunetv2.imageio.nibabel_reader_writer import NibabelIO, NibabelIOWithReorient\nfrom nnunetv2.imageio.simpleitk_reader_writer import SimpleITKIO\nfrom nnunetv2.imageio.tif_reader_writer import Tiff3DIO\nfrom nnunetv2.imageio.base_reader_writer import BaseReaderWriter\nfrom nnunetv2.utilities.find_class_by_name import recursive_find_python_class\n\nLIST_OF_IO_CLASSES = [\n    NaturalImage2DIO,\n    SimpleITKIO,\n    Tiff3DIO,\n    NibabelIO,\n    NibabelIOWithReorient\n]\n\n\ndef determine_reader_writer_from_dataset_json(dataset_json_content: dict, example_file: str = None,\n                                              allow_nonmatching_filename: bool = False, verbose: bool = True\n                                              ) -> Type[BaseReaderWriter]:\n    if 'overwrite_image_reader_writer' in dataset_json_content.keys() and \\\n            dataset_json_content['overwrite_image_reader_writer'] != 'None':\n        ioclass_name = dataset_json_content['overwrite_image_reader_writer']\n        # trying to find that class in the nnunetv2.imageio module\n        try:\n            ret = recursive_find_reader_writer_by_name(ioclass_name)\n            if verbose: print(f'Using {ret} reader/writer')\n            return ret\n        except RuntimeError:\n            if verbose: print(f'Warning: Unable to find ioclass specified in dataset.json: {ioclass_name}')\n            if verbose: print('Trying to automatically determine desired class')\n    return determine_reader_writer_from_file_ending(dataset_json_content['file_ending'], example_file,\n                                                    allow_nonmatching_filename, verbose)\n\n\ndef determine_reader_writer_from_file_ending(file_ending: str, example_file: str = None, 
allow_nonmatching_filename: bool = False,\n                                             verbose: bool = True):\n    for rw in LIST_OF_IO_CLASSES:\n        if file_ending.lower() in rw.supported_file_endings:\n            if example_file is not None:\n                # if an example file is provided, try if we can actually read it. If not move on to the next reader\n                try:\n                    tmp = rw()\n                    _ = tmp.read_images((example_file,))\n                    if verbose: print(f'Using {rw} as reader/writer')\n                    return rw\n                except:\n                    if verbose: print(f'Failed to open file {example_file} with reader {rw}:')\n                    traceback.print_exc()\n                    pass\n            else:\n                if verbose: print(f'Using {rw} as reader/writer')\n                return rw\n        else:\n            if allow_nonmatching_filename and example_file is not None:\n                try:\n                    tmp = rw()\n                    _ = tmp.read_images((example_file,))\n                    if verbose: print(f'Using {rw} as reader/writer')\n                    return rw\n                except:\n                    if verbose: print(f'Failed to open file {example_file} with reader {rw}:')\n                    if verbose: traceback.print_exc()\n                    pass\n    raise RuntimeError(f\"Unable to determine a reader for file ending {file_ending} and file {example_file} (file None means no file provided).\")\n\n\ndef recursive_find_reader_writer_by_name(rw_class_name: str) -> Type[BaseReaderWriter]:\n    ret = recursive_find_python_class(join(nnunetv2.__path__[0], \"imageio\"), rw_class_name, 'nnunetv2.imageio')\n    if ret is None:\n        raise RuntimeError(\"Unable to find reader writer class '%s'. Please make sure this class is located in the \"\n                           \"nnunetv2.imageio module.\" % rw_class_name)\n    else:\n        return ret\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/imageio/readme.md",
    "content": "- Derive your adapter from `BaseReaderWriter`. \n- Reimplement all abstractmethods. \n- make sure to support 2d and 3d input images (or raise some error).\n- place it in this folder or nnU-Net won't find it!\n- add it to LIST_OF_IO_CLASSES in `reader_writer_registry.py`\n\nBam, you're done!"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/imageio/simpleitk_reader_writer.py",
    "content": "#    Copyright 2021 HIP Applied Computer Vision Lab, Division of Medical Image Computing, German Cancer Research Center\n#    (DKFZ), Heidelberg, Germany\n#\n#    Licensed under the Apache License, Version 2.0 (the \"License\");\n#    you may not use this file except in compliance with the License.\n#    You may obtain a copy of the License at\n#\n#        http://www.apache.org/licenses/LICENSE-2.0\n#\n#    Unless required by applicable law or agreed to in writing, software\n#    distributed under the License is distributed on an \"AS IS\" BASIS,\n#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n#    See the License for the specific language governing permissions and\n#    limitations under the License.\n\nfrom typing import Tuple, Union, List\nimport numpy as np\nfrom nnunetv2.imageio.base_reader_writer import BaseReaderWriter\nimport SimpleITK as sitk\n\n\nclass SimpleITKIO(BaseReaderWriter):\n    supported_file_endings = [\n        '.nii.gz',\n        '.nrrd',\n        '.mha'\n    ]\n\n    def read_images(self, image_fnames: Union[List[str], Tuple[str, ...]]) -> Tuple[np.ndarray, dict]:\n        images = []\n        spacings = []\n        origins = []\n        directions = []\n\n        spacings_for_nnunet = []\n        for f in image_fnames:\n            itk_image = sitk.ReadImage(f)\n            spacings.append(itk_image.GetSpacing())\n            origins.append(itk_image.GetOrigin())\n            directions.append(itk_image.GetDirection())\n            npy_image = sitk.GetArrayFromImage(itk_image)\n            if npy_image.ndim == 2:\n                # 2d\n                npy_image = npy_image[None, None]\n                max_spacing = max(spacings[-1])\n                spacings_for_nnunet.append((max_spacing * 999, *list(spacings[-1])[::-1]))\n            elif npy_image.ndim == 3:\n                # 3d, as in original nnunet\n                npy_image = npy_image[None]\n                
spacings_for_nnunet.append(list(spacings[-1])[::-1])\n            elif npy_image.ndim == 4:\n                # 4d, multiple modalities in one file\n                spacings_for_nnunet.append(list(spacings[-1])[::-1][1:])\n                pass\n            else:\n                raise RuntimeError(f\"Unexpected number of dimensions: {npy_image.ndim} in file {f}\")\n\n            images.append(npy_image)\n            spacings_for_nnunet[-1] = list(np.abs(spacings_for_nnunet[-1]))\n\n        if not self._check_all_same([i.shape for i in images]):\n            print('ERROR! Not all input images have the same shape!')\n            print('Shapes:')\n            print([i.shape for i in images])\n            print('Image files:')\n            print(image_fnames)\n            raise RuntimeError()\n        if not self._check_all_same(spacings):\n            print('ERROR! Not all input images have the same spacing!')\n            print('Spacings:')\n            print(spacings)\n            print('Image files:')\n            print(image_fnames)\n            raise RuntimeError()\n        if not self._check_all_same(origins):\n            print('WARNING! Not all input images have the same origin!')\n            print('Origins:')\n            print(origins)\n            print('Image files:')\n            print(image_fnames)\n            print('It is up to you to decide whether that\\'s a problem. You should run nnUNet_plot_dataset_pngs to verify '\n                  'that segmentations and data overlap.')\n        if not self._check_all_same(directions):\n            print('WARNING! Not all input images have the same direction!')\n            print('Directions:')\n            print(directions)\n            print('Image files:')\n            print(image_fnames)\n            print('It is up to you to decide whether that\\'s a problem. 
You should run nnUNet_plot_dataset_pngs to verify '\n                  'that segmentations and data overlap.')\n        if not self._check_all_same(spacings_for_nnunet):\n            print('ERROR! Not all input images have the same spacing_for_nnunet! (This should not happen and must be a '\n                  'bug. Please report!')\n            print('spacings_for_nnunet:')\n            print(spacings_for_nnunet)\n            print('Image files:')\n            print(image_fnames)\n            raise RuntimeError()\n\n        stacked_images = np.vstack(images)\n        dict = {\n            'sitk_stuff': {\n                # this saves the sitk geometry information. This part is NOT used by nnU-Net!\n                'spacing': spacings[0],\n                'origin': origins[0],\n                'direction': directions[0]\n            },\n            # the spacing is inverted with [::-1] because sitk returns the spacing in the wrong order lol. Image arrays\n            # are returned x,y,z but spacing is returned z,y,x. Duh.\n            'spacing': spacings_for_nnunet[0]\n        }\n        return stacked_images.astype(np.float32), dict\n\n    def read_seg(self, seg_fname: str) -> Tuple[np.ndarray, dict]:\n        return self.read_images((seg_fname, ))\n\n    def write_seg(self, seg: np.ndarray, output_fname: str, properties: dict) -> None:\n        assert seg.ndim == 3, 'segmentation must be 3d. If you are exporting a 2d segmentation, please provide it as shape 1,x,y'\n        output_dimension = len(properties['sitk_stuff']['spacing'])\n        assert 1 < output_dimension < 4\n        if output_dimension == 2:\n            seg = seg[0]\n\n        itk_image = sitk.GetImageFromArray(seg.astype(np.uint8))\n        itk_image.SetSpacing(properties['sitk_stuff']['spacing'])\n        itk_image.SetOrigin(properties['sitk_stuff']['origin'])\n        itk_image.SetDirection(properties['sitk_stuff']['direction'])\n\n        sitk.WriteImage(itk_image, output_fname, True)\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/imageio/tif_reader_writer.py",
    "content": "#    Copyright 2021 HIP Applied Computer Vision Lab, Division of Medical Image Computing, German Cancer Research Center\n#    (DKFZ), Heidelberg, Germany\n#\n#    Licensed under the Apache License, Version 2.0 (the \"License\");\n#    you may not use this file except in compliance with the License.\n#    You may obtain a copy of the License at\n#\n#        http://www.apache.org/licenses/LICENSE-2.0\n#\n#    Unless required by applicable law or agreed to in writing, software\n#    distributed under the License is distributed on an \"AS IS\" BASIS,\n#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n#    See the License for the specific language governing permissions and\n#    limitations under the License.\nimport os.path\nfrom typing import Tuple, Union, List\nimport numpy as np\nfrom nnunetv2.imageio.base_reader_writer import BaseReaderWriter\nimport tifffile\nfrom batchgenerators.utilities.file_and_folder_operations import isfile, load_json, save_json, split_path, join\n\n\nclass Tiff3DIO(BaseReaderWriter):\n    \"\"\"\n    reads and writes 3D tif(f) images. Uses tifffile package. Ignores metadata (for now)!\n\n    If you have 2D tiffs, use NaturalImage2DIO\n\n    Supports the use of auxiliary files for spacing information. If used, the auxiliary files are expected to end\n    with .json and omit the channel identifier. So, for example, the corresponding of image image1_0000.tif is\n    expected to be image1.json)!\n    \"\"\"\n    supported_file_endings = [\n        '.tif',\n        '.tiff',\n    ]\n\n    def read_images(self, image_fnames: Union[List[str], Tuple[str, ...]]) -> Tuple[np.ndarray, dict]:\n        # figure out file ending used here\n        ending = '.' 
+ image_fnames[0].split('.')[-1]\n        assert ending.lower() in self.supported_file_endings, f'Ending {ending} not supported by {self.__class__.__name__}'\n        ending_length = len(ending)\n        truncate_length = ending_length + 5 # 5 comes from len(_0000)\n\n        images = []\n        for f in image_fnames:\n            image = tifffile.imread(f)\n            if image.ndim != 3:\n                raise RuntimeError(f\"Only 3D images are supported! File: {f}\")\n            images.append(image[None])\n\n        # see if aux file can be found\n        expected_aux_file = image_fnames[0][:-truncate_length] + '.json'\n        if isfile(expected_aux_file):\n            spacing = load_json(expected_aux_file)['spacing']\n            assert len(spacing) == 3, f'spacing must have 3 entries, one for each dimension of the image. File: {expected_aux_file}'\n        else:\n            print(f'WARNING no spacing file found for images {image_fnames}\\nAssuming spacing (1, 1, 1).')\n            spacing = (1, 1, 1)\n\n        if not self._check_all_same([i.shape for i in images]):\n            print('ERROR! 
Not all input images have the same shape!')\n            print('Shapes:')\n            print([i.shape for i in images])\n            print('Image files:')\n            print(image_fnames)\n            raise RuntimeError()\n\n        return np.vstack(images).astype(np.float32), {'spacing': spacing}\n\n    def write_seg(self, seg: np.ndarray, output_fname: str, properties: dict) -> None:\n        # not ideal but I really have no clue how to set spacing/resolution information properly in tif files haha\n        tifffile.imwrite(output_fname, data=seg.astype(np.uint8), compression='zlib')\n        file = os.path.basename(output_fname)\n        out_dir = os.path.dirname(output_fname)\n        ending = file.split('.')[-1]\n        save_json({'spacing': properties['spacing']}, join(out_dir, file[:-(len(ending) + 1)] + '.json'))\n\n    def read_seg(self, seg_fname: str) -> Tuple[np.ndarray, dict]:\n        # figure out file ending used here\n        ending = '.' + seg_fname.split('.')[-1]\n        assert ending.lower() in self.supported_file_endings, f'Ending {ending} not supported by {self.__class__.__name__}'\n        ending_length = len(ending)\n\n        seg = tifffile.imread(seg_fname)\n        if seg.ndim != 3:\n            raise RuntimeError(f\"Only 3D images are supported! File: {seg_fname}\")\n        seg = seg[None]\n\n        # see if aux file can be found\n        expected_aux_file = seg_fname[:-ending_length] + '.json'\n        if isfile(expected_aux_file):\n            spacing = load_json(expected_aux_file)['spacing']\n            assert len(spacing) == 3, f'spacing must have 3 entries, one for each dimension of the image. 
File: {expected_aux_file}'\n            assert all([i > 0 for i in spacing]), f\"Spacing must be > 0, spacing: {spacing}\"\n        else:\n            print(f'WARNING no spacing file found for segmentation {seg_fname}\\nAssuming spacing (1, 1, 1).')\n            spacing = (1, 1, 1)\n\n        return seg.astype(np.float32), {'spacing': spacing}"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/inference/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/inference/data_iterators.py",
    "content": "import multiprocessing\nimport queue\nfrom torch.multiprocessing import Event, Process, Queue, Manager\n\nfrom time import sleep\nfrom typing import Union, List\n\nimport numpy as np\nimport torch\nfrom batchgenerators.dataloading.data_loader import DataLoader\n\nfrom nnunetv2.preprocessing.preprocessors.default_preprocessor import DefaultPreprocessor\nfrom nnunetv2.utilities.label_handling.label_handling import convert_labelmap_to_one_hot\nfrom nnunetv2.utilities.plans_handling.plans_handler import PlansManager, ConfigurationManager\n\n\ndef preprocess_fromfiles_save_to_queue(list_of_lists: List[List[str]],\n                                       list_of_segs_from_prev_stage_files: Union[None, List[str]],\n                                       output_filenames_truncated: Union[None, List[str]],\n                                       plans_manager: PlansManager,\n                                       dataset_json: dict,\n                                       configuration_manager: ConfigurationManager,\n                                       target_queue: Queue,\n                                       done_event: Event,\n                                       abort_event: Event,\n                                       verbose: bool = False):\n    try:\n        label_manager = plans_manager.get_label_manager(dataset_json)\n        preprocessor = configuration_manager.preprocessor_class(verbose=verbose)\n        for idx in range(len(list_of_lists)):\n            data, seg, data_properties = preprocessor.run_case(list_of_lists[idx],\n                                                               list_of_segs_from_prev_stage_files[\n                                                                   idx] if list_of_segs_from_prev_stage_files is not None else None,\n                                                               plans_manager,\n                                                               configuration_manager,\n                       
                                        dataset_json)\n            if list_of_segs_from_prev_stage_files is not None and list_of_segs_from_prev_stage_files[idx] is not None:\n                seg_onehot = convert_labelmap_to_one_hot(seg[0], label_manager.foreground_labels, data.dtype)\n                data = np.vstack((data, seg_onehot))\n\n            data = torch.from_numpy(data).contiguous().float()\n\n            item = {'data': data, 'data_properties': data_properties,\n                    'ofile': output_filenames_truncated[idx] if output_filenames_truncated is not None else None}\n            success = False\n            while not success:\n                try:\n                    if abort_event.is_set():\n                        return\n                    target_queue.put(item, timeout=0.01)\n                    success = True\n                except queue.Full:\n                    pass\n        done_event.set()\n    except Exception as e:\n        abort_event.set()\n        raise e\n\n\ndef preprocessing_iterator_fromfiles(list_of_lists: List[List[str]],\n                                     list_of_segs_from_prev_stage_files: Union[None, List[str]],\n                                     output_filenames_truncated: Union[None, List[str]],\n                                     plans_manager: PlansManager,\n                                     dataset_json: dict,\n                                     configuration_manager: ConfigurationManager,\n                                     num_processes: int,\n                                     pin_memory: bool = False,\n                                     verbose: bool = False):\n    context = multiprocessing.get_context('spawn')\n    manager = Manager()\n    num_processes = min(len(list_of_lists), num_processes)\n    assert num_processes >= 1\n    processes = []\n    done_events = []\n    target_queues = []\n    abort_event = manager.Event()\n    for i in range(num_processes):\n        event = 
manager.Event()\n        queue = Manager().Queue(maxsize=1)\n        pr = context.Process(target=preprocess_fromfiles_save_to_queue,\n                     args=(\n                         list_of_lists[i::num_processes],\n                         list_of_segs_from_prev_stage_files[\n                         i::num_processes] if list_of_segs_from_prev_stage_files is not None else None,\n                         output_filenames_truncated[\n                         i::num_processes] if output_filenames_truncated is not None else None,\n                         plans_manager,\n                         dataset_json,\n                         configuration_manager,\n                         queue,\n                         event,\n                         abort_event,\n                         verbose\n                     ), daemon=True)\n        pr.start()\n        target_queues.append(queue)\n        done_events.append(event)\n        processes.append(pr)\n\n    worker_ctr = 0\n    while (not done_events[worker_ctr].is_set()) or (not target_queues[worker_ctr].empty()):\n        if not target_queues[worker_ctr].empty():\n            item = target_queues[worker_ctr].get()\n            worker_ctr = (worker_ctr + 1) % num_processes\n        else:\n            all_ok = all(\n                [i.is_alive() or j.is_set() for i, j in zip(processes, done_events)]) and not abort_event.is_set()\n            if not all_ok:\n                raise RuntimeError('Background workers died. Look for the error message further up! If there is '\n                                   'none then your RAM was full and the worker was killed by the OS. 
Use fewer '\n                                   'workers or get more RAM in that case!')\n            sleep(0.01)\n            continue\n        if pin_memory:\n            [i.pin_memory() for i in item.values() if isinstance(i, torch.Tensor)]\n        yield item\n    [p.join() for p in processes]\n\nclass PreprocessAdapter(DataLoader):\n    def __init__(self, list_of_lists: List[List[str]],\n                 list_of_segs_from_prev_stage_files: Union[None, List[str]],\n                 preprocessor: DefaultPreprocessor,\n                 output_filenames_truncated: Union[None, List[str]],\n                 plans_manager: PlansManager,\n                 dataset_json: dict,\n                 configuration_manager: ConfigurationManager,\n                 num_threads_in_multithreaded: int = 1):\n        self.preprocessor, self.plans_manager, self.configuration_manager, self.dataset_json = \\\n            preprocessor, plans_manager, configuration_manager, dataset_json\n\n        self.label_manager = plans_manager.get_label_manager(dataset_json)\n\n        if list_of_segs_from_prev_stage_files is None:\n            list_of_segs_from_prev_stage_files = [None] * len(list_of_lists)\n        if output_filenames_truncated is None:\n            output_filenames_truncated = [None] * len(list_of_lists)\n\n        super().__init__(list(zip(list_of_lists, list_of_segs_from_prev_stage_files, output_filenames_truncated)),\n                         1, num_threads_in_multithreaded,\n                         seed_for_shuffle=1, return_incomplete=True,\n                         shuffle=False, infinite=False, sampling_probabilities=None)\n\n        self.indices = list(range(len(list_of_lists)))\n\n    def generate_train_batch(self):\n        idx = self.get_indices()[0]\n        files = self._data[idx][0]\n        seg_prev_stage = self._data[idx][1]\n        ofile = self._data[idx][2]\n        # if we have a segmentation from the previous stage we have to process it together with the 
images so that we\n        # can crop it appropriately (if needed). Otherwise it would just be resized to the shape of the data after\n        # preprocessing and then there might be misalignments\n        data, seg, data_properties = self.preprocessor.run_case(files, seg_prev_stage, self.plans_manager,\n                                                                self.configuration_manager,\n                                                                self.dataset_json)\n        if seg_prev_stage is not None:\n            seg_onehot = convert_labelmap_to_one_hot(seg[0], self.label_manager.foreground_labels, data.dtype)\n            data = np.vstack((data, seg_onehot))\n\n        data = torch.from_numpy(data)\n\n        return {'data': data, 'data_properties': data_properties, 'ofile': ofile}\n\n\nclass PreprocessAdapterFromNpy(DataLoader):\n    def __init__(self, list_of_images: List[np.ndarray],\n                 list_of_segs_from_prev_stage: Union[List[np.ndarray], None],\n                 list_of_image_properties: List[dict],\n                 truncated_ofnames: Union[List[str], None],\n                 plans_manager: PlansManager, dataset_json: dict, configuration_manager: ConfigurationManager,\n                 num_threads_in_multithreaded: int = 1, verbose: bool = False):\n        preprocessor = configuration_manager.preprocessor_class(verbose=verbose)\n        self.preprocessor, self.plans_manager, self.configuration_manager, self.dataset_json, self.truncated_ofnames = \\\n            preprocessor, plans_manager, configuration_manager, dataset_json, truncated_ofnames\n\n        self.label_manager = plans_manager.get_label_manager(dataset_json)\n\n        if list_of_segs_from_prev_stage is None:\n            list_of_segs_from_prev_stage = [None] * len(list_of_images)\n        if truncated_ofnames is None:\n            truncated_ofnames = [None] * len(list_of_images)\n\n        super().__init__(\n            list(zip(list_of_images, 
list_of_segs_from_prev_stage, list_of_image_properties, truncated_ofnames)),\n            1, num_threads_in_multithreaded,\n            seed_for_shuffle=1, return_incomplete=True,\n            shuffle=False, infinite=False, sampling_probabilities=None)\n\n        self.indices = list(range(len(list_of_images)))\n\n    def generate_train_batch(self):\n        idx = self.get_indices()[0]\n        image = self._data[idx][0]\n        seg_prev_stage = self._data[idx][1]\n        props = self._data[idx][2]\n        ofname = self._data[idx][3]\n        # if we have a segmentation from the previous stage we have to process it together with the images so that we\n        # can crop it appropriately (if needed). Otherwise it would just be resized to the shape of the data after\n        # preprocessing and then there might be misalignments\n        data, seg = self.preprocessor.run_case_npy(image, seg_prev_stage, props,\n                                                   self.plans_manager,\n                                                   self.configuration_manager,\n                                                   self.dataset_json)\n        if seg_prev_stage is not None:\n            seg_onehot = convert_labelmap_to_one_hot(seg[0], self.label_manager.foreground_labels, data.dtype)\n            data = np.vstack((data, seg_onehot))\n\n        data = torch.from_numpy(data)\n\n        return {'data': data, 'data_properties': props, 'ofile': ofname}\n\n\ndef preprocess_fromnpy_save_to_queue(list_of_images: List[np.ndarray],\n                                     list_of_segs_from_prev_stage: Union[List[np.ndarray], None],\n                                     list_of_image_properties: List[dict],\n                                     truncated_ofnames: Union[List[str], None],\n                                     plans_manager: PlansManager,\n                                     dataset_json: dict,\n                                     configuration_manager: 
ConfigurationManager,\n                                     target_queue: Queue,\n                                     done_event: Event,\n                                     abort_event: Event,\n                                     verbose: bool = False):\n    try:\n        label_manager = plans_manager.get_label_manager(dataset_json)\n        preprocessor = configuration_manager.preprocessor_class(verbose=verbose)\n        for idx in range(len(list_of_images)):\n            data, seg = preprocessor.run_case_npy(list_of_images[idx],\n                                                  list_of_segs_from_prev_stage[\n                                                      idx] if list_of_segs_from_prev_stage is not None else None,\n                                                  list_of_image_properties[idx],\n                                                  plans_manager,\n                                                  configuration_manager,\n                                                  dataset_json)\n            if list_of_segs_from_prev_stage is not None and list_of_segs_from_prev_stage[idx] is not None:\n                seg_onehot = convert_labelmap_to_one_hot(seg[0], label_manager.foreground_labels, data.dtype)\n                data = np.vstack((data, seg_onehot))\n\n            data = torch.from_numpy(data).contiguous().float()\n\n            item = {'data': data, 'data_properties': list_of_image_properties[idx],\n                    'ofile': truncated_ofnames[idx] if truncated_ofnames is not None else None}\n            success = False\n            while not success:\n                try:\n                    if abort_event.is_set():\n                        return\n                    target_queue.put(item, timeout=0.01)\n                    success = True\n                except queue.Full:\n                    pass\n        done_event.set()\n    except Exception as e:\n        abort_event.set()\n        raise e\n\n\ndef 
preprocessing_iterator_fromnpy(list_of_images: List[np.ndarray],\n                                   list_of_segs_from_prev_stage: Union[List[np.ndarray], None],\n                                   list_of_image_properties: List[dict],\n                                   truncated_ofnames: Union[List[str], None],\n                                   plans_manager: PlansManager,\n                                   dataset_json: dict,\n                                   configuration_manager: ConfigurationManager,\n                                   num_processes: int,\n                                   pin_memory: bool = False,\n                                   verbose: bool = False):\n    context = multiprocessing.get_context('spawn')\n    manager = Manager()\n    num_processes = min(len(list_of_images), num_processes)\n    assert num_processes >= 1\n    target_queues = []\n    processes = []\n    done_events = []\n    abort_event = manager.Event()\n    for i in range(num_processes):\n        event = manager.Event()\n        queue = manager.Queue(maxsize=1)\n        pr = context.Process(target=preprocess_fromnpy_save_to_queue,\n                     args=(\n                         list_of_images[i::num_processes],\n                         list_of_segs_from_prev_stage[\n                         i::num_processes] if list_of_segs_from_prev_stage is not None else None,\n                         list_of_image_properties[i::num_processes],\n                         truncated_ofnames[i::num_processes] if truncated_ofnames is not None else None,\n                         plans_manager,\n                         dataset_json,\n                         configuration_manager,\n                         queue,\n                         event,\n                         abort_event,\n                         verbose\n                     ), daemon=True)\n        pr.start()\n        done_events.append(event)\n        processes.append(pr)\n        target_queues.append(queue)\n\n 
   worker_ctr = 0\n    while (not done_events[worker_ctr].is_set()) or (not target_queues[worker_ctr].empty()):\n        if not target_queues[worker_ctr].empty():\n            item = target_queues[worker_ctr].get()\n            worker_ctr = (worker_ctr + 1) % num_processes\n        else:\n            all_ok = all(\n                [i.is_alive() or j.is_set() for i, j in zip(processes, done_events)]) and not abort_event.is_set()\n            if not all_ok:\n                raise RuntimeError('Background workers died. Look for the error message further up! If there is '\n                                   'none then your RAM was full and the worker was killed by the OS. Use fewer '\n                                   'workers or get more RAM in that case!')\n            sleep(0.01)\n            continue\n        if pin_memory:\n            [i.pin_memory() for i in item.values() if isinstance(i, torch.Tensor)]\n        yield item\n    [p.join() for p in processes]\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/inference/examples.py",
    "content": "if __name__ == '__main__':\n    from nnunetv2.paths import nnUNet_results, nnUNet_raw\n    import torch\n    from batchgenerators.utilities.file_and_folder_operations import join\n    from nnunetv2.inference.predict_from_raw_data import nnUNetPredictor\n    from nnunetv2.imageio.simpleitk_reader_writer import SimpleITKIO\n\n    # nnUNetv2_predict -d 3 -f 0 -c 3d_lowres -i imagesTs -o imagesTs_predlowres --continue_prediction\n\n    # instantiate the nnUNetPredictor\n    predictor = nnUNetPredictor(\n        tile_step_size=0.5,\n        use_gaussian=True,\n        use_mirroring=True,\n        perform_everything_on_gpu=True,\n        device=torch.device('cuda', 0),\n        verbose=False,\n        verbose_preprocessing=False,\n        allow_tqdm=True\n    )\n    # initializes the network architecture, loads the checkpoint\n    predictor.initialize_from_trained_model_folder(\n        join(nnUNet_results, 'Dataset003_Liver/nnUNetTrainer__nnUNetPlans__3d_lowres'),\n        use_folds=(0,),\n        checkpoint_name='checkpoint_final.pth',\n    )\n    # variant 1: give input and output folders\n    predictor.predict_from_files(join(nnUNet_raw, 'Dataset003_Liver/imagesTs'),\n                                 join(nnUNet_raw, 'Dataset003_Liver/imagesTs_predlowres'),\n                                 save_probabilities=False, overwrite=False,\n                                 num_processes_preprocessing=2, num_processes_segmentation_export=2,\n                                 folder_with_segs_from_prev_stage=None, num_parts=1, part_id=0)\n\n    # variant 2, use list of files as inputs. 
Note how we use nested lists!!!\n    indir = join(nnUNet_raw, 'Dataset003_Liver/imagesTs')\n    outdir = join(nnUNet_raw, 'Dataset003_Liver/imagesTs_predlowres')\n    predictor.predict_from_files([[join(indir, 'liver_152_0000.nii.gz')],\n                                  [join(indir, 'liver_142_0000.nii.gz')]],\n                                 [join(outdir, 'liver_152.nii.gz'),\n                                  join(outdir, 'liver_142.nii.gz')],\n                                 save_probabilities=False, overwrite=True,\n                                 num_processes_preprocessing=2, num_processes_segmentation_export=2,\n                                 folder_with_segs_from_prev_stage=None, num_parts=1, part_id=0)\n\n    # variant 2.5, returns segmentations\n    indir = join(nnUNet_raw, 'Dataset003_Liver/imagesTs')\n    predicted_segmentations = predictor.predict_from_files([[join(indir, 'liver_152_0000.nii.gz')],\n                                                            [join(indir, 'liver_142_0000.nii.gz')]],\n                                                           None,\n                                                           save_probabilities=True, overwrite=True,\n                                                           num_processes_preprocessing=2,\n                                                           num_processes_segmentation_export=2,\n                                                           folder_with_segs_from_prev_stage=None, num_parts=1,\n                                                           part_id=0)\n\n    # predict several npy images\n    from nnunetv2.imageio.simpleitk_reader_writer import SimpleITKIO\n\n    img, props = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_147_0000.nii.gz')])\n    img2, props2 = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_146_0000.nii.gz')])\n    img3, props3 = SimpleITKIO().read_images([join(nnUNet_raw, 
'Dataset003_Liver/imagesTs/liver_145_0000.nii.gz')])\n    img4, props4 = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_144_0000.nii.gz')])\n    # we do not set output files so that the segmentations will be returned. You can of course also specify output\n    # files instead (no return value on that case)\n    ret = predictor.predict_from_list_of_npy_arrays([img, img2, img3, img4],\n                                                    None,\n                                                    [props, props2, props3, props4],\n                                                    None, 2, save_probabilities=False,\n                                                    num_processes_segmentation_export=2)\n\n    # predict a single numpy array\n    img, props = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_147_0000.nii.gz')])\n    ret = predictor.predict_single_npy_array(img, props, None, None, True)\n\n    # custom iterator\n\n    img, props = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_147_0000.nii.gz')])\n    img2, props2 = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_146_0000.nii.gz')])\n    img3, props3 = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_145_0000.nii.gz')])\n    img4, props4 = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_144_0000.nii.gz')])\n\n\n    # each element returned by data_iterator must be a dict with 'data', 'ofile' and 'data_properties' keys!\n    # If 'ofile' is None, the result will be returned instead of written to a file\n    # the iterator is responsible for performing the correct preprocessing!\n    # note how the iterator here does not use multiprocessing -> preprocessing will be done in the main thread!\n    # take a look at the default iterators for predict_from_files and predict_from_list_of_npy_arrays\n    # (they both use 
predictor.predict_from_data_iterator) for inspiration!\n    def my_iterator(list_of_input_arrs, list_of_input_props):\n        preprocessor = predictor.configuration_manager.preprocessor_class(verbose=predictor.verbose)\n        for a, p in zip(list_of_input_arrs, list_of_input_props):\n            data, seg = preprocessor.run_case_npy(a,\n                                                  None,\n                                                  p,\n                                                  predictor.plans_manager,\n                                                  predictor.configuration_manager,\n                                                  predictor.dataset_json)\n            yield {'data': torch.from_numpy(data).contiguous().pin_memory(), 'data_properties': p, 'ofile': None}\n\n\n    ret = predictor.predict_from_data_iterator(my_iterator([img, img2, img3, img4], [props, props2, props3, props4]),\n                                               save_probabilities=False, num_processes_segmentation_export=3)\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/inference/export_prediction.py",
    "content": "import os\nfrom copy import deepcopy\nfrom typing import Union, List\n\nimport numpy as np\nimport torch\nfrom acvl_utils.cropping_and_padding.bounding_boxes import bounding_box_to_slice\nfrom batchgenerators.utilities.file_and_folder_operations import load_json, isfile, save_pickle\n\nfrom nnunetv2.configuration import default_num_processes\nfrom nnunetv2.utilities.label_handling.label_handling import LabelManager\nfrom nnunetv2.utilities.plans_handling.plans_handler import PlansManager, ConfigurationManager\n\n\ndef convert_predicted_logits_to_segmentation_with_correct_shape(predicted_logits: Union[torch.Tensor, np.ndarray],\n                                                                plans_manager: PlansManager,\n                                                                configuration_manager: ConfigurationManager,\n                                                                label_manager: LabelManager,\n                                                                properties_dict: dict,\n                                                                return_probabilities: bool = False,\n                                                                num_threads_torch: int = default_num_processes):\n    old_threads = torch.get_num_threads()\n    torch.set_num_threads(num_threads_torch)\n\n    # resample to original shape\n    current_spacing = configuration_manager.spacing if \\\n        len(configuration_manager.spacing) == \\\n        len(properties_dict['shape_after_cropping_and_before_resampling']) else \\\n        [properties_dict['spacing'][0], *configuration_manager.spacing]\n    predicted_logits = configuration_manager.resampling_fn_probabilities(predicted_logits,\n                                            properties_dict['shape_after_cropping_and_before_resampling'],\n                                            current_spacing,\n                                            properties_dict['spacing'])\n    # return value 
of resampling_fn_probabilities can be ndarray or Tensor but that does not matter because\n    # apply_inference_nonlin will convert to torch\n    predicted_probabilities = label_manager.apply_inference_nonlin(predicted_logits)\n    del predicted_logits\n    segmentation = label_manager.convert_probabilities_to_segmentation(predicted_probabilities)\n\n    # segmentation may be torch.Tensor but we continue with numpy\n    if isinstance(segmentation, torch.Tensor):\n        segmentation = segmentation.cpu().numpy()\n\n    # put segmentation in bbox (revert cropping)\n    segmentation_reverted_cropping = np.zeros(properties_dict['shape_before_cropping'],\n                                              dtype=np.uint8 if len(label_manager.foreground_labels) < 255 else np.uint16)\n    slicer = bounding_box_to_slice(properties_dict['bbox_used_for_cropping'])\n    segmentation_reverted_cropping[slicer] = segmentation\n    del segmentation\n\n    # revert transpose\n    segmentation_reverted_cropping = segmentation_reverted_cropping.transpose(plans_manager.transpose_backward)\n    if return_probabilities:\n        # revert cropping\n        predicted_probabilities = label_manager.revert_cropping_on_probabilities(predicted_probabilities,\n                                                                                 properties_dict[\n                                                                                     'bbox_used_for_cropping'],\n                                                                                 properties_dict[\n                                                                                     'shape_before_cropping'])\n        predicted_probabilities = predicted_probabilities.cpu().numpy()\n        # revert transpose\n        predicted_probabilities = predicted_probabilities.transpose([0] + [i + 1 for i in\n                                                                           plans_manager.transpose_backward])\n        
torch.set_num_threads(old_threads)\n        return segmentation_reverted_cropping, predicted_probabilities\n    else:\n        torch.set_num_threads(old_threads)\n        return segmentation_reverted_cropping\n\n\ndef export_prediction_from_logits(predicted_array_or_file: Union[np.ndarray, torch.Tensor], properties_dict: dict,\n                                  configuration_manager: ConfigurationManager,\n                                  plans_manager: PlansManager,\n                                  dataset_json_dict_or_file: Union[dict, str], output_file_truncated: str,\n                                  save_probabilities: bool = False):\n    # if isinstance(predicted_array_or_file, str):\n    #     tmp = deepcopy(predicted_array_or_file)\n    #     if predicted_array_or_file.endswith('.npy'):\n    #         predicted_array_or_file = np.load(predicted_array_or_file)\n    #     elif predicted_array_or_file.endswith('.npz'):\n    #         predicted_array_or_file = np.load(predicted_array_or_file)['softmax']\n    #     os.remove(tmp)\n\n    if isinstance(dataset_json_dict_or_file, str):\n        dataset_json_dict_or_file = load_json(dataset_json_dict_or_file)\n\n    label_manager = plans_manager.get_label_manager(dataset_json_dict_or_file)\n    ret = convert_predicted_logits_to_segmentation_with_correct_shape(\n        predicted_array_or_file, plans_manager, configuration_manager, label_manager, properties_dict,\n        return_probabilities=save_probabilities\n    )\n    del predicted_array_or_file\n\n    # save\n    if save_probabilities:\n        segmentation_final, probabilities_final = ret\n        np.savez_compressed(output_file_truncated + '.npz', probabilities=probabilities_final)\n        save_pickle(properties_dict, output_file_truncated + '.pkl')\n        del probabilities_final, ret\n    else:\n        segmentation_final = ret\n        del ret\n\n    rw = plans_manager.image_reader_writer_class()\n    rw.write_seg(segmentation_final, 
output_file_truncated + dataset_json_dict_or_file['file_ending'],\n                 properties_dict)\n\n\ndef resample_and_save(predicted: Union[torch.Tensor, np.ndarray], target_shape: List[int], output_file: str,\n                      plans_manager: PlansManager, configuration_manager: ConfigurationManager, properties_dict: dict,\n                      dataset_json_dict_or_file: Union[dict, str], num_threads_torch: int = default_num_processes) \\\n        -> None:\n    # # needed for cascade\n    # if isinstance(predicted, str):\n    #     assert isfile(predicted), \"If isinstance(segmentation_softmax, str) then \" \\\n    #                               \"isfile(segmentation_softmax) must be True\"\n    #     del_file = deepcopy(predicted)\n    #     predicted = np.load(predicted)\n    #     os.remove(del_file)\n    old_threads = torch.get_num_threads()\n    torch.set_num_threads(num_threads_torch)\n\n    if isinstance(dataset_json_dict_or_file, str):\n        dataset_json_dict_or_file = load_json(dataset_json_dict_or_file)\n\n    # resample to original shape\n    current_spacing = configuration_manager.spacing if \\\n        len(configuration_manager.spacing) == len(properties_dict['shape_after_cropping_and_before_resampling']) else \\\n        [properties_dict['spacing'][0], *configuration_manager.spacing]\n    target_spacing = configuration_manager.spacing if len(configuration_manager.spacing) == \\\n        len(properties_dict['shape_after_cropping_and_before_resampling']) else \\\n        [properties_dict['spacing'][0], *configuration_manager.spacing]\n    predicted_array_or_file = configuration_manager.resampling_fn_probabilities(predicted,\n                                                                                target_shape,\n                                                                                current_spacing,\n                                                                                target_spacing)\n\n    # create segmentation 
(argmax, regions, etc)\n    label_manager = plans_manager.get_label_manager(dataset_json_dict_or_file)\n    segmentation = label_manager.convert_logits_to_segmentation(predicted_array_or_file)\n    # segmentation may be torch.Tensor but we continue with numpy\n    if isinstance(segmentation, torch.Tensor):\n        segmentation = segmentation.cpu().numpy()\n    np.savez_compressed(output_file, seg=segmentation.astype(np.uint8))\n    torch.set_num_threads(old_threads)\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/inference/predict_from_raw_data.py",
    "content": "import inspect\nimport itertools\nimport multiprocessing\nimport os\nimport traceback\nfrom copy import deepcopy\nfrom time import sleep\nfrom typing import Tuple, Union, List, Optional\n\nimport numpy as np\nimport torch\nfrom acvl_utils.cropping_and_padding.padding import pad_nd_image\nfrom batchgenerators.dataloading.multi_threaded_augmenter import MultiThreadedAugmenter\nfrom batchgenerators.utilities.file_and_folder_operations import load_json, join, isfile, maybe_mkdir_p, isdir, subdirs, \\\n    save_json\nfrom torch import nn\nfrom torch._dynamo import OptimizedModule\nfrom torch.nn.parallel import DistributedDataParallel\nfrom tqdm import tqdm\n\nimport nnunetv2\nfrom nnunetv2.configuration import default_num_processes\nfrom nnunetv2.inference.data_iterators import PreprocessAdapterFromNpy, preprocessing_iterator_fromfiles, \\\n    preprocessing_iterator_fromnpy\nfrom nnunetv2.inference.export_prediction import export_prediction_from_logits, \\\n    convert_predicted_logits_to_segmentation_with_correct_shape\nfrom nnunetv2.inference.sliding_window_prediction import compute_gaussian, \\\n    compute_steps_for_sliding_window\nfrom nnunetv2.utilities.file_path_utilities import get_output_folder, check_workers_alive_and_busy\nfrom nnunetv2.utilities.find_class_by_name import recursive_find_python_class\nfrom nnunetv2.utilities.helpers import empty_cache, dummy_context\nfrom nnunetv2.utilities.json_export import recursive_fix_for_json_export\nfrom nnunetv2.utilities.label_handling.label_handling import determine_num_input_channels\nfrom nnunetv2.utilities.plans_handling.plans_handler import PlansManager, ConfigurationManager\nfrom nnunetv2.utilities.utils import create_lists_from_splitted_dataset_folder\n\n\nclass nnUNetPredictor(object):\n    def __init__(self,\n                 tile_step_size: float = 0.5,\n                 use_gaussian: bool = True,\n                 use_mirroring: bool = True,\n                 perform_everything_on_gpu: bool 
= True,\n                 device: torch.device = torch.device('cuda'),\n                 verbose: bool = False,\n                 verbose_preprocessing: bool = False,\n                 allow_tqdm: bool = True):\n        self.verbose = verbose\n        self.verbose_preprocessing = verbose_preprocessing\n        self.allow_tqdm = allow_tqdm\n\n        self.plans_manager, self.configuration_manager, self.list_of_parameters, self.network, self.dataset_json, \\\n        self.trainer_name, self.allowed_mirroring_axes, self.label_manager = None, None, None, None, None, None, None, None\n\n        self.tile_step_size = tile_step_size\n        self.use_gaussian = use_gaussian\n        self.use_mirroring = use_mirroring\n        if device.type == 'cuda':\n            # device = torch.device(type='cuda', index=0)  # set the desired GPU with CUDA_VISIBLE_DEVICES!\n            # why would I ever want to do that. Stupid dobby. This kills DDP inference...\n            pass\n        if device.type != 'cuda':\n            print(f'perform_everything_on_gpu=True is only supported for cuda devices! 
Setting this to False')\n            perform_everything_on_gpu = False\n        self.device = device\n        self.perform_everything_on_gpu = perform_everything_on_gpu\n\n    def initialize_from_trained_model_folder(self, model_training_output_dir: str,\n                                             use_folds: Union[Tuple[Union[int, str]], None],\n                                             checkpoint_name: str = 'checkpoint_final.pth'):\n        \"\"\"\n        This is used when making predictions with a trained model\n        \"\"\"\n        if use_folds is None:\n            use_folds = nnUNetPredictor.auto_detect_available_folds(model_training_output_dir, checkpoint_name)\n\n        dataset_json = load_json(join(model_training_output_dir, 'dataset.json'))\n        plans = load_json(join(model_training_output_dir, 'plans.json'))\n        plans_manager = PlansManager(plans)\n\n        if isinstance(use_folds, str):\n            use_folds = [use_folds]\n\n        parameters = []\n        for i, f in enumerate(use_folds):\n            f = int(f) if f != 'all' else f\n            checkpoint = torch.load(join(model_training_output_dir, f'fold_{f}', checkpoint_name),\n                                    map_location=torch.device('cpu'))\n            if i == 0:\n                trainer_name = checkpoint['trainer_name']\n                configuration_name = checkpoint['init_args']['configuration']\n                inference_allowed_mirroring_axes = checkpoint['inference_allowed_mirroring_axes'] if \\\n                    'inference_allowed_mirroring_axes' in checkpoint.keys() else None\n\n            parameters.append(checkpoint['network_weights'])\n\n        configuration_manager = plans_manager.get_configuration(configuration_name)\n        # restore network\n        num_input_channels = determine_num_input_channels(plans_manager, configuration_manager, dataset_json)\n        trainer_class = recursive_find_python_class(join(nnunetv2.__path__[0], \"training\", 
\"nnUNetTrainer\"),\n                                                    trainer_name, 'nnunetv2.training.nnUNetTrainer')\n        network = trainer_class.build_network_architecture(plans_manager, dataset_json, configuration_manager,\n                                                           num_input_channels, enable_deep_supervision=False)\n        self.plans_manager = plans_manager\n        self.configuration_manager = configuration_manager\n        self.list_of_parameters = parameters\n        self.network = network\n        self.dataset_json = dataset_json\n        self.trainer_name = trainer_name\n        self.allowed_mirroring_axes = inference_allowed_mirroring_axes\n        self.label_manager = plans_manager.get_label_manager(dataset_json)\n        if ('nnUNet_compile' in os.environ.keys()) and (os.environ['nnUNet_compile'].lower() in ('true', '1', 't')) \\\n                and not isinstance(self.network, OptimizedModule):\n            print('compiling network')\n            self.network = torch.compile(self.network)\n\n    def manual_initialization(self, network: nn.Module, plans_manager: PlansManager,\n                              configuration_manager: ConfigurationManager, parameters: Optional[List[dict]],\n                              dataset_json: dict, trainer_name: str,\n                              inference_allowed_mirroring_axes: Optional[Tuple[int, ...]]):\n        \"\"\"\n        This is used by the nnUNetTrainer to initialize nnUNetPredictor for the final validation\n        \"\"\"\n        self.plans_manager = plans_manager\n        self.configuration_manager = configuration_manager\n        self.list_of_parameters = parameters\n        self.network = network\n        self.dataset_json = dataset_json\n        self.trainer_name = trainer_name\n        self.allowed_mirroring_axes = inference_allowed_mirroring_axes\n        self.label_manager = plans_manager.get_label_manager(dataset_json)\n        allow_compile = True\n        
allow_compile = allow_compile and ('nnUNet_compile' in os.environ.keys()) and (os.environ['nnUNet_compile'].lower() in ('true', '1', 't'))\n        allow_compile = allow_compile and not isinstance(self.network, OptimizedModule)\n        if isinstance(self.network, DistributedDataParallel):\n            allow_compile = allow_compile and isinstance(self.network.module, OptimizedModule)\n        if allow_compile:\n            print('compiling network')\n            self.network = torch.compile(self.network)\n\n    @staticmethod\n    def auto_detect_available_folds(model_training_output_dir, checkpoint_name):\n        print('use_folds is None, attempting to auto detect available folds')\n        fold_folders = subdirs(model_training_output_dir, prefix='fold_', join=False)\n        fold_folders = [i for i in fold_folders if i != 'fold_all']\n        fold_folders = [i for i in fold_folders if isfile(join(model_training_output_dir, i, checkpoint_name))]\n        use_folds = [int(i.split('_')[-1]) for i in fold_folders]\n        print(f'found the following folds: {use_folds}')\n        return use_folds\n\n    def _manage_input_and_output_lists(self, list_of_lists_or_source_folder: Union[str, List[List[str]]],\n                                       output_folder_or_list_of_truncated_output_files: Union[None, str, List[str]],\n                                       folder_with_segs_from_prev_stage: str = None,\n                                       overwrite: bool = True,\n                                       part_id: int = 0,\n                                       num_parts: int = 1,\n                                       save_probabilities: bool = False):\n        if isinstance(list_of_lists_or_source_folder, str):\n            list_of_lists_or_source_folder = create_lists_from_splitted_dataset_folder(list_of_lists_or_source_folder,\n                                                                                       self.dataset_json['file_ending'])\n        
print(f'There are {len(list_of_lists_or_source_folder)} cases in the source folder')\n        list_of_lists_or_source_folder = list_of_lists_or_source_folder[part_id::num_parts]\n        caseids = [os.path.basename(i[0])[:-(len(self.dataset_json['file_ending']) + 5)] for i in\n                   list_of_lists_or_source_folder]\n        print(\n            f'I am process {part_id} out of {num_parts} (max process ID is {num_parts - 1}, we start counting with 0!)')\n        print(f'There are {len(caseids)} cases that I would like to predict')\n\n        if isinstance(output_folder_or_list_of_truncated_output_files, str):\n            output_filename_truncated = [join(output_folder_or_list_of_truncated_output_files, i) for i in caseids]\n        else:\n            output_filename_truncated = output_folder_or_list_of_truncated_output_files\n\n        seg_from_prev_stage_files = [join(folder_with_segs_from_prev_stage, i + self.dataset_json['file_ending']) if\n                                     folder_with_segs_from_prev_stage is not None else None for i in caseids]\n        # remove already predicted files form the lists\n        if not overwrite and output_filename_truncated is not None:\n            tmp = [isfile(i + self.dataset_json['file_ending']) for i in output_filename_truncated]\n            if save_probabilities:\n                tmp2 = [isfile(i + '.npz') for i in output_filename_truncated]\n                tmp = [i and j for i, j in zip(tmp, tmp2)]\n            not_existing_indices = [i for i, j in enumerate(tmp) if not j]\n\n            output_filename_truncated = [output_filename_truncated[i] for i in not_existing_indices]\n            list_of_lists_or_source_folder = [list_of_lists_or_source_folder[i] for i in not_existing_indices]\n            seg_from_prev_stage_files = [seg_from_prev_stage_files[i] for i in not_existing_indices]\n            print(f'overwrite was set to {overwrite}, so I am only working on cases that haven\\'t been predicted yet. 
'\n                  f'That\\'s {len(not_existing_indices)} cases.')\n        return list_of_lists_or_source_folder, output_filename_truncated, seg_from_prev_stage_files\n\n    def predict_from_files(self,\n                           list_of_lists_or_source_folder: Union[str, List[List[str]]],\n                           output_folder_or_list_of_truncated_output_files: Union[str, None, List[str]],\n                           save_probabilities: bool = False,\n                           overwrite: bool = True,\n                           num_processes_preprocessing: int = default_num_processes,\n                           num_processes_segmentation_export: int = default_num_processes,\n                           folder_with_segs_from_prev_stage: str = None,\n                           num_parts: int = 1,\n                           part_id: int = 0):\n        \"\"\"\n        This is nnU-Net's default function for making predictions. It works best for batch predictions\n        (predicting many images at once).\n        \"\"\"\n        if isinstance(output_folder_or_list_of_truncated_output_files, str):\n            output_folder = output_folder_or_list_of_truncated_output_files\n        elif isinstance(output_folder_or_list_of_truncated_output_files, list):\n            output_folder = os.path.dirname(output_folder_or_list_of_truncated_output_files[0])\n        else:\n            output_folder = None\n\n        ########################\n        # let's store the input arguments so that its clear what was used to generate the prediction\n        if output_folder is not None:\n            my_init_kwargs = {}\n            for k in inspect.signature(self.predict_from_files).parameters.keys():\n                my_init_kwargs[k] = locals()[k]\n            my_init_kwargs = deepcopy(\n                my_init_kwargs)  # let's not unintentionally change anything in-place. 
Take this as a\n            recursive_fix_for_json_export(my_init_kwargs)\n            maybe_mkdir_p(output_folder)\n            save_json(my_init_kwargs, join(output_folder, 'predict_from_raw_data_args.json'))\n\n            # we need these two if we want to do things with the predictions like for example apply postprocessing\n            save_json(self.dataset_json, join(output_folder, 'dataset.json'), sort_keys=False)\n            save_json(self.plans_manager.plans, join(output_folder, 'plans.json'), sort_keys=False)\n        #######################\n\n        # check if we need a prediction from the previous stage\n        if self.configuration_manager.previous_stage_name is not None:\n            assert folder_with_segs_from_prev_stage is not None, \\\n                f'The requested configuration is a cascaded network. It requires the segmentations of the previous ' \\\n                f'stage ({self.configuration_manager.previous_stage_name}) as input. Please provide the folder where' \\\n                f' they are located via folder_with_segs_from_prev_stage'\n\n        # sort out input and output filenames\n        list_of_lists_or_source_folder, output_filename_truncated, seg_from_prev_stage_files = \\\n            self._manage_input_and_output_lists(list_of_lists_or_source_folder,\n                                                output_folder_or_list_of_truncated_output_files,\n                                                folder_with_segs_from_prev_stage, overwrite, part_id, num_parts,\n                                                save_probabilities)\n        if len(list_of_lists_or_source_folder) == 0:\n            return\n\n        data_iterator = self._internal_get_data_iterator_from_lists_of_filenames(list_of_lists_or_source_folder,\n                                                                                 seg_from_prev_stage_files,\n                                                                                 
output_filename_truncated,\n                                                                                 num_processes_preprocessing)\n\n        return self.predict_from_data_iterator(data_iterator, save_probabilities, num_processes_segmentation_export)\n\n    def _internal_get_data_iterator_from_lists_of_filenames(self,\n                                                            input_list_of_lists: List[List[str]],\n                                                            seg_from_prev_stage_files: Union[List[str], None],\n                                                            output_filenames_truncated: Union[List[str], None],\n                                                            num_processes: int):\n        return preprocessing_iterator_fromfiles(input_list_of_lists, seg_from_prev_stage_files,\n                                                output_filenames_truncated, self.plans_manager, self.dataset_json,\n                                                self.configuration_manager, num_processes, self.device.type == 'cuda',\n                                                self.verbose_preprocessing)\n        # preprocessor = self.configuration_manager.preprocessor_class(verbose=self.verbose_preprocessing)\n        # # hijack batchgenerators, yo\n        # # we use the multiprocessing of the batchgenerators dataloader to handle all the background worker stuff. 
This\n        # # way we don't have to reinvent the wheel here.\n        # num_processes = max(1, min(num_processes, len(input_list_of_lists)))\n        # ppa = PreprocessAdapter(input_list_of_lists, seg_from_prev_stage_files, preprocessor,\n        #                         output_filenames_truncated, self.plans_manager, self.dataset_json,\n        #                         self.configuration_manager, num_processes)\n        # if num_processes == 0:\n        #     mta = SingleThreadedAugmenter(ppa, None)\n        # else:\n        #     mta = MultiThreadedAugmenter(ppa, None, num_processes, 1, None, pin_memory=pin_memory)\n        # return mta\n\n    def get_data_iterator_from_raw_npy_data(self,\n                                            image_or_list_of_images: Union[np.ndarray, List[np.ndarray]],\n                                            segs_from_prev_stage_or_list_of_segs_from_prev_stage: Union[None,\n                                                                                                        np.ndarray,\n                                                                                                        List[\n                                                                                                            np.ndarray]],\n                                            properties_or_list_of_properties: Union[dict, List[dict]],\n                                            truncated_ofname: Union[str, List[str], None],\n                                            num_processes: int = 3):\n\n        list_of_images = [image_or_list_of_images] if not isinstance(image_or_list_of_images, list) else \\\n            image_or_list_of_images\n\n        if isinstance(segs_from_prev_stage_or_list_of_segs_from_prev_stage, np.ndarray):\n            segs_from_prev_stage_or_list_of_segs_from_prev_stage = [\n                segs_from_prev_stage_or_list_of_segs_from_prev_stage]\n\n        if isinstance(truncated_ofname, str):\n            truncated_ofname 
= [truncated_ofname]\n\n        if isinstance(properties_or_list_of_properties, dict):\n            properties_or_list_of_properties = [properties_or_list_of_properties]\n\n        num_processes = min(num_processes, len(list_of_images))\n        pp = preprocessing_iterator_fromnpy(\n            list_of_images,\n            segs_from_prev_stage_or_list_of_segs_from_prev_stage,\n            properties_or_list_of_properties,\n            truncated_ofname,\n            self.plans_manager,\n            self.dataset_json,\n            self.configuration_manager,\n            num_processes,\n            self.device.type == 'cuda',\n            self.verbose_preprocessing\n        )\n\n        return pp\n\n    def predict_from_list_of_npy_arrays(self,\n                                        image_or_list_of_images: Union[np.ndarray, List[np.ndarray]],\n                                        segs_from_prev_stage_or_list_of_segs_from_prev_stage: Union[None,\n                                                                                                    np.ndarray,\n                                                                                                    List[\n                                                                                                        np.ndarray]],\n                                        properties_or_list_of_properties: Union[dict, List[dict]],\n                                        truncated_ofname: Union[str, List[str], None],\n                                        num_processes: int = 3,\n                                        save_probabilities: bool = False,\n                                        num_processes_segmentation_export: int = default_num_processes):\n        iterator = self.get_data_iterator_from_raw_npy_data(image_or_list_of_images,\n                                                            segs_from_prev_stage_or_list_of_segs_from_prev_stage,\n                                                            
properties_or_list_of_properties,\n                                                            truncated_ofname,\n                                                            num_processes)\n        return self.predict_from_data_iterator(iterator, save_probabilities, num_processes_segmentation_export)\n\n    def predict_from_data_iterator(self,\n                                   data_iterator,\n                                   save_probabilities: bool = False,\n                                   num_processes_segmentation_export: int = default_num_processes):\n        \"\"\"\n        each element returned by data_iterator must be a dict with 'data', 'ofile' and 'data_properties' keys!\n        If 'ofile' is None, the result will be returned instead of written to a file\n        \"\"\"\n        with multiprocessing.get_context(\"spawn\").Pool(num_processes_segmentation_export) as export_pool:\n            worker_list = [i for i in export_pool._pool]\n            r = []\n            for preprocessed in data_iterator:\n                data = preprocessed['data']\n                if isinstance(data, str):\n                    delfile = data\n                    data = torch.from_numpy(np.load(data))\n                    os.remove(delfile)\n\n                ofile = preprocessed['ofile']\n                if ofile is not None:\n                    print(f'\\nPredicting {os.path.basename(ofile)}:')\n                else:\n                    print(f'\\nPredicting image of shape {data.shape}:')\n\n                print(f'perform_everything_on_gpu: {self.perform_everything_on_gpu}')\n\n                properties = preprocessed['data_properties']\n\n                # let's not get into a runaway situation where the GPU predicts so fast that the disk has to b swamped with\n                # npy files\n                proceed = not check_workers_alive_and_busy(export_pool, worker_list, r, allowed_num_queued=2)\n                while not proceed:\n                    # 
print('sleeping')\n                    sleep(0.1)\n                    proceed = not check_workers_alive_and_busy(export_pool, worker_list, r, allowed_num_queued=2)\n\n                prediction = self.predict_logits_from_preprocessed_data(data).cpu()\n\n                if ofile is not None:\n                    # this needs to go into background processes\n                    # export_prediction_from_logits(prediction, properties, configuration_manager, plans_manager,\n                    #                               dataset_json, ofile, save_probabilities)\n                    print('sending off prediction to background worker for resampling and export')\n                    r.append(\n                        export_pool.starmap_async(\n                            export_prediction_from_logits,\n                            ((prediction, properties, self.configuration_manager, self.plans_manager,\n                              self.dataset_json, ofile, save_probabilities),)\n                        )\n                    )\n                else:\n                    # convert_predicted_logits_to_segmentation_with_correct_shape(prediction, plans_manager,\n                    #                                                             configuration_manager, label_manager,\n                    #                                                             properties,\n                    #                                                             save_probabilities)\n                    print('sending off prediction to background worker for resampling')\n                    r.append(\n                        export_pool.starmap_async(\n                            convert_predicted_logits_to_segmentation_with_correct_shape, (\n                                (prediction, self.plans_manager,\n                                 self.configuration_manager, self.label_manager,\n                                 properties,\n                                 
save_probabilities),)\n                        )\n                    )\n                if ofile is not None:\n                    print(f'done with {os.path.basename(ofile)}')\n                else:\n                    print(f'\\nDone with image of shape {data.shape}:')\n            ret = [i.get()[0] for i in r]\n\n        if isinstance(data_iterator, MultiThreadedAugmenter):\n            data_iterator._finish()\n\n        # clear lru cache\n        compute_gaussian.cache_clear()\n        # clear device cache\n        empty_cache(self.device)\n        return ret\n\n    def predict_single_npy_array(self, input_image: np.ndarray, image_properties: dict,\n                                 segmentation_previous_stage: np.ndarray = None,\n                                 output_file_truncated: str = None,\n                                 save_or_return_probabilities: bool = False):\n        \"\"\"\n        image_properties must only have a 'spacing' key!\n        \"\"\"\n        ppa = PreprocessAdapterFromNpy([input_image], [segmentation_previous_stage], [image_properties],\n                                       [output_file_truncated],\n                                       self.plans_manager, self.dataset_json, self.configuration_manager,\n                                       num_threads_in_multithreaded=1, verbose=self.verbose)\n        if self.verbose:\n            print('preprocessing')\n        dct = next(ppa)\n\n        if self.verbose:\n            print('predicting')\n        predicted_logits = self.predict_logits_from_preprocessed_data(dct['data']).cpu()\n\n        if self.verbose:\n            print('resampling to original shape')\n        if output_file_truncated is not None:\n            export_prediction_from_logits(predicted_logits, dct['data_properties'], self.configuration_manager,\n                                          self.plans_manager, self.dataset_json, output_file_truncated,\n                                          
save_or_return_probabilities)\n        else:\n            ret = convert_predicted_logits_to_segmentation_with_correct_shape(predicted_logits, self.plans_manager,\n                                                                              self.configuration_manager,\n                                                                              self.label_manager,\n                                                                              dct['data_properties'],\n                                                                              return_probabilities=\n                                                                              save_or_return_probabilities)\n            if save_or_return_probabilities:\n                return ret[0], ret[1]\n            else:\n                return ret\n\n    def predict_logits_from_preprocessed_data(self, data: torch.Tensor) -> torch.Tensor:\n        \"\"\"\n        IMPORTANT! IF YOU ARE RUNNING THE CASCADE, THE SEGMENTATION FROM THE PREVIOUS STAGE MUST ALREADY BE STACKED ON\n        TOP OF THE IMAGE AS ONE-HOT REPRESENTATION! SEE PreprocessAdapter ON HOW THIS SHOULD BE DONE!\n\n        RETURNED LOGITS HAVE THE SHAPE OF THE INPUT. THEY MUST BE CONVERTED BACK TO THE ORIGINAL IMAGE SIZE.\n        SEE convert_predicted_logits_to_segmentation_with_correct_shape\n        \"\"\"\n        # we have some code duplication here but this allows us to run with perform_everything_on_gpu=True as\n        # default and not have the entire program crash in case of GPU out of memory. Neat. 
That should make\n        # things a lot faster for some datasets.\n        original_perform_everything_on_gpu = self.perform_everything_on_gpu\n        with torch.no_grad():\n            prediction = None\n            if self.perform_everything_on_gpu:\n                try:\n                    for params in self.list_of_parameters:\n\n                        # messing with state dict names...\n                        if not isinstance(self.network, OptimizedModule):\n                            self.network.load_state_dict(params)\n                        else:\n                            self.network._orig_mod.load_state_dict(params)\n\n                        if prediction is None:\n                            prediction = self.predict_sliding_window_return_logits(data)\n                        else:\n                            prediction += self.predict_sliding_window_return_logits(data)\n\n                    if len(self.list_of_parameters) > 1:\n                        prediction /= len(self.list_of_parameters)\n\n                except RuntimeError:\n                    print('Prediction with perform_everything_on_gpu=True failed due to insufficient GPU memory. '\n                          'Falling back to perform_everything_on_gpu=False. 
Not a big deal, just slower...')\n                    print('Error:')\n                    traceback.print_exc()\n                    prediction = None\n                    self.perform_everything_on_gpu = False\n\n            if prediction is None:\n                for params in self.list_of_parameters:\n                    # messing with state dict names...\n                    if not isinstance(self.network, OptimizedModule):\n                        self.network.load_state_dict(params)\n                    else:\n                        self.network._orig_mod.load_state_dict(params)\n\n                    if prediction is None:\n                        prediction = self.predict_sliding_window_return_logits(data)\n                    else:\n                        prediction += self.predict_sliding_window_return_logits(data)\n                if len(self.list_of_parameters) > 1:\n                    prediction /= len(self.list_of_parameters)\n\n            print('Prediction done, transferring to CPU if needed')\n            prediction = prediction.to('cpu')\n            self.perform_everything_on_gpu = original_perform_everything_on_gpu\n        return prediction\n\n    def _internal_get_sliding_window_slicers(self, image_size: Tuple[int, ...]):\n        slicers = []\n        if len(self.configuration_manager.patch_size) < len(image_size):\n            assert len(self.configuration_manager.patch_size) == len(\n                image_size) - 1, 'if tile_size has less entries than image_size, ' \\\n                                 'len(tile_size) ' \\\n                                 'must be one shorter than len(image_size) ' \\\n                                 '(only dimension ' \\\n                                 'discrepancy of 1 allowed).'\n            steps = compute_steps_for_sliding_window(image_size[1:], self.configuration_manager.patch_size,\n                                                     self.tile_step_size)\n            if self.verbose: 
print(f'n_steps {image_size[0] * len(steps[0]) * len(steps[1])}, image size is'\n                                   f' {image_size}, tile_size {self.configuration_manager.patch_size}, '\n                                   f'tile_step_size {self.tile_step_size}\\nsteps:\\n{steps}')\n            for d in range(image_size[0]):\n                for sx in steps[0]:\n                    for sy in steps[1]:\n                        slicers.append(\n                            tuple([slice(None), d, *[slice(si, si + ti) for si, ti in\n                                                     zip((sx, sy), self.configuration_manager.patch_size)]]))\n        else:\n            steps = compute_steps_for_sliding_window(image_size, self.configuration_manager.patch_size,\n                                                     self.tile_step_size)\n            if self.verbose: print(\n                f'n_steps {np.prod([len(i) for i in steps])}, image size is {image_size}, tile_size {self.configuration_manager.patch_size}, '\n                f'tile_step_size {self.tile_step_size}\\nsteps:\\n{steps}')\n            for sx in steps[0]:\n                for sy in steps[1]:\n                    for sz in steps[2]:\n                        slicers.append(\n                            tuple([slice(None), *[slice(si, si + ti) for si, ti in\n                                                  zip((sx, sy, sz), self.configuration_manager.patch_size)]]))\n        return slicers\n\n    def _internal_maybe_mirror_and_predict(self, x: torch.Tensor) -> torch.Tensor:\n        mirror_axes = self.allowed_mirroring_axes if self.use_mirroring else None\n        prediction = self.network(x)\n\n        if mirror_axes is not None:\n            # check for invalid numbers in mirror_axes\n            # x should be 5d for 3d images and 4d for 2d. 
so the max value of mirror_axes cannot exceed len(x.shape) - 3\n            assert max(mirror_axes) <= x.ndim - 3, 'mirror_axes does not match the dimension of the input!'\n\n            axes_combinations = [\n                c for i in range(len(mirror_axes)) for c in itertools.combinations([m + 2 for m in mirror_axes], i + 1)\n            ]\n            for axes in axes_combinations:\n                prediction += torch.flip(self.network(torch.flip(x, (*axes,))), (*axes,))\n            prediction /= (len(axes_combinations) + 1)\n        return prediction\n\n    def predict_sliding_window_return_logits(self, input_image: torch.Tensor) \\\n            -> Union[np.ndarray, torch.Tensor]:\n        assert isinstance(input_image, torch.Tensor)\n        self.network = self.network.to(self.device)\n        self.network.eval()\n\n        empty_cache(self.device)\n\n        # Autocast is a little bitch.\n        # If the device_type is 'cpu' then it's slow as heck on some CPUs (no auto bfloat16 support detection)\n        # and needs to be disabled.\n        # If the device_type is 'mps' then it will complain that mps is not implemented, even if enabled=False\n        # is set. Whyyyyyyy. 
(this is why we don't make use of enabled=False)\n        # So autocast will only be active if we have a cuda device.\n        with torch.no_grad():\n            with torch.autocast(self.device.type, enabled=True) if self.device.type == 'cuda' else dummy_context():\n                assert input_image.ndim == 4, 'input_image must be a 4D np.ndarray or torch.Tensor (c, x, y, z)'\n\n                if self.verbose: print(f'Input shape: {input_image.shape}')\n                if self.verbose: print(\"step_size:\", self.tile_step_size)\n                if self.verbose: print(\"mirror_axes:\", self.allowed_mirroring_axes if self.use_mirroring else None)\n\n                # if input_image is smaller than tile_size we need to pad it to tile_size.\n                data, slicer_revert_padding = pad_nd_image(input_image, self.configuration_manager.patch_size,\n                                                           'constant', {'value': 0}, True,\n                                                           None)\n\n                slicers = self._internal_get_sliding_window_slicers(data.shape[1:])\n\n                # preallocate results and num_predictions\n                results_device = self.device if self.perform_everything_on_gpu else torch.device('cpu')\n                if self.verbose: print('preallocating arrays')\n                try:\n                    data = data.to(self.device)\n                    predicted_logits = torch.zeros((self.label_manager.num_segmentation_heads, *data.shape[1:]),\n                                                   dtype=torch.half,\n                                                   device=results_device)\n                    n_predictions = torch.zeros(data.shape[1:], dtype=torch.half,\n                                                device=results_device)\n                    if self.use_gaussian:\n                        gaussian = compute_gaussian(tuple(self.configuration_manager.patch_size), sigma_scale=1. 
/ 8,\n                                                    value_scaling_factor=10,\n                                                    device=results_device)\n                except RuntimeError:\n                    # sometimes the stuff is too large for GPUs. In that case fall back to CPU\n                    results_device = torch.device('cpu')\n                    data = data.to(results_device)\n                    predicted_logits = torch.zeros((self.label_manager.num_segmentation_heads, *data.shape[1:]),\n                                                   dtype=torch.half,\n                                                   device=results_device)\n                    n_predictions = torch.zeros(data.shape[1:], dtype=torch.half,\n                                                device=results_device)\n                    if self.use_gaussian:\n                        gaussian = compute_gaussian(tuple(self.configuration_manager.patch_size), sigma_scale=1. / 8,\n                                                    value_scaling_factor=10,\n                                                    device=results_device)\n                finally:\n                    empty_cache(self.device)\n\n                if self.verbose: print('running prediction')\n                for sl in tqdm(slicers, disable=not self.allow_tqdm):\n                    workon = data[sl][None]\n                    workon = workon.to(self.device, non_blocking=False)\n\n                    prediction = self._internal_maybe_mirror_and_predict(workon)[0].to(results_device)\n\n                    predicted_logits[sl] += (prediction * gaussian if self.use_gaussian else prediction)\n                    n_predictions[sl[1:]] += (gaussian if self.use_gaussian else 1)\n\n                predicted_logits /= n_predictions\n                # check for infs\n                if torch.any(torch.isinf(predicted_logits)):\n                    raise RuntimeError('Encountered inf in predicted array. Aborting... 
If this problem persists, '\n                                       'reduce value_scaling_factor in compute_gaussian or increase the dtype of '\n                                       'predicted_logits to fp32')\n        empty_cache(self.device)\n        return predicted_logits[tuple([slice(None), *slicer_revert_padding[1:]])]\n\n\ndef predict_entry_point_modelfolder():\n    import argparse\n    parser = argparse.ArgumentParser(description='Use this to run inference with nnU-Net. This function is used when '\n                                                 'you want to manually specify a folder containing a trained nnU-Net '\n                                                 'model. This is useful when the nnunet environment variables '\n                                                 '(nnUNet_results) are not set.')\n    parser.add_argument('-i', type=str, required=True,\n                        help='input folder. Remember to use the correct channel numberings for your files (_0000 etc). '\n                             'File endings must be the same as the training dataset!')\n    parser.add_argument('-o', type=str, required=True,\n                        help='Output folder. If it does not exist it will be created. Predicted segmentations will '\n                             'have the same name as their source images.')\n    parser.add_argument('-m', type=str, required=True,\n                        help='Folder in which the trained model is. Must have subfolders fold_X for the different '\n                             'folds you trained')\n    parser.add_argument('-f', nargs='+', type=str, required=False, default=(0, 1, 2, 3, 4),\n                        help='Specify the folds of the trained model that should be used for prediction. '\n                             'Default: (0, 1, 2, 3, 4)')\n    parser.add_argument('-step_size', type=float, required=False, default=0.5,\n                        help='Step size for sliding window prediction. 
The larger it is the faster but less accurate '\n                             'the prediction. Default: 0.5. Cannot be larger than 1. We recommend the default.')\n    parser.add_argument('--disable_tta', action='store_true', required=False, default=False,\n                        help='Set this flag to disable test time data augmentation in the form of mirroring. Faster, '\n                             'but less accurate inference. Not recommended.')\n    parser.add_argument('--verbose', action='store_true', help=\"Set this if you like being talked to. You will have \"\n                                                               \"to be a good listener/reader.\")\n    parser.add_argument('--save_probabilities', action='store_true',\n                        help='Set this to export predicted class \"probabilities\". Required if you want to ensemble '\n                             'multiple configurations.')\n    parser.add_argument('--continue_prediction', '--c', action='store_true',\n                        help='Continue an aborted previous prediction (will not overwrite existing files)')\n    parser.add_argument('-chk', type=str, required=False, default='checkpoint_final.pth',\n                        help='Name of the checkpoint you want to use. Default: checkpoint_final.pth')\n    parser.add_argument('-npp', type=int, required=False, default=3,\n                        help='Number of processes used for preprocessing. More is not always better. Beware of '\n                             'out-of-RAM issues. Default: 3')\n    parser.add_argument('-nps', type=int, required=False, default=3,\n                        help='Number of processes used for segmentation export. More is not always better. Beware of '\n                             'out-of-RAM issues. Default: 3')\n    parser.add_argument('-prev_stage_predictions', type=str, required=False, default=None,\n                        help='Folder containing the predictions of the previous stage. 
Required for cascaded models.')\n    parser.add_argument('-device', type=str, default='cuda', required=False,\n                        help=\"Use this to set the device the inference should run with. Available options are 'cuda' \"\n                             \"(GPU), 'cpu' (CPU) and 'mps' (Apple M1/M2). Do NOT use this to set which GPU ID! \"\n                             \"Use CUDA_VISIBLE_DEVICES=X nnUNetv2_predict [...] instead!\")\n\n    print(\n        \"\\n#######################################################################\\nPlease cite the following paper \"\n        \"when using nnU-Net:\\n\"\n        \"Isensee, F., Jaeger, P. F., Kohl, S. A., Petersen, J., & Maier-Hein, K. H. (2021). \"\n        \"nnU-Net: a self-configuring method for deep learning-based biomedical image segmentation. \"\n        \"Nature methods, 18(2), 203-211.\\n#######################################################################\\n\")\n\n    args = parser.parse_args()\n    args.f = [i if i == 'all' else int(i) for i in args.f]\n\n    if not isdir(args.o):\n        maybe_mkdir_p(args.o)\n\n    assert args.device in ['cpu', 'cuda',\n                           'mps'], f'-device must be either cpu, mps or cuda. Other devices are not tested/supported. 
Got: {args.device}.'\n    if args.device == 'cpu':\n        # let's allow torch to use hella threads\n        import multiprocessing\n        torch.set_num_threads(multiprocessing.cpu_count())\n        device = torch.device('cpu')\n    elif args.device == 'cuda':\n        # multithreading in torch doesn't help nnU-Net if run on GPU\n        torch.set_num_threads(1)\n        torch.set_num_interop_threads(1)\n        device = torch.device('cuda')\n    else:\n        device = torch.device('mps')\n\n    predictor = nnUNetPredictor(tile_step_size=args.step_size,\n                                use_gaussian=True,\n                                use_mirroring=not args.disable_tta,\n                                perform_everything_on_gpu=True,\n                                device=device,\n                                verbose=args.verbose)\n    predictor.initialize_from_trained_model_folder(args.m, args.f, args.chk)\n    predictor.predict_from_files(args.i, args.o, save_probabilities=args.save_probabilities,\n                                 overwrite=not args.continue_prediction,\n                                 num_processes_preprocessing=args.npp,\n                                 num_processes_segmentation_export=args.nps,\n                                 folder_with_segs_from_prev_stage=args.prev_stage_predictions,\n                                 num_parts=1, part_id=0)\n\n\ndef predict_entry_point():\n    import argparse\n    parser = argparse.ArgumentParser(description='Use this to run inference with nnU-Net. This function is used when '\n                                                 'you want to manually specify a folder containing a trained nnU-Net '\n                                                 'model. This is useful when the nnunet environment variables '\n                                                 '(nnUNet_results) are not set.')\n    parser.add_argument('-i', type=str, required=True,\n                        help='input folder. 
Remember to use the correct channel numberings for your files (_0000 etc). '\n                             'File endings must be the same as the training dataset!')\n    parser.add_argument('-o', type=str, required=True,\n                        help='Output folder. If it does not exist it will be created. Predicted segmentations will '\n                             'have the same name as their source images.')\n    parser.add_argument('-d', type=str, required=True,\n                        help='Dataset with which you would like to predict. You can specify either dataset name or id')\n    parser.add_argument('-p', type=str, required=False, default='nnUNetPlans',\n                        help='Plans identifier. Specify the plans in which the desired configuration is located. '\n                             'Default: nnUNetPlans')\n    parser.add_argument('-tr', type=str, required=False, default='nnUNetTrainer',\n                        help='What nnU-Net trainer class was used for training? Default: nnUNetTrainer')\n    parser.add_argument('-c', type=str, required=True,\n                        help='nnU-Net configuration that should be used for prediction. Config must be located '\n                             'in the plans specified with -p')\n    parser.add_argument('-f', nargs='+', type=str, required=False, default=(0, 1, 2, 3, 4),\n                        help='Specify the folds of the trained model that should be used for prediction. '\n                             'Default: (0, 1, 2, 3, 4)')\n    parser.add_argument('-step_size', type=float, required=False, default=0.5,\n                        help='Step size for sliding window prediction. The larger it is the faster but less accurate '\n                             'the prediction. Default: 0.5. Cannot be larger than 1. 
We recommend the default.')\n    parser.add_argument('--disable_tta', action='store_true', required=False, default=False,\n                        help='Set this flag to disable test time data augmentation in the form of mirroring. Faster, '\n                             'but less accurate inference. Not recommended.')\n    parser.add_argument('--verbose', action='store_true', help=\"Set this if you like being talked to. You will have \"\n                                                               \"to be a good listener/reader.\")\n    parser.add_argument('--save_probabilities', action='store_true',\n                        help='Set this to export predicted class \"probabilities\". Required if you want to ensemble '\n                             'multiple configurations.')\n    parser.add_argument('--continue_prediction', action='store_true',\n                        help='Continue an aborted previous prediction (will not overwrite existing files)')\n    parser.add_argument('-chk', type=str, required=False, default='checkpoint_final.pth',\n                        help='Name of the checkpoint you want to use. Default: checkpoint_final.pth')\n    parser.add_argument('-npp', type=int, required=False, default=3,\n                        help='Number of processes used for preprocessing. More is not always better. Beware of '\n                             'out-of-RAM issues. Default: 3')\n    parser.add_argument('-nps', type=int, required=False, default=3,\n                        help='Number of processes used for segmentation export. More is not always better. Beware of '\n                             'out-of-RAM issues. Default: 3')\n    parser.add_argument('-prev_stage_predictions', type=str, required=False, default=None,\n                        help='Folder containing the predictions of the previous stage. 
Required for cascaded models.')\n    parser.add_argument('-num_parts', type=int, required=False, default=1,\n                        help='Number of separate nnUNetv2_predict call that you will be making. Default: 1 (= this one '\n                             'call predicts everything)')\n    parser.add_argument('-part_id', type=int, required=False, default=0,\n                        help='If multiple nnUNetv2_predict exist, which one is this? IDs start with 0 can end with '\n                             'num_parts - 1. So when you submit 5 nnUNetv2_predict calls you need to set -num_parts '\n                             '5 and use -part_id 0, 1, 2, 3 and 4. Simple, right? Note: You are yourself responsible '\n                             'to make these run on separate GPUs! Use CUDA_VISIBLE_DEVICES (google, yo!)')\n    parser.add_argument('-device', type=str, default='cuda', required=False,\n                        help=\"Use this to set the device the inference should run with. Available options are 'cuda' \"\n                             \"(GPU), 'cpu' (CPU) and 'mps' (Apple M1/M2). Do NOT use this to set which GPU ID! \"\n                             \"Use CUDA_VISIBLE_DEVICES=X nnUNetv2_predict [...] instead!\")\n\n    print(\n        \"\\n#######################################################################\\nPlease cite the following paper \"\n        \"when using nnU-Net:\\n\"\n        \"Isensee, F., Jaeger, P. F., Kohl, S. A., Petersen, J., & Maier-Hein, K. H. (2021). \"\n        \"nnU-Net: a self-configuring method for deep learning-based biomedical image segmentation. 
\"\n        \"Nature methods, 18(2), 203-211.\\n#######################################################################\\n\")\n\n    args = parser.parse_args()\n    args.f = [i if i == 'all' else int(i) for i in args.f]\n\n    model_folder = get_output_folder(args.d, args.tr, args.p, args.c)\n\n    if not isdir(args.o):\n        maybe_mkdir_p(args.o)\n\n    # slightly passive aggressive haha\n    assert args.part_id < args.num_parts, 'Do you even read the documentation? See nnUNetv2_predict -h.'\n\n    assert args.device in ['cpu', 'cuda',\n                           'mps'], f'-device must be either cpu, mps or cuda. Other devices are not tested/supported. Got: {args.device}.'\n    if args.device == 'cpu':\n        # let's allow torch to use hella threads\n        import multiprocessing\n        torch.set_num_threads(multiprocessing.cpu_count())\n        device = torch.device('cpu')\n    elif args.device == 'cuda':\n        # multithreading in torch doesn't help nnU-Net if run on GPU\n        torch.set_num_threads(1)\n        torch.set_num_interop_threads(1)\n        device = torch.device('cuda')\n    else:\n        device = torch.device('mps')\n\n    predictor = nnUNetPredictor(tile_step_size=args.step_size,\n                                use_gaussian=True,\n                                use_mirroring=not args.disable_tta,\n                                perform_everything_on_gpu=True,\n                                device=device,\n                                verbose=args.verbose,\n                                verbose_preprocessing=False)\n    predictor.initialize_from_trained_model_folder(\n        model_folder,\n        args.f,\n        checkpoint_name=args.chk\n    )\n    predictor.predict_from_files(args.i, args.o, save_probabilities=args.save_probabilities,\n                                 overwrite=not args.continue_prediction,\n                                 num_processes_preprocessing=args.npp,\n                                 
num_processes_segmentation_export=args.nps,\n                                 folder_with_segs_from_prev_stage=args.prev_stage_predictions,\n                                 num_parts=args.num_parts,\n                                 part_id=args.part_id)\n    # r = predict_from_raw_data(args.i,\n    #                           args.o,\n    #                           model_folder,\n    #                           args.f,\n    #                           args.step_size,\n    #                           use_gaussian=True,\n    #                           use_mirroring=not args.disable_tta,\n    #                           perform_everything_on_gpu=True,\n    #                           verbose=args.verbose,\n    #                           save_probabilities=args.save_probabilities,\n    #                           overwrite=not args.continue_prediction,\n    #                           checkpoint_name=args.chk,\n    #                           num_processes_preprocessing=args.npp,\n    #                           num_processes_segmentation_export=args.nps,\n    #                           folder_with_segs_from_prev_stage=args.prev_stage_predictions,\n    #                           num_parts=args.num_parts,\n    #                           part_id=args.part_id,\n    #                           device=device)\n\n\nif __name__ == '__main__':\n    # predict a bunch of files\n    from nnunetv2.paths import nnUNet_results, nnUNet_raw\n    predictor = nnUNetPredictor(\n        tile_step_size=0.5,\n        use_gaussian=True,\n        use_mirroring=True,\n        perform_everything_on_gpu=True,\n        device=torch.device('cuda', 0),\n        verbose=False,\n        verbose_preprocessing=False,\n        allow_tqdm=True\n        )\n    predictor.initialize_from_trained_model_folder(\n        join(nnUNet_results, 'Dataset003_Liver/nnUNetTrainer__nnUNetPlans__3d_lowres'),\n        use_folds=(0, ),\n        checkpoint_name='checkpoint_final.pth',\n    )\n    
predictor.predict_from_files(join(nnUNet_raw, 'Dataset003_Liver/imagesTs'),\n                                 join(nnUNet_raw, 'Dataset003_Liver/imagesTs_predlowres'),\n                                 save_probabilities=False, overwrite=False,\n                                 num_processes_preprocessing=2, num_processes_segmentation_export=2,\n                                 folder_with_segs_from_prev_stage=None, num_parts=1, part_id=0)\n\n    # predict a numpy array\n    from nnunetv2.imageio.simpleitk_reader_writer import SimpleITKIO\n    img, props = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTr/liver_63_0000.nii.gz')])\n    ret = predictor.predict_single_npy_array(img, props, None, None, False)\n\n    iterator = predictor.get_data_iterator_from_raw_npy_data([img], None, [props], None, 1)\n    ret = predictor.predict_from_data_iterator(iterator, False, 1)\n\n\n    # predictor = nnUNetPredictor(\n    #     tile_step_size=0.5,\n    #     use_gaussian=True,\n    #     use_mirroring=True,\n    #     perform_everything_on_gpu=True,\n    #     device=torch.device('cuda', 0),\n    #     verbose=False,\n    #     allow_tqdm=True\n    #     )\n    # predictor.initialize_from_trained_model_folder(\n    #     join(nnUNet_results, 'Dataset003_Liver/nnUNetTrainer__nnUNetPlans__3d_cascade_fullres'),\n    #     use_folds=(0,),\n    #     checkpoint_name='checkpoint_final.pth',\n    # )\n    # predictor.predict_from_files(join(nnUNet_raw, 'Dataset003_Liver/imagesTs'),\n    #                              join(nnUNet_raw, 'Dataset003_Liver/imagesTs_predCascade'),\n    #                              save_probabilities=False, overwrite=False,\n    #                              num_processes_preprocessing=2, num_processes_segmentation_export=2,\n    #                              folder_with_segs_from_prev_stage='/media/isensee/data/nnUNet_raw/Dataset003_Liver/imagesTs_predlowres',\n    #                              num_parts=1, part_id=0)\n\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/inference/readme.md",
    "content": "The nnU-Net inference is now much more dynamic than before, allowing you to more seamlessly integrate nnU-Net into \nyour existing workflows.\nThis readme will give you a quick rundown of your options. This is not a complete guide. Look into the code to learn \nall the details!\n\n# Preface\nIn terms of speed, the most efficient inference strategy is the one done by the nnU-Net defaults! Images are read on \nthe fly and preprocessed in background workers. The main process takes the preprocessed images, predicts them and \nsends the prediction off to another set of background workers which will resize the resulting logits, convert \nthem to a segmentation and export the segmentation.\n\nThe reason the default setup is the best option is because \n\n1) loading and preprocessing as well as segmentation export are interlaced with the prediction. The main process can \nfocus on communicating with the compute device (i.e. your GPU) and does not have to do any other processing. \nThis uses your resources as well as possible!\n2) only the images and segmentation that are currently being needed are stored in RAM! Imaging predicting many images \nand having to store all of them + the results in your system memory\n\n# nnUNetPredictor\nThe new nnUNetPredictor class encapsulates the inferencing code and makes it simple to switch between modes. Your \ncode can hold a nnUNetPredictor instance and perform prediction on the fly. Previously this was not possible and each \nnew prediction request resulted in reloading the parameters and reinstantiating the network architecture. Not ideal.\n\nThe nnUNetPredictor must be ininitialized manually! 
You will want to use the \n`predictor.initialize_from_trained_model_folder` function for 99% of use cases!\n\nNew feature: If you do not specify an output folder / output files then the predicted segmentations will be \nreturned \n\n\n## Recommended nnU-Net default: predict from source files\n\ntldr:\n- loads images on the fly\n- performs preprocessing in background workers\n- main process focuses only on making predictions\n- results are again given to background workers for resampling and (optional) export\n\npros:\n- best suited for predicting a large number of images\n- nicer to your RAM\n\ncons:\n- not ideal when single images are to be predicted \n- requires images to be present as files\n\nExample:\n```python\n    from nnunetv2.paths import nnUNet_results, nnUNet_raw\n    import torch\n    from batchgenerators.utilities.file_and_folder_operations import join\n    from nnunetv2.inference.predict_from_raw_data import nnUNetPredictor\n    \n    # instantiate the nnUNetPredictor\n    predictor = nnUNetPredictor(\n        tile_step_size=0.5,\n        use_gaussian=True,\n        use_mirroring=True,\n        perform_everything_on_gpu=True,\n        device=torch.device('cuda', 0),\n        verbose=False,\n        verbose_preprocessing=False,\n        allow_tqdm=True\n    )\n    # initializes the network architecture, loads the checkpoint\n    predictor.initialize_from_trained_model_folder(\n        join(nnUNet_results, 'Dataset003_Liver/nnUNetTrainer__nnUNetPlans__3d_lowres'),\n        use_folds=(0,),\n        checkpoint_name='checkpoint_final.pth',\n    )\n    # variant 1: give input and output folders\n    predictor.predict_from_files(join(nnUNet_raw, 'Dataset003_Liver/imagesTs'),\n                                 join(nnUNet_raw, 'Dataset003_Liver/imagesTs_predlowres'),\n                                 save_probabilities=False, overwrite=False,\n                                 num_processes_preprocessing=2, num_processes_segmentation_export=2,\n                 
                folder_with_segs_from_prev_stage=None, num_parts=1, part_id=0)\n```\n\nInstead of giving input and output folders you can also give concrete files. If you give concrete files, there is no \nneed for the _0000 suffix anymore! This can be useful in situations where you have no control over the filenames!\nRemember that the files must be given as 'list of lists' where each entry in the outer list is a case to be predicted \nand the inner list contains all the files belonging to that case. There is just one file for datasets with just one \ninput modality (such as CT) but may be more files for others (such as MRI where there is sometimes T1, T2, Flair etc). \nIMPORTANT: the order in which the files for each case are given must match the order of the channels as defined in the \ndataset.json!\n\nIf you give files as input, you need to give individual output files as output!\n\n```python\n    # variant 2, use list of files as inputs. Note how we use nested lists!!!\n    indir = join(nnUNet_raw, 'Dataset003_Liver/imagesTs')\n    outdir = join(nnUNet_raw, 'Dataset003_Liver/imagesTs_predlowres')\n    predictor.predict_from_files([[join(indir, 'liver_152_0000.nii.gz')], \n                                  [join(indir, 'liver_142_0000.nii.gz')]],\n                                 [join(outdir, 'liver_152.nii.gz'),\n                                  join(outdir, 'liver_142.nii.gz')],\n                                 save_probabilities=False, overwrite=False,\n                                 num_processes_preprocessing=2, num_processes_segmentation_export=2,\n                                 folder_with_segs_from_prev_stage=None, num_parts=1, part_id=0)\n```\n\nDid you know? 
If you do not specify output files, the predicted segmentations will be returned:\n```python\n    # variant 2.5, returns segmentations\n    indir = join(nnUNet_raw, 'Dataset003_Liver/imagesTs')\n    predicted_segmentations = predictor.predict_from_files([[join(indir, 'liver_152_0000.nii.gz')],\n                                  [join(indir, 'liver_142_0000.nii.gz')]],\n                                 None,\n                                 save_probabilities=False, overwrite=True,\n                                 num_processes_preprocessing=2, num_processes_segmentation_export=2,\n                                 folder_with_segs_from_prev_stage=None, num_parts=1, part_id=0)\n```\n\n## Prediction from npy arrays\ntldr:\n- you give images as a list of npy arrays\n- performs preprocessing in background workers\n- main process focuses only on making predictions\n- results are again given to background workers for resampling and (optional) export\n\npros:\n- the correct variant for when you have images in RAM already\n- well suited for predicting multiple images\n\ncons:\n- uses more ram than the default\n- unsuited for large number of images as all images must be held in RAM\n\n```python\n    from nnunetv2.imageio.simpleitk_reader_writer import SimpleITKIO\n\n    img, props = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_147_0000.nii.gz')])\n    img2, props2 = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_146_0000.nii.gz')])\n    img3, props3 = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_145_0000.nii.gz')])\n    img4, props4 = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_144_0000.nii.gz')])\n    # we do not set output files so that the segmentations will be returned. 
You can of course also specify output\n    # files instead (no return value on that case)\n    ret = predictor.predict_from_list_of_npy_arrays([img, img2, img3, img4],\n                                                    None,\n                                                    [props, props2, props3, props4],\n                                                    None, 2, save_probabilities=False,\n                                                    num_processes_segmentation_export=2)\n```\n\n## Predicting a single npy array\n\ntldr:\n- you give one image as npy array\n- everything is done in the main process: preprocessing, prediction, resampling, (export)\n- no interlacing, slowest variant!\n- ONLY USE THIS IF YOU CANNOT GIVE NNUNET MULTIPLE IMAGES AT ONCE FOR SOME REASON\n\npros:\n- no messing with multiprocessing\n- no messing with data iterator blabla\n\ncons:\n- slows as heck, yo\n- never the right choice unless you can only give a single image at a time to nnU-Net\n\n```python\n    # predict a single numpy array\n    img, props = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTr/liver_63_0000.nii.gz')])\n    ret = predictor.predict_single_npy_array(img, props, None, None, False)\n```\n\n## Predicting with a custom data iterator\ntldr: \n- highly flexible\n- not for newbies\n\npros:\n- you can do everything yourself\n- you have all the freedom you want\n- really fast if you remember to use multiprocessing in your iterator\n\ncons:\n- you need to do everything yourself\n- harder than you might think\n\n```python\n    img, props = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_147_0000.nii.gz')])\n    img2, props2 = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_146_0000.nii.gz')])\n    img3, props3 = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_145_0000.nii.gz')])\n    img4, props4 = SimpleITKIO().read_images([join(nnUNet_raw, 
'Dataset003_Liver/imagesTs/liver_144_0000.nii.gz')])\n    # each element returned by data_iterator must be a dict with 'data', 'ofile' and 'data_properties' keys!\n    # If 'ofile' is None, the result will be returned instead of written to a file\n    # the iterator is responsible for performing the correct preprocessing!\n    # note how the iterator here does not use multiprocessing -> preprocessing will be done in the main thread!\n    # take a look at the default iterators for predict_from_files and predict_from_list_of_npy_arrays\n    # (they both use predictor.predict_from_data_iterator) for inspiration!\n    def my_iterator(list_of_input_arrs, list_of_input_props):\n        preprocessor = predictor.configuration_manager.preprocessor_class(verbose=predictor.verbose)\n        for a, p in zip(list_of_input_arrs, list_of_input_props):\n            data, seg = preprocessor.run_case_npy(a,\n                                                  None,\n                                                  p,\n                                                  predictor.plans_manager,\n                                                  predictor.configuration_manager,\n                                                  predictor.dataset_json)\n            yield {'data': torch.from_numpy(data).contiguous().pin_memory(), 'data_properties': p, 'ofile': None}\n    ret = predictor.predict_from_data_iterator(my_iterator([img, img2, img3, img4], [props, props2, props3, props4]),\n                                               save_probabilities=False, num_processes_segmentation_export=3)\n```"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/inference/sliding_window_prediction.py",
    "content": "from functools import lru_cache\n\nimport numpy as np\nimport torch\nfrom typing import Union, Tuple, List\nfrom acvl_utils.cropping_and_padding.padding import pad_nd_image\nfrom scipy.ndimage import gaussian_filter\n\n\n@lru_cache(maxsize=2)\ndef compute_gaussian(tile_size: Union[Tuple[int, ...], List[int]], sigma_scale: float = 1. / 8,\n                     value_scaling_factor: float = 1, dtype=torch.float16, device=torch.device('cuda', 0)) \\\n        -> torch.Tensor:\n    tmp = np.zeros(tile_size)\n    center_coords = [i // 2 for i in tile_size]\n    sigmas = [i * sigma_scale for i in tile_size]\n    tmp[tuple(center_coords)] = 1\n    gaussian_importance_map = gaussian_filter(tmp, sigmas, 0, mode='constant', cval=0)\n\n    gaussian_importance_map = torch.from_numpy(gaussian_importance_map)\n\n    gaussian_importance_map = gaussian_importance_map / torch.max(gaussian_importance_map) * value_scaling_factor\n    gaussian_importance_map = gaussian_importance_map.type(dtype).to(device)\n\n    # gaussian_importance_map cannot be 0, otherwise we may end up with nans!\n    gaussian_importance_map[gaussian_importance_map == 0] = torch.min(\n        gaussian_importance_map[gaussian_importance_map != 0])\n\n    return gaussian_importance_map\n\n\ndef compute_steps_for_sliding_window(image_size: Tuple[int, ...], tile_size: Tuple[int, ...], tile_step_size: float) -> \\\n        List[List[int]]:\n    assert [i >= j for i, j in zip(image_size, tile_size)], \"image size must be as large or larger than patch_size\"\n    assert 0 < tile_step_size <= 1, 'step_size must be larger than 0 and smaller or equal to 1'\n\n    # our step width is patch_size*step_size at most, but can be narrower. 
For example if we have image size of\n    # 110, patch size of 64 and step_size of 0.5, then we want to make 3 steps starting at coordinate 0, 23, 46\n    target_step_sizes_in_voxels = [i * tile_step_size for i in tile_size]\n\n    num_steps = [int(np.ceil((i - k) / j)) + 1 for i, j, k in zip(image_size, target_step_sizes_in_voxels, tile_size)]\n\n    steps = []\n    for dim in range(len(tile_size)):\n        # the highest step value for this dimension is\n        max_step_value = image_size[dim] - tile_size[dim]\n        if num_steps[dim] > 1:\n            actual_step_size = max_step_value / (num_steps[dim] - 1)\n        else:\n            actual_step_size = 99999999999  # does not matter because there is only one step at 0\n\n        steps_here = [int(np.round(actual_step_size * i)) for i in range(num_steps[dim])]\n\n        steps.append(steps_here)\n\n    return steps\n\n\nif __name__ == '__main__':\n    a = torch.rand((4, 2, 32, 23))\n    a_npy = a.numpy()\n\n    a_padded = pad_nd_image(a, new_shape=(48, 27))\n    a_npy_padded = pad_nd_image(a_npy, new_shape=(48, 27))\n    assert all([i == j for i, j in zip(a_padded.shape, (4, 2, 48, 27))])\n    assert all([i == j for i, j in zip(a_npy_padded.shape, (4, 2, 48, 27))])\n    assert np.all(a_padded.numpy() == a_npy_padded)\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/model_sharing/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/model_sharing/entry_points.py",
    "content": "from nnunetv2.model_sharing.model_download import download_and_install_from_url\nfrom nnunetv2.model_sharing.model_export import export_pretrained_model\nfrom nnunetv2.model_sharing.model_import import install_model_from_zip_file\n\n\ndef print_license_warning():\n    print('')\n    print('######################################################')\n    print('!!!!!!!!!!!!!!!!!!!!!!!!WARNING!!!!!!!!!!!!!!!!!!!!!!!')\n    print('######################################################')\n    print(\"Using the pretrained model weights is subject to the license of the dataset they were trained on. Some \"\n          \"allow commercial use, others don't. It is your responsibility to make sure you use them appropriately! Use \"\n          \"nnUNet_print_pretrained_model_info(task_name) to see a summary of the dataset and where to find its license!\")\n    print('######################################################')\n    print('')\n\n\ndef download_by_url():\n    import argparse\n    parser = argparse.ArgumentParser(\n        description=\"Use this to download pretrained models. This script is intended to download models via url only. 
\"\n                    \"CAREFUL: This script will overwrite \"\n                    \"existing models (if they share the same trainer class and plans as \"\n                    \"the pretrained model.\")\n    parser.add_argument(\"url\", type=str, help='URL of the pretrained model')\n    args = parser.parse_args()\n    url = args.url\n    download_and_install_from_url(url)\n\n\ndef install_from_zip_entry_point():\n    import argparse\n    parser = argparse.ArgumentParser(\n        description=\"Use this to install a zip file containing a pretrained model.\")\n    parser.add_argument(\"zip\", type=str, help='zip file')\n    args = parser.parse_args()\n    zip = args.zip\n    install_model_from_zip_file(zip)\n\n\ndef export_pretrained_model_entry():\n    import argparse\n    parser = argparse.ArgumentParser(\n        description=\"Use this to export a trained model as a zip file.\")\n    parser.add_argument('-d', type=str, required=True, help='Dataset name or id')\n    parser.add_argument('-o', type=str, required=True, help='Output file name')\n    parser.add_argument('-c', nargs='+', type=str, required=False,\n                        default=('3d_lowres', '3d_fullres', '2d', '3d_cascade_fullres'),\n                        help=\"List of configuration names\")\n    parser.add_argument('-tr', required=False, type=str, default='nnUNetTrainer', help='Trainer class')\n    parser.add_argument('-p', required=False, type=str, default='nnUNetPlans', help='plans identifier')\n    parser.add_argument('-f', required=False, nargs='+', type=str, default=(0, 1, 2, 3, 4), help='list of fold ids')\n    parser.add_argument('-chk', required=False, nargs='+', type=str, default=('checkpoint_final.pth', ),\n                        help='Lis tof checkpoint names to export. 
Default: checkpoint_final.pth')\n    parser.add_argument('--not_strict', action='store_false', default=False, required=False, help='Set this to allow missing folds and/or configurations')\n    parser.add_argument('--exp_cv_preds', action='store_true', required=False, help='Set this to export the cross-validation predictions as well')\n    args = parser.parse_args()\n\n    export_pretrained_model(dataset_name_or_id=args.d, output_file=args.o, configurations=args.c, trainer=args.tr,\n                            plans_identifier=args.p, folds=args.f, strict=not args.not_strict, save_checkpoints=args.chk,\n                            export_crossval_predictions=args.exp_cv_preds)\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/model_sharing/model_download.py",
    "content": "from typing import Optional\n\nimport requests\nfrom batchgenerators.utilities.file_and_folder_operations import *\nfrom time import time\nfrom nnunetv2.model_sharing.model_import import install_model_from_zip_file\nfrom nnunetv2.paths import nnUNet_results\nfrom tqdm import tqdm\n\n\ndef download_and_install_from_url(url):\n    assert nnUNet_results is not None, \"Cannot install model because network_training_output_dir is not \" \\\n                                                    \"set (RESULTS_FOLDER missing as environment variable, see \" \\\n                                                    \"Installation instructions)\"\n    print('Downloading pretrained model from url:', url)\n    import http.client\n    http.client.HTTPConnection._http_vsn = 10\n    http.client.HTTPConnection._http_vsn_str = 'HTTP/1.0'\n\n    import os\n    home = os.path.expanduser('~')\n    random_number = int(time() * 1e7)\n    tempfile = join(home, f'.nnunetdownload_{str(random_number)}')\n\n    try:\n        download_file(url=url, local_filename=tempfile, chunk_size=8192 * 16)\n        print(\"Download finished. Extracting...\")\n        install_model_from_zip_file(tempfile)\n        print(\"Done\")\n    except Exception as e:\n        raise e\n    finally:\n        if isfile(tempfile):\n            os.remove(tempfile)\n\n\ndef download_file(url: str, local_filename: str, chunk_size: Optional[int] = 8192 * 16) -> str:\n    # borrowed from https://stackoverflow.com/questions/16694907/download-large-file-in-python-with-requests\n    # NOTE the stream=True parameter below\n    with requests.get(url, stream=True, timeout=100) as r:\n        r.raise_for_status()\n        with tqdm.wrapattr(open(local_filename, 'wb'), \"write\", total=int(r.headers.get(\"Content-Length\"))) as f:\n            for chunk in r.iter_content(chunk_size=chunk_size):\n                f.write(chunk)\n    return local_filename\n\n\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/model_sharing/model_export.py",
    "content": "import zipfile\n\nfrom nnunetv2.utilities.file_path_utilities import *\n\n\ndef export_pretrained_model(dataset_name_or_id: Union[int, str], output_file: str,\n                            configurations: Tuple[str] = (\"2d\", \"3d_lowres\", \"3d_fullres\", \"3d_cascade_fullres\"),\n                            trainer: str = 'nnUNetTrainer',\n                            plans_identifier: str = 'nnUNetPlans',\n                            folds: Tuple[int, ...] = (0, 1, 2, 3, 4),\n                            strict: bool = True,\n                            save_checkpoints: Tuple[str, ...] = ('checkpoint_final.pth',),\n                            export_crossval_predictions: bool = False) -> None:\n    dataset_name = maybe_convert_to_dataset_name(dataset_name_or_id)\n    with(zipfile.ZipFile(output_file, 'w', zipfile.ZIP_DEFLATED)) as zipf:\n        for c in configurations:\n            print(f\"Configuration {c}\")\n            trainer_output_dir = get_output_folder(dataset_name, trainer, plans_identifier, c)\n\n            if not isdir(trainer_output_dir):\n                if strict:\n                    raise RuntimeError(f\"{dataset_name} is missing the trained model of configuration {c}\")\n                else:\n                    continue\n\n            expected_fold_folder = [f\"fold_{i}\" if i != 'all' else 'fold_all' for i in folds]\n            assert all([isdir(join(trainer_output_dir, i)) for i in expected_fold_folder]), \\\n                f\"not all requested folds are present; {dataset_name} {c}; requested folds: {folds}\"\n\n            assert isfile(join(trainer_output_dir, \"plans.json\")), f\"plans.json missing, {dataset_name} {c}\"\n\n            for fold_folder in expected_fold_folder:\n                print(f\"Exporting {fold_folder}\")\n                # debug.json, does not exist yet\n                source_file = join(trainer_output_dir, fold_folder, \"debug.json\")\n                if isfile(source_file):\n                  
  zipf.write(source_file, os.path.relpath(source_file, nnUNet_results))\n\n                # all requested checkpoints\n                for chk in save_checkpoints:\n                    source_file = join(trainer_output_dir, fold_folder, chk)\n                    zipf.write(source_file, os.path.relpath(source_file, nnUNet_results))\n\n                # progress.png\n                source_file = join(trainer_output_dir, fold_folder, \"progress.png\")\n                zipf.write(source_file, os.path.relpath(source_file, nnUNet_results))\n\n                # if it exists, network architecture.png\n                source_file = join(trainer_output_dir, fold_folder, \"network_architecture.pdf\")\n                if isfile(source_file):\n                    zipf.write(source_file, os.path.relpath(source_file, nnUNet_results))\n\n                # validation folder with all predicted segmentations etc\n                if export_crossval_predictions:\n                    source_folder = join(trainer_output_dir, fold_folder, \"validation\")\n                    files = [i for i in subfiles(source_folder, join=False) if not i.endswith('.npz') and not i.endswith('.pkl')]\n                    for f in files:\n                        zipf.write(join(source_folder, f), os.path.relpath(join(source_folder, f), nnUNet_results))\n                # just the summary.json file from the validation\n                else:\n                    source_file = join(trainer_output_dir, fold_folder, \"validation\", \"summary.json\")\n                    zipf.write(source_file, os.path.relpath(source_file, nnUNet_results))\n\n            source_folder = join(trainer_output_dir, f'crossval_results_folds_{folds_tuple_to_string(folds)}')\n            if isdir(source_folder):\n                if export_crossval_predictions:\n                    source_files = subfiles(source_folder, join=True)\n                else:\n                    source_files = [\n                        
join(trainer_output_dir, f'crossval_results_folds_{folds_tuple_to_string(folds)}', i) for i in\n                        ['summary.json', 'postprocessing.pkl', 'postprocessing.json']\n                    ]\n                for s in source_files:\n                    if isfile(s):\n                        zipf.write(s, os.path.relpath(s, nnUNet_results))\n            # plans\n            source_file = join(trainer_output_dir, \"plans.json\")\n            zipf.write(source_file, os.path.relpath(source_file, nnUNet_results))\n            # fingerprint\n            source_file = join(trainer_output_dir, \"dataset_fingerprint.json\")\n            zipf.write(source_file, os.path.relpath(source_file, nnUNet_results))\n            # dataset\n            source_file = join(trainer_output_dir, \"dataset.json\")\n            zipf.write(source_file, os.path.relpath(source_file, nnUNet_results))\n\n        ensemble_dir = join(nnUNet_results, dataset_name, 'ensembles')\n\n        if not isdir(ensemble_dir):\n            print(\"No ensemble directory found for task\", dataset_name_or_id)\n            return\n        subd = subdirs(ensemble_dir, join=False)\n                # figure out whether the models in the ensemble are all within the exported models here\n        for ens in subd:\n            identifiers, folds = convert_ensemble_folder_to_model_identifiers_and_folds(ens)\n            ok = True\n            for i in identifiers:\n                tr, pl, c = convert_identifier_to_trainer_plans_config(i)\n                if tr == trainer and pl == plans_identifier and c in configurations:\n                    pass\n                else:\n                    ok = False\n            if ok:\n                print(f'found matching ensemble: {ens}')\n                source_folder = join(ensemble_dir, ens)\n                if export_crossval_predictions:\n                    source_files = subfiles(source_folder, join=True)\n                else:\n                    source_files = 
[\n                        join(source_folder, i) for i in\n                        ['summary.json', 'postprocessing.pkl', 'postprocessing.json'] if isfile(join(source_folder, i))\n                    ]\n                for s in source_files:\n                    zipf.write(s, os.path.relpath(s, nnUNet_results))\n        inference_information_file = join(nnUNet_results, dataset_name, 'inference_information.json')\n        if isfile(inference_information_file):\n            zipf.write(inference_information_file, os.path.relpath(inference_information_file, nnUNet_results))\n        inference_information_txt_file = join(nnUNet_results, dataset_name, 'inference_information.txt')\n        if isfile(inference_information_txt_file):\n            zipf.write(inference_information_txt_file, os.path.relpath(inference_information_txt_file, nnUNet_results))\n    print('Done')\n\n\nif __name__ == '__main__':\n    export_pretrained_model(2, '/home/fabian/temp/dataset2.zip', strict=False, export_crossval_predictions=True, folds=(0, ))\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/model_sharing/model_import.py",
    "content": "import zipfile\n\nfrom nnunetv2.paths import nnUNet_results\n\n\ndef install_model_from_zip_file(zip_file: str):\n    with zipfile.ZipFile(zip_file, 'r') as zip_ref:\n        zip_ref.extractall(nnUNet_results)"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/paths.py",
    "content": "#    Copyright 2020 Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany\n#\n#    Licensed under the Apache License, Version 2.0 (the \"License\");\n#    you may not use this file except in compliance with the License.\n#    You may obtain a copy of the License at\n#\n#        http://www.apache.org/licenses/LICENSE-2.0\n#\n#    Unless required by applicable law or agreed to in writing, software\n#    distributed under the License is distributed on an \"AS IS\" BASIS,\n#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n#    See the License for the specific language governing permissions and\n#    limitations under the License.\n\nimport os\n\n\"\"\"\nPLEASE READ paths.md FOR INFORMATION TO HOW TO SET THIS UP\n\"\"\"\n\nbase = '/data/linshan/nnunet_data'\nnnUNet_raw = '/data/linshan/nnunet_data/nnUNet_raw'\nnnUNet_preprocessed = '/data/linshan/nnunet_data/nnUNet_preprocessed'\nnnUNet_results = '/data/linshan/nnunet_data/nnUNet_results'\n\nif nnUNet_raw is None:\n    print(\"nnUNet_raw is not defined and nnU-Net can only be used on data for which preprocessed files \"\n          \"are already present on your system. nnU-Net cannot be used for experiment planning and preprocessing like \"\n          \"this. If this is not intended, please read documentation/setting_up_paths.md for information on how to set \"\n          \"this up properly.\")\n\nif nnUNet_preprocessed is None:\n    print(\"nnUNet_preprocessed is not defined and nnU-Net can not be used for preprocessing \"\n          \"or training. If this is not intended, please read documentation/setting_up_paths.md for information on how \"\n          \"to set this up.\")\n\nif nnUNet_results is None:\n    print(\"nnUNet_results is not defined and nnU-Net cannot be used for training or \"\n          \"inference. 
If this is not intended behavior, please read documentation/setting_up_paths.md for information \"\n          \"on how to set this up.\")\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/postprocessing/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/postprocessing/remove_connected_components.py",
    "content": "import argparse\nimport multiprocessing\nimport shutil\nfrom multiprocessing import Pool\nfrom typing import Union, Tuple, List, Callable\n\nimport numpy as np\nfrom acvl_utils.morphology.morphology_helper import remove_all_but_largest_component\nfrom batchgenerators.utilities.file_and_folder_operations import load_json, subfiles, maybe_mkdir_p, join, isfile, \\\n    isdir, save_pickle, load_pickle, save_json\nfrom nnunetv2.configuration import default_num_processes\nfrom nnunetv2.evaluation.accumulate_cv_results import accumulate_cv_results\nfrom nnunetv2.evaluation.evaluate_predictions import region_or_label_to_mask, compute_metrics_on_folder, \\\n    load_summary_json, label_or_region_to_key\nfrom nnunetv2.imageio.base_reader_writer import BaseReaderWriter\nfrom nnunetv2.paths import nnUNet_raw\nfrom nnunetv2.utilities.file_path_utilities import folds_tuple_to_string\nfrom nnunetv2.utilities.json_export import recursive_fix_for_json_export\nfrom nnunetv2.utilities.plans_handling.plans_handler import PlansManager\n\n\ndef remove_all_but_largest_component_from_segmentation(segmentation: np.ndarray,\n                                                       labels_or_regions: Union[int, Tuple[int, ...],\n                                                                                List[Union[int, Tuple[int, ...]]]],\n                                                       background_label: int = 0) -> np.ndarray:\n    mask = np.zeros_like(segmentation, dtype=bool)\n    if not isinstance(labels_or_regions, list):\n        labels_or_regions = [labels_or_regions]\n    for l_or_r in labels_or_regions:\n        mask |= region_or_label_to_mask(segmentation, l_or_r)\n    mask_keep = remove_all_but_largest_component(mask)\n    ret = np.copy(segmentation)  # do not modify the input!\n    ret[mask & ~mask_keep] = background_label\n    return ret\n\n\ndef apply_postprocessing(segmentation: np.ndarray, pp_fns: List[Callable], pp_fn_kwargs: List[dict]):\n    for 
fn, kwargs in zip(pp_fns, pp_fn_kwargs):\n        segmentation = fn(segmentation, **kwargs)\n    return segmentation\n\n\ndef load_postprocess_save(segmentation_file: str,\n                          output_fname: str,\n                          image_reader_writer: BaseReaderWriter,\n                          pp_fns: List[Callable],\n                          pp_fn_kwargs: List[dict]):\n    seg, props = image_reader_writer.read_seg(segmentation_file)\n    seg = apply_postprocessing(seg[0], pp_fns, pp_fn_kwargs)\n    image_reader_writer.write_seg(seg, output_fname, props)\n\n\ndef determine_postprocessing(folder_predictions: str,\n                             folder_ref: str,\n                             plans_file_or_dict: Union[str, dict],\n                             dataset_json_file_or_dict: Union[str, dict],\n                             num_processes: int = default_num_processes,\n                             keep_postprocessed_files: bool = True):\n    \"\"\"\n    Determines nnUNet postprocessing. Its output is a postprocessing.pkl file in folder_predictions which can be\n    used with apply_postprocessing_to_folder.\n\n    Postprocessed files are saved in folder_predictions/postprocessed. Set\n    keep_postprocessed_files=False to delete these files after this function is done (temp files will eb created\n    and deleted regardless).\n\n    If plans_file_or_dict or dataset_json_file_or_dict are None, we will look for them in input_folder\n    \"\"\"\n    output_folder = join(folder_predictions, 'postprocessed')\n\n    if plans_file_or_dict is None:\n        expected_plans_file = join(folder_predictions, 'plans.json')\n        if not isfile(expected_plans_file):\n            raise RuntimeError(f\"Expected plans file missing: {expected_plans_file}. The plans files should have been \"\n                               f\"created while running nnUNetv2_predict. 
Sadge.\")\n        plans_file_or_dict = load_json(expected_plans_file)\n    plans_manager = PlansManager(plans_file_or_dict)\n\n    if dataset_json_file_or_dict is None:\n        expected_dataset_json_file = join(folder_predictions, 'dataset.json')\n        if not isfile(expected_dataset_json_file):\n            raise RuntimeError(\n                f\"Expected plans file missing: {expected_dataset_json_file}. The plans files should have been \"\n                f\"created while running nnUNetv2_predict. Sadge.\")\n        dataset_json_file_or_dict = load_json(expected_dataset_json_file)\n\n    if not isinstance(dataset_json_file_or_dict, dict):\n        dataset_json = load_json(dataset_json_file_or_dict)\n    else:\n        dataset_json = dataset_json_file_or_dict\n\n    rw = plans_manager.image_reader_writer_class()\n    label_manager = plans_manager.get_label_manager(dataset_json)\n    labels_or_regions = label_manager.foreground_regions if label_manager.has_regions else label_manager.foreground_labels\n\n    predicted_files = subfiles(folder_predictions, suffix=dataset_json['file_ending'], join=False)\n    ref_files = subfiles(folder_ref, suffix=dataset_json['file_ending'], join=False)\n    # we should print a warning if not all files from folder_ref are present in folder_predictions\n    if not all([i in predicted_files for i in ref_files]):\n        print(f'WARNING: Not all files in folder_ref were found in folder_predictions. 
Determining postprocessing '\n              f'should always be done on the entire dataset!')\n\n    # before we start we should evaluate the imaegs in the source folder\n    if not isfile(join(folder_predictions, 'summary.json')):\n        compute_metrics_on_folder(folder_ref,\n                                  folder_predictions,\n                                  join(folder_predictions, 'summary.json'),\n                                  rw,\n                                  dataset_json['file_ending'],\n                                  labels_or_regions,\n                                  label_manager.ignore_label,\n                                  num_processes)\n\n    # we save the postprocessing functions in here\n    pp_fns = []\n    pp_fn_kwargs = []\n\n    # pool party!\n    with multiprocessing.get_context(\"spawn\").Pool(num_processes) as pool:\n        # now let's see whether removing all but the largest foreground region improves the scores\n        output_here = join(output_folder, 'temp', 'keep_largest_fg')\n        maybe_mkdir_p(output_here)\n        pp_fn = remove_all_but_largest_component_from_segmentation\n        kwargs = {\n            'labels_or_regions': label_manager.foreground_labels,\n        }\n\n        pool.starmap(\n            load_postprocess_save,\n            zip(\n                [join(folder_predictions, i) for i in predicted_files],\n                [join(output_here, i) for i in predicted_files],\n                [rw] * len(predicted_files),\n                [[pp_fn]] * len(predicted_files),\n                [[kwargs]] * len(predicted_files)\n            )\n        )\n        compute_metrics_on_folder(folder_ref,\n                                  output_here,\n                                  join(output_here, 'summary.json'),\n                                  rw,\n                                  dataset_json['file_ending'],\n                                  labels_or_regions,\n                                  
label_manager.ignore_label,\n                                  num_processes)\n        # now we need to figure out if doing this improved the dice scores. We will implement that defensively in so far\n        # that if a single class got worse as a result we won't do this. We can change this in the future but right now I\n        # prefer to do it this way\n        baseline_results = load_summary_json(join(folder_predictions, 'summary.json'))\n        pp_results = load_summary_json(join(output_here, 'summary.json'))\n        do_this = pp_results['foreground_mean']['Dice'] > baseline_results['foreground_mean']['Dice']\n        if do_this:\n            for class_id in pp_results['mean'].keys():\n                if pp_results['mean'][class_id]['Dice'] < baseline_results['mean'][class_id]['Dice']:\n                    do_this = False\n                    break\n        if do_this:\n            print(f'Results were improved by removing all but the largest foreground region. '\n                  f'Mean dice before: {round(baseline_results[\"foreground_mean\"][\"Dice\"], 5)} '\n                  f'after: {round(pp_results[\"foreground_mean\"][\"Dice\"], 5)}')\n            source = output_here\n            pp_fns.append(pp_fn)\n            pp_fn_kwargs.append(kwargs)\n        else:\n            print(f'Removing all but the largest foreground region did not improve results!')\n            source = folder_predictions\n\n        # in the old nnU-Net we could just apply all-but-largest component removal to all classes at the same time and\n        # then evaluate for each class whether this improved results. 
This is no longer possible because we now support\n        # region-based predictions and regions can overlap, causing interactions\n        # in principle the order with which the postprocessing is applied to the regions matter as well and should be\n        # investigated, but due to some things that I am too lazy to explain right now it's going to be alright (I think)\n        # to stick to the order in which they are declared in dataset.json (if you want to think about it then think about\n        # region_class_order)\n        # 2023_02_06: I hate myself for the comment above. Thanks past me\n        if len(labels_or_regions) > 1:\n            for label_or_region in labels_or_regions:\n                pp_fn = remove_all_but_largest_component_from_segmentation\n                kwargs = {\n                    'labels_or_regions': label_or_region,\n                }\n\n                output_here = join(output_folder, 'temp', 'keep_largest_perClassOrRegion')\n                maybe_mkdir_p(output_here)\n\n                pool.starmap(\n                    load_postprocess_save,\n                    zip(\n                        [join(source, i) for i in predicted_files],\n                        [join(output_here, i) for i in predicted_files],\n                        [rw] * len(predicted_files),\n                        [[pp_fn]] * len(predicted_files),\n                        [[kwargs]] * len(predicted_files)\n                    )\n                )\n                compute_metrics_on_folder(folder_ref,\n                                          output_here,\n                                          join(output_here, 'summary.json'),\n                                          rw,\n                                          dataset_json['file_ending'],\n                                          labels_or_regions,\n                                          label_manager.ignore_label,\n                                          num_processes)\n                
baseline_results = load_summary_json(join(source, 'summary.json'))\n                pp_results = load_summary_json(join(output_here, 'summary.json'))\n                do_this = pp_results['mean'][label_or_region]['Dice'] > baseline_results['mean'][label_or_region]['Dice']\n                if do_this:\n                    print(f'Results were improved by removing all but the largest component for {label_or_region}. '\n                          f'Dice before: {round(baseline_results[\"mean\"][label_or_region][\"Dice\"], 5)} '\n                          f'after: {round(pp_results[\"mean\"][label_or_region][\"Dice\"], 5)}')\n                    if isdir(join(output_folder, 'temp', 'keep_largest_perClassOrRegion_currentBest')):\n                        shutil.rmtree(join(output_folder, 'temp', 'keep_largest_perClassOrRegion_currentBest'))\n                    shutil.move(output_here, join(output_folder, 'temp', 'keep_largest_perClassOrRegion_currentBest'), )\n                    source = join(output_folder, 'temp', 'keep_largest_perClassOrRegion_currentBest')\n                    pp_fns.append(pp_fn)\n                    pp_fn_kwargs.append(kwargs)\n                else:\n                    print(f'Removing all but the largest component for {label_or_region} did not improve results! 
'\n                          f'Dice before: {round(baseline_results[\"mean\"][label_or_region][\"Dice\"], 5)} '\n                          f'after: {round(pp_results[\"mean\"][label_or_region][\"Dice\"], 5)}')\n    [shutil.copy(join(source, i), join(output_folder, i)) for i in subfiles(source, join=False)]\n    save_pickle((pp_fns, pp_fn_kwargs), join(folder_predictions, 'postprocessing.pkl'))\n\n    baseline_results = load_summary_json(join(folder_predictions, 'summary.json'))\n    final_results = load_summary_json(join(output_folder, 'summary.json'))\n    tmp = {\n        'input_folder': {i: baseline_results[i] for i in ['foreground_mean', 'mean']},\n        'postprocessed': {i: final_results[i] for i in ['foreground_mean', 'mean']},\n        'postprocessing_fns': [i.__name__ for i in pp_fns],\n        'postprocessing_kwargs': pp_fn_kwargs,\n    }\n    # json is a very annoying little bi###. Can't handle tuples as dict keys.\n    tmp['input_folder']['mean'] = {label_or_region_to_key(k): tmp['input_folder']['mean'][k] for k in\n                                   tmp['input_folder']['mean'].keys()}\n    tmp['postprocessed']['mean'] = {label_or_region_to_key(k): tmp['postprocessed']['mean'][k] for k in\n                                    tmp['postprocessed']['mean'].keys()}\n    # did I already say that I hate json? 
\"TypeError: Object of type int64 is not JSON serializable\" You retarded bro?\n    recursive_fix_for_json_export(tmp)\n    save_json(tmp, join(folder_predictions, 'postprocessing.json'))\n\n    shutil.rmtree(join(output_folder, 'temp'))\n\n    if not keep_postprocessed_files:\n        shutil.rmtree(output_folder)\n    return pp_fns, pp_fn_kwargs\n\n\ndef apply_postprocessing_to_folder(input_folder: str,\n                                   output_folder: str,\n                                   pp_fns: List[Callable],\n                                   pp_fn_kwargs: List[dict],\n                                   plans_file_or_dict: Union[str, dict] = None,\n                                   dataset_json_file_or_dict: Union[str, dict] = None,\n                                   num_processes=8) -> None:\n    \"\"\"\n    If plans_file_or_dict or dataset_json_file_or_dict are None, we will look for them in input_folder\n    \"\"\"\n    if plans_file_or_dict is None:\n        expected_plans_file = join(input_folder, 'plans.json')\n        if not isfile(expected_plans_file):\n            raise RuntimeError(f\"Expected plans file missing: {expected_plans_file}. The plans file should have been \"\n                               f\"created while running nnUNetv2_predict. Sadge. If the folder you want to apply \"\n                               f\"postprocessing to was create from an ensemble then just specify one of the \"\n                               f\"plans files of the ensemble members in plans_file_or_dict\")\n        plans_file_or_dict = load_json(expected_plans_file)\n    plans_manager = PlansManager(plans_file_or_dict)\n\n    if dataset_json_file_or_dict is None:\n        expected_dataset_json_file = join(input_folder, 'dataset.json')\n        if not isfile(expected_dataset_json_file):\n            raise RuntimeError(\n                f\"Expected plans file missing: {expected_dataset_json_file}. 
The dataset.json should have been \"\n                f\"copied while running nnUNetv2_predict/nnUNetv2_ensemble. Sadge.\")\n        dataset_json_file_or_dict = load_json(expected_dataset_json_file)\n\n    if not isinstance(dataset_json_file_or_dict, dict):\n        dataset_json = load_json(dataset_json_file_or_dict)\n    else:\n        dataset_json = dataset_json_file_or_dict\n\n    rw = plans_manager.image_reader_writer_class()\n\n    maybe_mkdir_p(output_folder)\n    with multiprocessing.get_context(\"spawn\").Pool(num_processes) as p:\n        files = subfiles(input_folder, suffix=dataset_json['file_ending'], join=False)\n\n        _ = p.starmap(load_postprocess_save,\n                      zip(\n                          [join(input_folder, i) for i in files],\n                          [join(output_folder, i) for i in files],\n                          [rw] * len(files),\n                          [pp_fns] * len(files),\n                          [pp_fn_kwargs] * len(files)\n                      )\n                      )\n\n\ndef entry_point_determine_postprocessing_folder():\n    parser = argparse.ArgumentParser('Writes postprocessing.pkl and postprocessing.json in input_folder.')\n    parser.add_argument('-i', type=str, required=True, help='Input folder')\n    parser.add_argument('-ref', type=str, required=True, help='Folder with gt labels')\n    parser.add_argument('-plans_json', type=str, required=False, default=None,\n                        help=\"plans file to use. If not specified we will look for the plans.json file in the \"\n                             \"input folder (input_folder/plans.json)\")\n    parser.add_argument('-dataset_json', type=str, required=False, default=None,\n                        help=\"dataset.json file to use. 
If not specified we will look for the dataset.json file in the \"\n                             \"input folder (input_folder/dataset.json)\")\n    parser.add_argument('-np', type=int, required=False, default=default_num_processes,\n                        help=f\"number of processes to use. Default: {default_num_processes}\")\n    parser.add_argument('--remove_postprocessed', action='store_true', required=False,\n                        help='set this is you don\\'t want to keep the postprocessed files')\n\n    args = parser.parse_args()\n    determine_postprocessing(args.i, args.ref, args.plans_json, args.dataset_json, args.np,\n                             not args.remove_postprocessed)\n\n\ndef entry_point_apply_postprocessing():\n    parser = argparse.ArgumentParser('Apples postprocessing specified in pp_pkl_file to input folder.')\n    parser.add_argument('-i', type=str, required=True, help='Input folder')\n    parser.add_argument('-o', type=str, required=True, help='Output folder')\n    parser.add_argument('-pp_pkl_file', type=str, required=True, help='postprocessing.pkl file')\n    parser.add_argument('-np', type=int, required=False, default=default_num_processes,\n                        help=f\"number of processes to use. Default: {default_num_processes}\")\n    parser.add_argument('-plans_json', type=str, required=False, default=None,\n                        help=\"plans file to use. If not specified we will look for the plans.json file in the \"\n                             \"input folder (input_folder/plans.json)\")\n    parser.add_argument('-dataset_json', type=str, required=False, default=None,\n                        help=\"dataset.json file to use. 
If not specified we will look for the dataset.json file in the \"\n                             \"input folder (input_folder/dataset.json)\")\n    args = parser.parse_args()\n    pp_fns, pp_fn_kwargs = load_pickle(args.pp_pkl_file)\n    apply_postprocessing_to_folder(args.i, args.o, pp_fns, pp_fn_kwargs, args.plans_json, args.dataset_json, args.np)\n\n\nif __name__ == '__main__':\n    trained_model_folder = '/home/fabian/results/nnUNet_remake/Dataset004_Hippocampus/nnUNetTrainer__nnUNetPlans__3d_fullres'\n    labelstr = join(nnUNet_raw, 'Dataset004_Hippocampus', 'labelsTr')\n    plans_manager = PlansManager(join(trained_model_folder, 'plans.json'))\n    dataset_json = load_json(join(trained_model_folder, 'dataset.json'))\n    folds = (0, 1, 2, 3, 4)\n    label_manager = plans_manager.get_label_manager(dataset_json)\n\n    merged_output_folder = join(trained_model_folder, f'crossval_results_folds_{folds_tuple_to_string(folds)}')\n    accumulate_cv_results(trained_model_folder, merged_output_folder, folds, 8, False)\n\n    fns, kwargs = determine_postprocessing(merged_output_folder, labelstr, plans_manager.plans,\n                                           dataset_json, 8, keep_postprocessed_files=True)\n    save_pickle((fns, kwargs), join(trained_model_folder, 'postprocessing.pkl'))\n    fns, kwargs = load_pickle(join(trained_model_folder, 'postprocessing.pkl'))\n\n    apply_postprocessing_to_folder(merged_output_folder, merged_output_folder + '_pp', fns, kwargs,\n                                   plans_manager.plans, dataset_json,\n                                   8)\n    compute_metrics_on_folder(labelstr,\n                              merged_output_folder + '_pp',\n                              join(merged_output_folder + '_pp', 'summary.json'),\n                              plans_manager.image_reader_writer_class(),\n                              dataset_json['file_ending'],\n                              label_manager.foreground_regions if 
label_manager.has_regions else label_manager.foreground_labels,\n                              label_manager.ignore_label,\n                              8)\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/preprocessing/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/preprocessing/cropping/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/preprocessing/cropping/cropping.py",
    "content": "import numpy as np\n\n\n# Hello! crop_to_nonzero is the function you are looking for. Ignore the rest.\nfrom acvl_utils.cropping_and_padding.bounding_boxes import get_bbox_from_mask, crop_to_bbox, bounding_box_to_slice\n\n\ndef create_nonzero_mask(data):\n    \"\"\"\n\n    :param data:\n    :return: the mask is True where the data is nonzero\n    \"\"\"\n    from scipy.ndimage import binary_fill_holes\n    assert data.ndim in (3, 4), \"data must have shape (C, X, Y, Z) or shape (C, X, Y)\"\n    nonzero_mask = np.zeros(data.shape[1:], dtype=bool)\n    for c in range(data.shape[0]):\n        this_mask = data[c] != 0\n        nonzero_mask = nonzero_mask | this_mask\n    nonzero_mask = binary_fill_holes(nonzero_mask)\n    return nonzero_mask\n\n\ndef crop_to_nonzero(data, seg=None, nonzero_label=-1):\n    \"\"\"\n\n    :param data:\n    :param seg:\n    :param nonzero_label: this will be written into the segmentation map\n    :return:\n    \"\"\"\n    nonzero_mask = create_nonzero_mask(data)\n    bbox = get_bbox_from_mask(nonzero_mask)\n\n    slicer = bounding_box_to_slice(bbox)\n    data = data[tuple([slice(None), *slicer])]\n\n    if seg is not None:\n        seg = seg[tuple([slice(None), *slicer])]\n\n    nonzero_mask = nonzero_mask[slicer][None]\n    if seg is not None:\n        seg[(seg == 0) & (~nonzero_mask)] = nonzero_label\n    else:\n        nonzero_mask = nonzero_mask.astype(np.int8)\n        nonzero_mask[nonzero_mask == 0] = nonzero_label\n        nonzero_mask[nonzero_mask > 0] = 0\n        seg = nonzero_mask\n    return data, seg, bbox\n\n\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/preprocessing/normalization/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/preprocessing/normalization/default_normalization_schemes.py",
    "content": "from abc import ABC, abstractmethod\nfrom typing import Type\n\nimport numpy as np\nfrom numpy import number\n\n\nclass ImageNormalization(ABC):\n    leaves_pixels_outside_mask_at_zero_if_use_mask_for_norm_is_true = None\n\n    def __init__(self, use_mask_for_norm: bool = None, intensityproperties: dict = None,\n                 target_dtype: Type[number] = np.float32):\n        assert use_mask_for_norm is None or isinstance(use_mask_for_norm, bool)\n        self.use_mask_for_norm = use_mask_for_norm\n        assert isinstance(intensityproperties, dict)\n        self.intensityproperties = intensityproperties\n        self.target_dtype = target_dtype\n\n    @abstractmethod\n    def run(self, image: np.ndarray, seg: np.ndarray = None) -> np.ndarray:\n        \"\"\"\n        Image and seg must have the same shape. Seg is not always used\n        \"\"\"\n        pass\n\n\nclass ZScoreNormalization(ImageNormalization):\n    leaves_pixels_outside_mask_at_zero_if_use_mask_for_norm_is_true = True\n\n    def run(self, image: np.ndarray, seg: np.ndarray = None) -> np.ndarray:\n        \"\"\"\n        here seg is used to store the zero valued region. The value for that region in the segmentation is -1 by\n        default.\n        \"\"\"\n        image = image.astype(self.target_dtype)\n        if self.use_mask_for_norm is not None and self.use_mask_for_norm:\n            # negative values in the segmentation encode the 'outside' region (think zero values around the brain as\n            # in BraTS). 
We want to run the normalization only in the brain region, so we need to mask the image.\n            # The default nnU-net sets use_mask_for_norm to True if cropping to the nonzero region substantially\n            # reduced the image size.\n            mask = seg >= 0\n            mean = image[mask].mean()\n            std = image[mask].std()\n            image[mask] = (image[mask] - mean) / (max(std, 1e-8))\n        else:\n            mean = image.mean()\n            std = image.std()\n            image = (image - mean) / (max(std, 1e-8))\n        return image\n\n\nclass CTNormalization(ImageNormalization):\n    leaves_pixels_outside_mask_at_zero_if_use_mask_for_norm_is_true = False\n\n    def run(self, image: np.ndarray, seg: np.ndarray = None) -> np.ndarray:\n        assert self.intensityproperties is not None, \"CTNormalization requires intensity properties\"\n        image = image.astype(self.target_dtype)\n        mean_intensity = self.intensityproperties['mean']\n        std_intensity = self.intensityproperties['std']\n        lower_bound = self.intensityproperties['percentile_00_5']\n        upper_bound = self.intensityproperties['percentile_99_5']\n        image = np.clip(image, lower_bound, upper_bound)\n        image = (image - mean_intensity) / max(std_intensity, 1e-8)\n        return image\n\n\nclass NoNormalization(ImageNormalization):\n    leaves_pixels_outside_mask_at_zero_if_use_mask_for_norm_is_true = False\n\n    def run(self, image: np.ndarray, seg: np.ndarray = None) -> np.ndarray:\n        return image.astype(self.target_dtype)\n\n\nclass RescaleTo01Normalization(ImageNormalization):\n    leaves_pixels_outside_mask_at_zero_if_use_mask_for_norm_is_true = False\n\n    def run(self, image: np.ndarray, seg: np.ndarray = None) -> np.ndarray:\n        image = image.astype(self.target_dtype)\n        image = image - image.min()\n        image = image / np.clip(image.max(), a_min=1e-8, a_max=None)\n        return image\n\n\nclass 
RGBTo01Normalization(ImageNormalization):\n    leaves_pixels_outside_mask_at_zero_if_use_mask_for_norm_is_true = False\n\n    def run(self, image: np.ndarray, seg: np.ndarray = None) -> np.ndarray:\n        assert image.min() >= 0, \"RGB images are uint 8, for whatever reason I found pixel values smaller than 0. \" \\\n                                 \"Your images do not seem to be RGB images\"\n        assert image.max() <= 255, \"RGB images are uint 8, for whatever reason I found pixel values greater than 255\" \\\n                                   \". Your images do not seem to be RGB images\"\n        image = image.astype(self.target_dtype)\n        image = image / 255.\n        return image\n\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/preprocessing/normalization/map_channel_name_to_normalization.py",
    "content": "from typing import Type\n\nfrom nnunetv2.preprocessing.normalization.default_normalization_schemes import CTNormalization, NoNormalization, \\\n    ZScoreNormalization, RescaleTo01Normalization, RGBTo01Normalization, ImageNormalization\n\nchannel_name_to_normalization_mapping = {\n    'CT': CTNormalization,\n    'noNorm': NoNormalization,\n    'zscore': ZScoreNormalization,\n    'rescale_to_0_1': RescaleTo01Normalization,\n    'rgb_to_0_1': RGBTo01Normalization\n}\n\n\ndef get_normalization_scheme(channel_name: str) -> Type[ImageNormalization]:\n    \"\"\"\n    If we find the channel_name in channel_name_to_normalization_mapping return the corresponding normalization. If it is\n    not found, use the default (ZScoreNormalization)\n    \"\"\"\n    norm_scheme = channel_name_to_normalization_mapping.get(channel_name)\n    if norm_scheme is None:\n        norm_scheme = ZScoreNormalization\n    # print('Using %s for image normalization' % norm_scheme.__name__)\n    return norm_scheme\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/preprocessing/normalization/readme.md",
    "content": "The channel_names entry in dataset.json only determines the normalization scheme. So if you want to use something different \nthen you can just\n- create a new subclass of ImageNormalization\n- map your custom channel identifier to that subclass in channel_name_to_normalization_mapping\n- run plan and preprocess again with your custom normalization scheme"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/preprocessing/preprocessors/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/preprocessing/preprocessors/default_preprocessor.py",
    "content": "#    Copyright 2020 Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany\n#\n#    Licensed under the Apache License, Version 2.0 (the \"License\");\n#    you may not use this file except in compliance with the License.\n#    You may obtain a copy of the License at\n#\n#        http://www.apache.org/licenses/LICENSE-2.0\n#\n#    Unless required by applicable law or agreed to in writing, software\n#    distributed under the License is distributed on an \"AS IS\" BASIS,\n#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n#    See the License for the specific language governing permissions and\n#    limitations under the License.\nimport multiprocessing\nimport shutil\nfrom time import sleep\nfrom typing import Union, Tuple\n\nimport nnunetv2\nimport numpy as np\nfrom batchgenerators.utilities.file_and_folder_operations import *\nfrom nnunetv2.paths import nnUNet_preprocessed, nnUNet_raw\nfrom nnunetv2.preprocessing.cropping.cropping import crop_to_nonzero\nfrom nnunetv2.preprocessing.resampling.default_resampling import compute_new_shape\nfrom nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name\nfrom nnunetv2.utilities.find_class_by_name import recursive_find_python_class\nfrom nnunetv2.utilities.plans_handling.plans_handler import PlansManager, ConfigurationManager\nfrom nnunetv2.utilities.utils import get_identifiers_from_splitted_dataset_folder, \\\n    create_lists_from_splitted_dataset_folder, get_filenames_of_train_images_and_targets\nfrom tqdm import tqdm\n\n\nclass DefaultPreprocessor(object):\n    def __init__(self, verbose: bool = True):\n        self.verbose = verbose\n        \"\"\"\n        Everything we need is in the plans. 
Those are given when run() is called\n        \"\"\"\n\n    def run_case_npy(self, data: np.ndarray, seg: Union[np.ndarray, None], properties: dict,\n                     plans_manager: PlansManager, configuration_manager: ConfigurationManager,\n                     dataset_json: Union[dict, str]):\n        # let's not mess up the inputs!\n        data = np.copy(data)\n        if seg is not None:\n            assert data.shape[1:] == seg.shape[1:], \"Shape mismatch between image and segmentation. Please fix your dataset and make use of the --verify_dataset_integrity flag to ensure everything is correct\"\n            seg = np.copy(seg)\n\n        has_seg = seg is not None\n\n        # apply transpose_forward, this also needs to be applied to the spacing!\n        data = data.transpose([0, *[i + 1 for i in plans_manager.transpose_forward]])\n        if seg is not None:\n            seg = seg.transpose([0, *[i + 1 for i in plans_manager.transpose_forward]])\n        original_spacing = [properties['spacing'][i] for i in plans_manager.transpose_forward]\n\n        # crop, remember to store size before cropping!\n        shape_before_cropping = data.shape[1:]\n        properties['shape_before_cropping'] = shape_before_cropping\n        # this command will generate a segmentation. 
This is important because of the nonzero mask which we may need\n        data, seg, bbox = crop_to_nonzero(data, seg)\n        properties['bbox_used_for_cropping'] = bbox\n        # print(data.shape, seg.shape)\n        properties['shape_after_cropping_and_before_resampling'] = data.shape[1:]\n\n        # resample\n        target_spacing = configuration_manager.spacing  # this should already be transposed\n\n        if len(target_spacing) < len(data.shape[1:]):\n            # target spacing for 2d has 2 entries but the data and original_spacing have three because everything is 3d\n            # in 2d configuration we do not change the spacing between slices\n            target_spacing = [original_spacing[0]] + target_spacing\n        new_shape = compute_new_shape(data.shape[1:], original_spacing, target_spacing)\n\n        # normalize\n        # normalization MUST happen before resampling or we get huge problems with resampled nonzero masks no\n        # longer fitting the images perfectly!\n        data = self._normalize(data, seg, configuration_manager,\n                               plans_manager.foreground_intensity_properties_per_channel)\n\n        # print('current shape', data.shape[1:], 'current_spacing', original_spacing,\n        #       '\\ntarget shape', new_shape, 'target_spacing', target_spacing)\n        old_shape = data.shape[1:]\n        data = configuration_manager.resampling_fn_data(data, new_shape, original_spacing, target_spacing)\n        seg = configuration_manager.resampling_fn_seg(seg, new_shape, original_spacing, target_spacing)\n        if self.verbose:\n            print(f'old shape: {old_shape}, new_shape: {new_shape}, old_spacing: {original_spacing}, '\n                  f'new_spacing: {target_spacing}, fn_data: {configuration_manager.resampling_fn_data}')\n\n        # if we have a segmentation, sample foreground locations for oversampling and add those to properties\n        if has_seg:\n            # reinstantiating LabelManager for 
each case is not ideal. We could replace the dataset_json argument\n            # with a LabelManager Instance in this function because that's all its used for. Dunno what's better.\n            # LabelManager is pretty light computation-wise.\n            label_manager = plans_manager.get_label_manager(dataset_json)\n            collect_for_this = label_manager.foreground_regions if label_manager.has_regions \\\n                else label_manager.foreground_labels\n\n            # when using the ignore label we want to sample only from annotated regions. Therefore we also need to\n            # collect samples uniformly from all classes (incl background)\n            if label_manager.has_ignore_label:\n                collect_for_this.append(label_manager.all_labels)\n\n            # no need to filter background in regions because it is already filtered in handle_labels\n            # print(all_labels, regions)\n            properties['class_locations'] = self._sample_foreground_locations(seg, collect_for_this,\n                                                                                   verbose=self.verbose)\n            seg = self.modify_seg_fn(seg, plans_manager, dataset_json, configuration_manager)\n        if np.max(seg) > 127:\n            seg = seg.astype(np.int16)\n        else:\n            seg = seg.astype(np.int8)\n        return data, seg\n\n    def run_case(self, image_files: List[str], seg_file: Union[str, None], plans_manager: PlansManager,\n                 configuration_manager: ConfigurationManager,\n                 dataset_json: Union[dict, str]):\n        \"\"\"\n        seg file can be none (test cases)\n\n        order of operations is: transpose -> crop -> resample\n        so when we export we need to run the following order: resample -> crop -> transpose (we could also run\n        transpose at a different place, but reverting the order of operations done during preprocessing seems cleaner)\n        \"\"\"\n        if 
isinstance(dataset_json, str):\n            dataset_json = load_json(dataset_json)\n\n        rw = plans_manager.image_reader_writer_class()\n\n        # load image(s)\n        data, data_properties = rw.read_images(image_files)\n\n        # if possible, load seg\n        if seg_file is not None:\n            seg, _ = rw.read_seg(seg_file)\n        else:\n            seg = None\n\n        data, seg = self.run_case_npy(data, seg, data_properties, plans_manager, configuration_manager,\n                                      dataset_json)\n        return data, seg, data_properties\n\n    def run_case_save(self, output_filename_truncated: str, image_files: List[str], seg_file: str,\n                      plans_manager: PlansManager, configuration_manager: ConfigurationManager,\n                      dataset_json: Union[dict, str]):\n        data, seg, properties = self.run_case(image_files, seg_file, plans_manager, configuration_manager, dataset_json)\n        # print('dtypes', data.dtype, seg.dtype)\n        np.savez_compressed(output_filename_truncated + '.npz', data=data, seg=seg)\n        write_pickle(properties, output_filename_truncated + '.pkl')\n\n    @staticmethod\n    def _sample_foreground_locations(seg: np.ndarray, classes_or_regions: Union[List[int], List[Tuple[int, ...]]],\n                                     seed: int = 1234, verbose: bool = False):\n        num_samples = 10000\n        min_percent_coverage = 0.01  # at least 1% of the class voxels need to be selected, otherwise it may be too\n        # sparse\n        rndst = np.random.RandomState(seed)\n        class_locs = {}\n        for c in classes_or_regions:\n            k = c if not isinstance(c, list) else tuple(c)\n            if isinstance(c, (tuple, list)):\n                mask = seg == c[0]\n                for cc in c[1:]:\n                    mask = mask | (seg == cc)\n                all_locs = np.argwhere(mask)\n            else:\n                all_locs = np.argwhere(seg == c)\n      
      if len(all_locs) == 0:\n                class_locs[k] = []\n                continue\n            target_num_samples = min(num_samples, len(all_locs))\n            target_num_samples = max(target_num_samples, int(np.ceil(len(all_locs) * min_percent_coverage)))\n\n            selected = all_locs[rndst.choice(len(all_locs), target_num_samples, replace=False)]\n            class_locs[k] = selected\n            if verbose:\n                print(c, target_num_samples)\n        return class_locs\n\n    def _normalize(self, data: np.ndarray, seg: np.ndarray, configuration_manager: ConfigurationManager,\n                   foreground_intensity_properties_per_channel: dict) -> np.ndarray:\n        for c in range(data.shape[0]):\n            scheme = configuration_manager.normalization_schemes[c]\n            normalizer_class = recursive_find_python_class(join(nnunetv2.__path__[0], \"preprocessing\", \"normalization\"),\n                                                           scheme,\n                                                           'nnunetv2.preprocessing.normalization')\n            if normalizer_class is None:\n                raise RuntimeError(f'Unable to locate class \\'{scheme}\\' for normalization')\n            normalizer = normalizer_class(use_mask_for_norm=configuration_manager.use_mask_for_norm[c],\n                                          intensityproperties=foreground_intensity_properties_per_channel[str(c)])\n            data[c] = normalizer.run(data[c], seg[0])\n        return data\n\n    def run(self, dataset_name_or_id: Union[int, str], configuration_name: str, plans_identifier: str,\n            num_processes: int):\n        \"\"\"\n        data identifier = configuration name in plans. 
EZ.\n        \"\"\"\n        dataset_name = maybe_convert_to_dataset_name(dataset_name_or_id)\n\n        assert isdir(join(nnUNet_raw, dataset_name)), \"The requested dataset could not be found in nnUNet_raw\"\n\n        plans_file = join(nnUNet_preprocessed, dataset_name, plans_identifier + '.json')\n        assert isfile(plans_file), \"Expected plans file (%s) not found. Run corresponding nnUNet_plan_experiment \" \\\n                                   \"first.\" % plans_file\n        plans = load_json(plans_file)\n        plans_manager = PlansManager(plans)\n        configuration_manager = plans_manager.get_configuration(configuration_name)\n\n        if self.verbose:\n            print(f'Preprocessing the following configuration: {configuration_name}')\n        if self.verbose:\n            print(configuration_manager)\n\n        dataset_json_file = join(nnUNet_preprocessed, dataset_name, 'dataset.json')\n        dataset_json = load_json(dataset_json_file)\n\n        output_directory = join(nnUNet_preprocessed, dataset_name, configuration_manager.data_identifier)\n\n        if isdir(output_directory):\n            shutil.rmtree(output_directory)\n\n        maybe_mkdir_p(output_directory)\n\n        dataset = get_filenames_of_train_images_and_targets(join(nnUNet_raw, dataset_name), dataset_json)\n\n        # identifiers = [os.path.basename(i[:-len(dataset_json['file_ending'])]) for i in seg_fnames]\n        # output_filenames_truncated = [join(output_directory, i) for i in identifiers]\n\n        # multiprocessing magic.\n        r = []\n        with multiprocessing.get_context(\"spawn\").Pool(num_processes) as p:\n            for k in dataset.keys():\n                r.append(p.starmap_async(self.run_case_save,\n                                         ((join(output_directory, k), dataset[k]['images'], dataset[k]['label'],\n                                           plans_manager, configuration_manager,\n                                           
dataset_json),)))\n            remaining = list(range(len(dataset)))\n            # p is pretty nifti. If we kill workers they just respawn but don't do any work.\n            # So we need to store the original pool of workers.\n            workers = [j for j in p._pool]\n            with tqdm(desc=None, total=len(dataset), disable=self.verbose) as pbar:\n                while len(remaining) > 0:\n                    all_alive = all([j.is_alive() for j in workers])\n                    if not all_alive:\n                        raise RuntimeError('Some background worker is 6 feet under. Yuck. \\n'\n                                           'OK jokes aside.\\n'\n                                           'One of your background processes is missing. This could be because of '\n                                           'an error (look for an error message) or because it was killed '\n                                           'by your OS due to running out of RAM. If you don\\'t see '\n                                           'an error message, out of RAM is likely the problem. In that case '\n                                           'reducing the number of workers might help')\n                    done = [i for i in remaining if r[i].ready()]\n                    for _ in done:\n                        pbar.update()\n                    remaining = [i for i in remaining if i not in done]\n                    sleep(0.1)\n\n    def modify_seg_fn(self, seg: np.ndarray, plans_manager: PlansManager, dataset_json: dict,\n                      configuration_manager: ConfigurationManager) -> np.ndarray:\n        # this function will be called at the end of self.run_case. Can be used to change the segmentation\n        # after resampling. 
Useful for experimenting with sparse annotations: I can introduce sparsity after resampling\n        # and don't have to create a new dataset each time I modify my experiments\n        return seg\n\n\ndef example_test_case_preprocessing():\n    # (paths to files may need adaptations)\n    plans_file = '/home/isensee/drives/gpu_data/nnUNet_preprocessed/Dataset219_AMOS2022_postChallenge_task2/nnUNetPlans.json'\n    dataset_json_file = '/home/isensee/drives/gpu_data/nnUNet_preprocessed/Dataset219_AMOS2022_postChallenge_task2/dataset.json'\n    input_images = ['/home/isensee/drives/e132-rohdaten/nnUNetv2/Dataset219_AMOS2022_postChallenge_task2/imagesTr/amos_0600_0000.nii.gz', ]  # if you only have one channel, you still need a list: ['case000_0000.nii.gz']\n\n    configuration = '3d_fullres'\n    pp = DefaultPreprocessor()\n\n    # _ because this position would be the segmentation if seg_file was not None (training case)\n    # even if you have the segmentation, don't put the file there! You should always evaluate in the original\n    # resolution. What comes out of the preprocessor might have been resampled to some other image resolution (as\n    # specified by plans)\n    plans_manager = PlansManager(plans_file)\n    data, _, properties = pp.run_case(input_images, seg_file=None, plans_manager=plans_manager,\n                                      configuration_manager=plans_manager.get_configuration(configuration),\n                                      dataset_json=dataset_json_file)\n\n    # voila. Now plug data into your prediction function of choice. We of course recommend nnU-Net's default (TODO)\n    return data\n\n\nif __name__ == '__main__':\n    example_test_case_preprocessing()\n    # pp = DefaultPreprocessor()\n    # pp.run(2, '2d', 'nnUNetPlans', 8)\n\n    ###########################################################################################################\n    # how to process a test cases? 
This is an example:\n    # example_test_case_preprocessing()\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/preprocessing/resampling/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/preprocessing/resampling/default_resampling.py",
    "content": "from collections import OrderedDict\nfrom typing import Union, Tuple, List\n\nimport numpy as np\nimport pandas as pd\nimport torch\nfrom batchgenerators.augmentations.utils import resize_segmentation\nfrom scipy.ndimage.interpolation import map_coordinates\nfrom skimage.transform import resize\nfrom nnunetv2.configuration import ANISO_THRESHOLD\n\n\ndef get_do_separate_z(spacing: Union[Tuple[float, ...], List[float], np.ndarray], anisotropy_threshold=ANISO_THRESHOLD):\n    do_separate_z = (np.max(spacing) / np.min(spacing)) > anisotropy_threshold\n    return do_separate_z\n\n\ndef get_lowres_axis(new_spacing: Union[Tuple[float, ...], List[float], np.ndarray]):\n    axis = np.where(max(new_spacing) / np.array(new_spacing) == 1)[0]  # find which axis is anisotropic\n    return axis\n\n\ndef compute_new_shape(old_shape: Union[Tuple[int, ...], List[int], np.ndarray],\n                      old_spacing: Union[Tuple[float, ...], List[float], np.ndarray],\n                      new_spacing: Union[Tuple[float, ...], List[float], np.ndarray]) -> np.ndarray:\n    assert len(old_spacing) == len(old_shape)\n    assert len(old_shape) == len(new_spacing)\n    new_shape = np.array([int(round(i / j * k)) for i, j, k in zip(old_spacing, new_spacing, old_shape)])\n    return new_shape\n\n\ndef resample_data_or_seg_to_spacing(data: np.ndarray,\n                                    current_spacing: Union[Tuple[float, ...], List[float], np.ndarray],\n                                    new_spacing: Union[Tuple[float, ...], List[float], np.ndarray],\n                                    is_seg: bool = False,\n                                    order: int = 3, order_z: int = 0,\n                                    force_separate_z: Union[bool, None] = False,\n                                    separate_z_anisotropy_threshold: float = ANISO_THRESHOLD):\n    if force_separate_z is not None:\n        do_separate_z = force_separate_z\n        if force_separate_z:\n           
 axis = get_lowres_axis(current_spacing)\n        else:\n            axis = None\n    else:\n        if get_do_separate_z(current_spacing, separate_z_anisotropy_threshold):\n            do_separate_z = True\n            axis = get_lowres_axis(current_spacing)\n        elif get_do_separate_z(new_spacing, separate_z_anisotropy_threshold):\n            do_separate_z = True\n            axis = get_lowres_axis(new_spacing)\n        else:\n            do_separate_z = False\n            axis = None\n\n    if axis is not None:\n        if len(axis) == 3:\n            # every axis has the same spacing, this should never happen, why is this code here?\n            do_separate_z = False\n        elif len(axis) == 2:\n            # this happens for spacings like (0.24, 1.25, 1.25) for example. In that case we do not want to resample\n            # separately in the out of plane axis\n            do_separate_z = False\n        else:\n            pass\n\n    if data is not None:\n        assert data.ndim == 4, \"data must be c x y z\"\n\n    shape = np.array(data[0].shape)\n    new_shape = compute_new_shape(shape[1:], current_spacing, new_spacing)\n\n    data_reshaped = resample_data_or_seg(data, new_shape, is_seg, axis, order, do_separate_z, order_z=order_z)\n    return data_reshaped\n\n\ndef resample_data_or_seg_to_shape(data: Union[torch.Tensor, np.ndarray],\n                                  new_shape: Union[Tuple[int, ...], List[int], np.ndarray],\n                                  current_spacing: Union[Tuple[float, ...], List[float], np.ndarray],\n                                  new_spacing: Union[Tuple[float, ...], List[float], np.ndarray],\n                                  is_seg: bool = False,\n                                  order: int = 3, order_z: int = 0,\n                                  force_separate_z: Union[bool, None] = False,\n                                  separate_z_anisotropy_threshold: float = ANISO_THRESHOLD):\n    \"\"\"\n    needed for 
segmentation export. Stupid, I know. Maybe we can fix that with Leos new resampling functions\n    \"\"\"\n    if isinstance(data, torch.Tensor):\n        data = data.cpu().numpy()\n    if force_separate_z is not None:\n        do_separate_z = force_separate_z\n        if force_separate_z:\n            axis = get_lowres_axis(current_spacing)\n        else:\n            axis = None\n    else:\n        if get_do_separate_z(current_spacing, separate_z_anisotropy_threshold):\n            do_separate_z = True\n            axis = get_lowres_axis(current_spacing)\n        elif get_do_separate_z(new_spacing, separate_z_anisotropy_threshold):\n            do_separate_z = True\n            axis = get_lowres_axis(new_spacing)\n        else:\n            do_separate_z = False\n            axis = None\n\n    if axis is not None:\n        if len(axis) == 3:\n            # every axis has the same spacing, this should never happen, why is this code here?\n            do_separate_z = False\n        elif len(axis) == 2:\n            # this happens for spacings like (0.24, 1.25, 1.25) for example. 
In that case we do not want to resample\n            # separately in the out of plane axis\n            do_separate_z = False\n        else:\n            pass\n\n    if data is not None:\n        assert data.ndim == 4, \"data must be c x y z\"\n\n    data_reshaped = resample_data_or_seg(data, new_shape, is_seg, axis, order, do_separate_z, order_z=order_z)\n    return data_reshaped\n\n\ndef resample_data_or_seg(data: np.ndarray, new_shape: Union[Tuple[float, ...], List[float], np.ndarray],\n                         is_seg: bool = False, axis: Union[None, int] = None, order: int = 3,\n                         do_separate_z: bool = False, order_z: int = 0):\n    \"\"\"\n    separate_z=True will resample with order 0 along z\n    :param data:\n    :param new_shape:\n    :param is_seg:\n    :param axis:\n    :param order:\n    :param do_separate_z:\n    :param order_z: only applies if do_separate_z is True\n    :return:\n    \"\"\"\n    assert data.ndim == 4, \"data must be (c, x, y, z)\"\n    assert len(new_shape) == data.ndim - 1\n\n    if is_seg:\n        resize_fn = resize_segmentation\n        kwargs = OrderedDict()\n    else:\n        resize_fn = resize\n        kwargs = {'mode': 'edge', 'anti_aliasing': False}\n    dtype_data = data.dtype\n    shape = np.array(data[0].shape)\n    new_shape = np.array(new_shape)\n    if np.any(shape != new_shape):\n        data = data.astype(float)\n        if do_separate_z:\n            # print(\"separate z, order in z is\", order_z, \"order inplane is\", order)\n            assert len(axis) == 1, \"only one anisotropic axis supported\"\n            axis = axis[0]\n            if axis == 0:\n                new_shape_2d = new_shape[1:]\n            elif axis == 1:\n                new_shape_2d = new_shape[[0, 2]]\n            else:\n                new_shape_2d = new_shape[:-1]\n\n            reshaped_final_data = []\n            for c in range(data.shape[0]):\n                reshaped_data = []\n                for slice_id in 
range(shape[axis]):\n                    if axis == 0:\n                        reshaped_data.append(resize_fn(data[c, slice_id], new_shape_2d, order, **kwargs))\n                    elif axis == 1:\n                        reshaped_data.append(resize_fn(data[c, :, slice_id], new_shape_2d, order, **kwargs))\n                    else:\n                        reshaped_data.append(resize_fn(data[c, :, :, slice_id], new_shape_2d, order, **kwargs))\n                reshaped_data = np.stack(reshaped_data, axis)\n                if shape[axis] != new_shape[axis]:\n\n                    # The following few lines are blatantly copied and modified from sklearn's resize()\n                    rows, cols, dim = new_shape[0], new_shape[1], new_shape[2]\n                    orig_rows, orig_cols, orig_dim = reshaped_data.shape\n\n                    row_scale = float(orig_rows) / rows\n                    col_scale = float(orig_cols) / cols\n                    dim_scale = float(orig_dim) / dim\n\n                    map_rows, map_cols, map_dims = np.mgrid[:rows, :cols, :dim]\n                    map_rows = row_scale * (map_rows + 0.5) - 0.5\n                    map_cols = col_scale * (map_cols + 0.5) - 0.5\n                    map_dims = dim_scale * (map_dims + 0.5) - 0.5\n\n                    coord_map = np.array([map_rows, map_cols, map_dims])\n                    if not is_seg or order_z == 0:\n                        reshaped_final_data.append(map_coordinates(reshaped_data, coord_map, order=order_z,\n                                                                   mode='nearest')[None])\n                    else:\n                        unique_labels = np.sort(pd.unique(reshaped_data.ravel()))  # np.unique(reshaped_data)\n                        reshaped = np.zeros(new_shape, dtype=dtype_data)\n\n                        for i, cl in enumerate(unique_labels):\n                            reshaped_multihot = np.round(\n                                
map_coordinates((reshaped_data == cl).astype(float), coord_map, order=order_z,\n                                                mode='nearest'))\n                            reshaped[reshaped_multihot > 0.5] = cl\n                        reshaped_final_data.append(reshaped[None])\n                else:\n                    reshaped_final_data.append(reshaped_data[None])\n            reshaped_final_data = np.vstack(reshaped_final_data)\n        else:\n            # print(\"no separate z, order\", order)\n            reshaped = []\n            for c in range(data.shape[0]):\n                reshaped.append(resize_fn(data[c], new_shape, order, **kwargs)[None])\n            reshaped_final_data = np.vstack(reshaped)\n        return reshaped_final_data.astype(dtype_data)\n    else:\n        # print(\"no resampling necessary\")\n        return data\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/preprocessing/resampling/utils.py",
    "content": "from typing import Callable\n\nimport nnunetv2\nfrom batchgenerators.utilities.file_and_folder_operations import join\nfrom nnunetv2.utilities.find_class_by_name import recursive_find_python_class\n\n\ndef recursive_find_resampling_fn_by_name(resampling_fn: str) -> Callable:\n    ret = recursive_find_python_class(join(nnunetv2.__path__[0], \"preprocessing\", \"resampling\"), resampling_fn,\n                                      'nnunetv2.preprocessing.resampling')\n    if ret is None:\n        raise RuntimeError(\"Unable to find resampling function named '%s'. Please make sure this fn is located in the \"\n                           \"nnunetv2.preprocessing.resampling module.\" % resampling_fn)\n    else:\n        return ret\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/run/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/run/load_pretrained_weights.py",
    "content": "import torch\nfrom torch._dynamo import OptimizedModule\nfrom torch.nn.parallel import DistributedDataParallel as DDP\n\n\ndef load_pretrained_weights(network, fname, verbose=False):\n    \"\"\"\n    Transfers all weights between matching keys in state_dicts. matching is done by name and we only transfer if the\n    shape is also the same. Segmentation layers (the 1x1(x1) layers that produce the segmentation maps)\n    identified by keys ending with '.seg_layers') are not transferred!\n\n    If the pretrained weights were obtained with a training outside nnU-Net and DDP or torch.optimize was used,\n    you need to change the keys of the pretrained state_dict. DDP adds a 'module.' prefix and torch.optim adds\n    '_orig_mod'. You DO NOT need to worry about this if pretraining was done with nnU-Net as\n    nnUNetTrainer.save_checkpoint takes care of that!\n\n    \"\"\"\n    saved_model = torch.load(fname)\n    pretrained_dict = saved_model['network_weights']\n\n    skip_strings_in_pretrained = [\n        '.seg_layers.',\n    ]\n\n    if isinstance(network, DDP):\n        mod = network.module\n    else:\n        mod = network\n    if isinstance(mod, OptimizedModule):\n        mod = mod._orig_mod\n\n    model_dict = mod.state_dict()\n    # verify that all but the segmentation layers have the same shape\n    for key, _ in model_dict.items():\n        if all([i not in key for i in skip_strings_in_pretrained]):\n            assert key in pretrained_dict, \\\n                f\"Key {key} is missing in the pretrained model weights. The pretrained weights do not seem to be \" \\\n                f\"compatible with your network.\"\n            assert model_dict[key].shape == pretrained_dict[key].shape, \\\n                f\"The shape of the parameters of key {key} is not the same. Pretrained model: \" \\\n                f\"{pretrained_dict[key].shape}; your network: {model_dict[key]}. 
The pretrained model \" \\\n                f\"does not seem to be compatible with your network.\"\n\n    # fun fact: in principle this allows loading from parameters that do not cover the entire network. For example pretrained\n    # encoders. Not supported by this function though (see assertions above)\n\n    # commenting out this abomination of a dict comprehension for preservation in the archives of 'what not to do'\n    # pretrained_dict = {'module.' + k if is_ddp else k: v\n    #                    for k, v in pretrained_dict.items()\n    #                    if (('module.' + k if is_ddp else k) in model_dict) and\n    #                    all([i not in k for i in skip_strings_in_pretrained])}\n\n    pretrained_dict = {k: v for k, v in pretrained_dict.items()\n                       if k in model_dict.keys() and all([i not in k for i in skip_strings_in_pretrained])}\n\n    model_dict.update(pretrained_dict)\n\n    print(\"################### Loading pretrained weights from file \", fname, '###################')\n    if verbose:\n        print(\"Below is the list of overlapping blocks in pretrained model and nnUNet architecture:\")\n        for key, value in pretrained_dict.items():\n            print(key, 'shape', value.shape)\n        print(\"################### Done ###################\")\n    mod.load_state_dict(model_dict)\n\n\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/run/run_training.py",
    "content": "import os\nimport socket\nfrom typing import Union, Optional\n\nimport nnunetv2\nimport torch.cuda\nimport torch.distributed as dist\nimport torch.multiprocessing as mp\nfrom batchgenerators.utilities.file_and_folder_operations import join, isfile, load_json\nfrom nnunetv2.paths import nnUNet_preprocessed\nfrom nnunetv2.run.load_pretrained_weights import load_pretrained_weights\nfrom nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer\nfrom nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name\nfrom nnunetv2.utilities.find_class_by_name import recursive_find_python_class\nfrom torch.backends import cudnn\n\n\ndef find_free_network_port() -> int:\n    \"\"\"Finds a free port on localhost.\n\n    It is useful in single-node training when we don't want to connect to a real main node but have to set the\n    `MASTER_PORT` environment variable.\n    \"\"\"\n    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n    s.bind((\"\", 0))\n    port = s.getsockname()[1]\n    s.close()\n    return port\n\n\ndef get_trainer_from_args(dataset_name_or_id: Union[int, str],\n                          configuration: str,\n                          fold: int,\n                          trainer_name: str = 'nnUNetTrainer',\n                          plans_identifier: str = 'nnUNetPlans',\n                          use_compressed: bool = False,\n                          device: torch.device = torch.device('cuda')):\n    # load nnunet class and do sanity checks\n    nnunet_trainer = recursive_find_python_class(join(nnunetv2.__path__[0], \"training\", \"nnUNetTrainer\"),\n                                                trainer_name, 'nnunetv2.training.nnUNetTrainer')\n    if nnunet_trainer is None:\n        raise RuntimeError(f'Could not find requested nnunet trainer {trainer_name} in '\n                           f'nnunetv2.training.nnUNetTrainer ('\n                           f'{join(nnunetv2.__path__[0], \"training\", 
\"nnUNetTrainer\")}). If it is located somewhere '\n                           f'else, please move it there.')\n    assert issubclass(nnunet_trainer, nnUNetTrainer), 'The requested nnunet trainer class must inherit from ' \\\n                                                    'nnUNetTrainer'\n\n    # handle dataset input. If it's an ID we need to convert to int from string\n    if dataset_name_or_id.startswith('Dataset'):\n        pass\n    else:\n        try:\n            dataset_name_or_id = int(dataset_name_or_id)\n        except ValueError:\n            raise ValueError(f'dataset_name_or_id must either be an integer or a valid dataset name with the pattern '\n                             f'DatasetXXX_YYY where XXX are the three(!) task ID digits. Your '\n                             f'input: {dataset_name_or_id}')\n\n    # initialize nnunet trainer\n    preprocessed_dataset_folder_base = join(nnUNet_preprocessed, maybe_convert_to_dataset_name(dataset_name_or_id))\n    plans_file = join(preprocessed_dataset_folder_base, plans_identifier + '.json')\n    plans = load_json(plans_file)\n    dataset_json = load_json(join(preprocessed_dataset_folder_base, 'dataset.json'))\n    nnunet_trainer = nnunet_trainer(plans=plans, configuration=configuration, fold=fold,\n                                    dataset_json=dataset_json, unpack_dataset=not use_compressed, device=device)\n    return nnunet_trainer\n\n\ndef maybe_load_checkpoint(nnunet_trainer: nnUNetTrainer, continue_training: bool, validation_only: bool,\n                          pretrained_weights_file: str = None):\n    if continue_training and pretrained_weights_file is not None:\n        raise RuntimeError('Cannot both continue a training AND load pretrained weights. 
Pretrained weights can only '\n                           'be used at the beginning of the training.')\n    if continue_training:\n        expected_checkpoint_file = join(nnunet_trainer.output_folder, 'checkpoint_final.pth')\n        if not isfile(expected_checkpoint_file):\n            expected_checkpoint_file = join(nnunet_trainer.output_folder, 'checkpoint_latest.pth')\n        # special case where --c is used to run a previously aborted validation\n        if not isfile(expected_checkpoint_file):\n            expected_checkpoint_file = join(nnunet_trainer.output_folder, 'checkpoint_best.pth')\n        if not isfile(expected_checkpoint_file):\n            print(f\"WARNING: Cannot continue training because there seems to be no checkpoint available to \"\n                               f\"continue from. Starting a new training...\")\n            expected_checkpoint_file = None\n    elif validation_only:\n        expected_checkpoint_file = join(nnunet_trainer.output_folder, 'checkpoint_final.pth')\n        if not isfile(expected_checkpoint_file):\n            raise RuntimeError(f\"Cannot run validation because the training is not finished yet!\")\n    else:\n        if pretrained_weights_file is not None:\n            if not nnunet_trainer.was_initialized:\n                nnunet_trainer.initialize()\n            load_pretrained_weights(nnunet_trainer.network, pretrained_weights_file, verbose=True)\n        expected_checkpoint_file = None\n\n    if expected_checkpoint_file is not None:\n        nnunet_trainer.load_checkpoint(expected_checkpoint_file)\n\n\ndef setup_ddp(rank, world_size):\n    # initialize the process group\n    dist.init_process_group(\"nccl\", rank=rank, world_size=world_size)\n\n\ndef cleanup_ddp():\n    dist.destroy_process_group()\n\n\ndef run_ddp(rank, dataset_name_or_id, configuration, fold, tr, p, use_compressed, disable_checkpointing, c, val,\n            pretrained_weights, npz, val_with_best, world_size):\n    setup_ddp(rank, 
world_size)\n    torch.cuda.set_device(torch.device('cuda', dist.get_rank()))\n\n    nnunet_trainer = get_trainer_from_args(dataset_name_or_id, configuration, fold, tr, p,\n                                           use_compressed)\n\n    if disable_checkpointing:\n        nnunet_trainer.disable_checkpointing = disable_checkpointing\n\n    assert not (c and val), f'Cannot set --c and --val flag at the same time. Dummy.'\n\n    maybe_load_checkpoint(nnunet_trainer, c, val, pretrained_weights)\n\n    if torch.cuda.is_available():\n        cudnn.deterministic = False\n        cudnn.benchmark = True\n\n    if not val:\n        nnunet_trainer.run_training()\n\n    if val_with_best:\n        nnunet_trainer.load_checkpoint(join(nnunet_trainer.output_folder, 'checkpoint_best.pth'))\n    nnunet_trainer.perform_actual_validation(npz)\n    cleanup_ddp()\n\n\ndef run_training(dataset_name_or_id: Union[str, int],\n                 configuration: str, fold: Union[int, str],\n                 trainer_class_name: str = 'nnUNetTrainer',\n                 plans_identifier: str = 'nnUNetPlans',\n                 pretrained_weights: Optional[str] = None,\n                 num_gpus: int = 1,\n                 use_compressed_data: bool = False,\n                 export_validation_probabilities: bool = False,\n                 continue_training: bool = False,\n                 only_run_validation: bool = False,\n                 disable_checkpointing: bool = False,\n                 val_with_best: bool = False,\n                 device: torch.device = torch.device('cuda')):\n    if isinstance(fold, str):\n        if fold != 'all':\n            try:\n                fold = int(fold)\n            except ValueError as e:\n                print(f'Unable to convert given value for fold to int: {fold}. 
fold must bei either \"all\" or an integer!')\n                raise e\n\n    if val_with_best:\n        assert not disable_checkpointing, '--val_best is not compatible with --disable_checkpointing'\n\n    if num_gpus > 1:\n        assert device.type == 'cuda', f\"DDP training (triggered by num_gpus > 1) is only implemented for cuda devices. Your device: {device}\"\n\n        os.environ['MASTER_ADDR'] = 'localhost'\n        if 'MASTER_PORT' not in os.environ.keys():\n            port = str(find_free_network_port())\n            print(f\"using port {port}\")\n            os.environ['MASTER_PORT'] = port  # str(port)\n\n        mp.spawn(run_ddp,\n                 args=(\n                     dataset_name_or_id,\n                     configuration,\n                     fold,\n                     trainer_class_name,\n                     plans_identifier,\n                     use_compressed_data,\n                     disable_checkpointing,\n                     continue_training,\n                     only_run_validation,\n                     pretrained_weights,\n                     export_validation_probabilities,\n                     val_with_best,\n                     num_gpus),\n                 nprocs=num_gpus,\n                 join=True)\n    else:\n        nnunet_trainer = get_trainer_from_args(dataset_name_or_id, configuration, fold, trainer_class_name,\n                                               plans_identifier, use_compressed_data, device=device)\n\n        if disable_checkpointing:\n            nnunet_trainer.disable_checkpointing = disable_checkpointing\n\n        assert not (continue_training and only_run_validation), f'Cannot set --c and --val flag at the same time. 
Dummy.'\n\n        maybe_load_checkpoint(nnunet_trainer, continue_training, only_run_validation, pretrained_weights)\n\n        if torch.cuda.is_available():\n            cudnn.deterministic = False\n            cudnn.benchmark = True\n\n        if not only_run_validation:\n            nnunet_trainer.run_training()\n\n        if val_with_best:\n            nnunet_trainer.load_checkpoint(join(nnunet_trainer.output_folder, 'checkpoint_best.pth'))\n        nnunet_trainer.perform_actual_validation(export_validation_probabilities)\n\n\ndef run_training_entry():\n    import argparse\n    parser = argparse.ArgumentParser()\n    parser.add_argument('dataset_name_or_id', type=str,\n                        help=\"Dataset name or ID to train with\")\n    parser.add_argument('configuration', type=str,\n                        help=\"Configuration that should be trained\")\n    parser.add_argument('fold', type=str,\n                        help='Fold of the 5-fold cross-validation. Should be an int between 0 and 4.')\n    parser.add_argument('-tr', type=str, required=False, default='nnUNetTrainer',\n                        help='[OPTIONAL] Use this flag to specify a custom trainer. Default: nnUNetTrainer')\n    parser.add_argument('-p', type=str, required=False, default='nnUNetPlans',\n                        help='[OPTIONAL] Use this flag to specify a custom plans identifier. Default: nnUNetPlans')\n    parser.add_argument('-pretrained_weights', type=str, required=False, default=None,\n                        help='[OPTIONAL] path to nnU-Net checkpoint file to be used as pretrained model. Will only '\n                             'be used when actually training. Beta. 
Use with caution.')\n    parser.add_argument('-num_gpus', type=int, default=1, required=False,\n                        help='Specify the number of GPUs to use for training')\n    parser.add_argument(\"--use_compressed\", default=False, action=\"store_true\", required=False,\n                        help=\"[OPTIONAL] If you set this flag the training cases will not be decompressed. Reading compressed \"\n                             \"data is much more CPU and (potentially) RAM intensive and should only be used if you \"\n                             \"know what you are doing\")\n    parser.add_argument('--npz', action='store_true', required=False,\n                        help='[OPTIONAL] Save softmax predictions from final validation as npz files (in addition to predicted '\n                             'segmentations). Needed for finding the best ensemble.')\n    parser.add_argument('--c', action='store_true', required=False,\n                        help='[OPTIONAL] Continue training from latest checkpoint')\n    parser.add_argument('--val', action='store_true', required=False,\n                        help='[OPTIONAL] Set this flag to only run the validation. Requires training to have finished.')\n    parser.add_argument('--val_best', action='store_true', required=False,\n                        help='[OPTIONAL] If set, the validation will be performed with the checkpoint_best instead '\n                             'of checkpoint_final. NOT COMPATIBLE with --disable_checkpointing! '\n                             'WARNING: This will use the same \\'validation\\' folder as the regular validation '\n                             'with no way of distinguishing the two!')\n    parser.add_argument('--disable_checkpointing', action='store_true', required=False,\n                        help='[OPTIONAL] Set this flag to disable checkpointing. 
Ideal for testing things out and '\n                             'you dont want to flood your hard drive with checkpoints.')\n    parser.add_argument('-device', type=str, default='cuda', required=False,\n                    help=\"Use this to set the device the training should run with. Available options are 'cuda' \"\n                         \"(GPU), 'cpu' (CPU) and 'mps' (Apple M1/M2). Do NOT use this to set which GPU ID! \"\n                         \"Use CUDA_VISIBLE_DEVICES=X nnUNetv2_train [...] instead!\")\n    args = parser.parse_args()\n\n    assert args.device in ['cpu', 'cuda', 'mps'], f'-device must be either cpu, mps or cuda. Other devices are not tested/supported. Got: {args.device}.'\n    if args.device == 'cpu':\n        # let's allow torch to use hella threads\n        import multiprocessing\n        torch.set_num_threads(multiprocessing.cpu_count())\n        device = torch.device('cpu')\n    elif args.device == 'cuda':\n        # multithreading in torch doesn't help nnU-Net if run on GPU\n        torch.set_num_threads(1)\n        torch.set_num_interop_threads(1)\n        device = torch.device('cuda')\n    else:\n        device = torch.device('mps')\n\n    run_training(args.dataset_name_or_id, args.configuration, args.fold, args.tr, args.p, args.pretrained_weights,\n                 args.num_gpus, args.use_compressed, args.npz, args.c, args.val, args.disable_checkpointing, args.val_best,\n                 device=device)\n\n\nif __name__ == '__main__':\n    run_training_entry()\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/tests/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/tests/integration_tests/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/tests/integration_tests/add_lowres_and_cascade.py",
    "content": "from batchgenerators.utilities.file_and_folder_operations import *\n\nfrom nnunetv2.paths import nnUNet_preprocessed\nfrom nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name\n\nif __name__ == '__main__':\n    import argparse\n\n    parser = argparse.ArgumentParser()\n    parser.add_argument('-d', nargs='+', type=int, help='List of dataset ids')\n    args = parser.parse_args()\n\n    for d in args.d:\n        dataset_name = maybe_convert_to_dataset_name(d)\n        plans = load_json(join(nnUNet_preprocessed, dataset_name, 'nnUNetPlans.json'))\n        plans['configurations']['3d_lowres'] = {\n            \"data_identifier\": \"nnUNetPlans_3d_lowres\",  # do not be a dumbo and forget this. I was a dumbo. And I paid dearly with ~10 min debugging time\n            'inherits_from': '3d_fullres',\n            \"patch_size\": [20, 28, 20],\n            \"median_image_size_in_voxels\": [18.0, 25.0, 18.0],\n            \"spacing\": [2.0, 2.0, 2.0],\n            \"n_conv_per_stage_encoder\": [2, 2, 2],\n            \"n_conv_per_stage_decoder\": [2, 2],\n            \"num_pool_per_axis\": [2, 2, 2],\n            \"pool_op_kernel_sizes\": [[1, 1, 1], [2, 2, 2], [2, 2, 2]],\n            \"conv_kernel_sizes\": [[3, 3, 3], [3, 3, 3], [3, 3, 3]],\n            \"next_stage\": \"3d_cascade_fullres\"\n        }\n        plans['configurations']['3d_cascade_fullres'] = {\n            'inherits_from': '3d_fullres',\n            \"previous_stage\": \"3d_lowres\"\n        }\n        save_json(plans, join(nnUNet_preprocessed, dataset_name, 'nnUNetPlans.json'), sort_keys=False)"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/tests/integration_tests/cleanup_integration_test.py",
    "content": "import shutil\n\nfrom batchgenerators.utilities.file_and_folder_operations import isdir, join\n\nfrom nnunetv2.paths import nnUNet_raw, nnUNet_results, nnUNet_preprocessed\n\nif __name__ == '__main__':\n    # deletes everything!\n    dataset_names = [\n        'Dataset996_IntegrationTest_Hippocampus_regions_ignore',\n        'Dataset997_IntegrationTest_Hippocampus_regions',\n        'Dataset998_IntegrationTest_Hippocampus_ignore',\n        'Dataset999_IntegrationTest_Hippocampus',\n    ]\n    for fld in [nnUNet_raw, nnUNet_preprocessed, nnUNet_results]:\n        for d in dataset_names:\n            if isdir(join(fld, d)):\n                shutil.rmtree(join(fld, d))\n\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/tests/integration_tests/lsf_commands.sh",
    "content": "bsub -q gpu.legacy -gpu num=1:j_exclusive=yes:gmem=1G -L /bin/bash \". /home/isensee/load_env_cluster4.sh && cd /home/isensee/git_repos/nnunet_remake && export nnUNet_keep_files_open=True && . nnunetv2/tests/integration_tests/run_integration_test.sh 996\"\nbsub -q gpu.legacy -gpu num=1:j_exclusive=yes:gmem=1G -L /bin/bash \". /home/isensee/load_env_cluster4.sh && cd /home/isensee/git_repos/nnunet_remake && export nnUNet_keep_files_open=True && . nnunetv2/tests/integration_tests/run_integration_test.sh 997\"\nbsub -q gpu.legacy -gpu num=1:j_exclusive=yes:gmem=1G -L /bin/bash \". /home/isensee/load_env_cluster4.sh && cd /home/isensee/git_repos/nnunet_remake && export nnUNet_keep_files_open=True && . nnunetv2/tests/integration_tests/run_integration_test.sh 998\"\nbsub -q gpu.legacy -gpu num=1:j_exclusive=yes:gmem=1G -L /bin/bash \". /home/isensee/load_env_cluster4.sh && cd /home/isensee/git_repos/nnunet_remake && export nnUNet_keep_files_open=True && . nnunetv2/tests/integration_tests/run_integration_test.sh 999\"\n\n\nbsub -q gpu.legacy -gpu num=2:j_exclusive=yes:gmem=1G -L /bin/bash \". /home/isensee/load_env_cluster4.sh && cd /home/isensee/git_repos/nnunet_remake && export nnUNet_keep_files_open=True && . nnunetv2/tests/integration_tests/run_integration_test_trainingOnly_DDP.sh 996\"\nbsub -q gpu.legacy -gpu num=2:j_exclusive=yes:gmem=1G -L /bin/bash \". /home/isensee/load_env_cluster4.sh && cd /home/isensee/git_repos/nnunet_remake && export nnUNet_keep_files_open=True && . nnunetv2/tests/integration_tests/run_integration_test_trainingOnly_DDP.sh 997\"\nbsub -q gpu.legacy -gpu num=2:j_exclusive=yes:gmem=1G -L /bin/bash \". /home/isensee/load_env_cluster4.sh && cd /home/isensee/git_repos/nnunet_remake && export nnUNet_keep_files_open=True && . nnunetv2/tests/integration_tests/run_integration_test_trainingOnly_DDP.sh 998\"\nbsub -q gpu.legacy -gpu num=2:j_exclusive=yes:gmem=1G -L /bin/bash \". 
/home/isensee/load_env_cluster4.sh && cd /home/isensee/git_repos/nnunet_remake && export nnUNet_keep_files_open=True && . nnunetv2/tests/integration_tests/run_integration_test_trainingOnly_DDP.sh 999\"\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/tests/integration_tests/prepare_integration_tests.sh",
    "content": "# assumes you are in the nnunet repo!\n\n# prepare raw datasets\npython nnunetv2/dataset_conversion/datasets_for_integration_tests/Dataset999_IntegrationTest_Hippocampus.py\npython nnunetv2/dataset_conversion/datasets_for_integration_tests/Dataset998_IntegrationTest_Hippocampus_ignore.py\npython nnunetv2/dataset_conversion/datasets_for_integration_tests/Dataset997_IntegrationTest_Hippocampus_regions.py\npython nnunetv2/dataset_conversion/datasets_for_integration_tests/Dataset996_IntegrationTest_Hippocampus_regions_ignore.py\n\n# now run experiment planning without preprocessing\nnnUNetv2_plan_and_preprocess -d 996 997 998 999 --no_pp\n\n# now add 3d lowres and cascade\npython nnunetv2/tests/integration_tests/add_lowres_and_cascade.py -d 996 997 998 999\n\n# now preprocess everything\nnnUNetv2_preprocess -d 996 997 998 999 -c 2d 3d_lowres 3d_fullres -np 8 8 8  # no need to preprocess cascade as its the same data as 3d_fullres\n\n# done"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/tests/integration_tests/readme.md",
    "content": "# Preface\n\nI am just a mortal with many tasks and limited time. Aint nobody got time for unittests.\n\nHOWEVER, at least some integration tests should be performed testing nnU-Net from start to finish.\n\n# Introduction - What the heck is happening?\nThis test covers all possible labeling scenarios (standard labels, regions, ignore labels and regions with \nignore labels). It runs the entire nnU-Net pipeline from start to finish:\n\n- fingerprint extraction\n- experiment planning\n- preprocessing\n- train all 4 configurations (2d, 3d_lowres, 3d_fullres, 3d_cascade_fullres) as 5-fold CV\n- automatically find the best model or ensemble\n- determine the postprocessing used for this\n- predict some test set\n- apply postprocessing to the test set\n\nTo speed things up, we do the following:\n- pick Dataset004_Hippocampus because it is quadratisch praktisch gut. MNIST of medical image segmentation\n- by default this dataset does not have 3d_lowres or cascade. We just manually add them (cool new feature, eh?). See `add_lowres_and_cascade.py` to learn more! \n- we use nnUNetTrainer_5epochs for a short training\n\n# How to run it?\n\nSet your pwd to be the nnunet repo folder (the one where the `nnunetv2` folder and the `setup.py` are located!)\n\nNow generate the 4 dummy datasets (ids 996, 997, 998, 999) from dataset 4. This will crash if you don't have Dataset004!\n```commandline\nbash nnunetv2/tests/integration_tests/prepare_integration_tests.sh \n```\n\nNow you can run the integration test for each of the datasets:\n```commandline\nbash nnunetv2/tests/integration_tests/run_integration_test.sh DATSET_ID\n```\nuse DATSET_ID 996, 997, 998 and 999. You can run these independently on different GPUs/systems to speed things up. 
\nThis will take i dunno like 10-30 Minutes!?\n\nAlso run \n```commandline\nbash nnunetv2/tests/integration_tests/run_integration_test_trainingOnly_DDP.sh DATSET_ID\n```\nto verify DDP is working (needs 2 GPUs!)\n\n# How to check if the test was successful?\nIf I was not as lazy as I am I would have programmed some automatism that checks if Dice scores etc are in an acceptable range.\nSo you need to do the following:\n1) check that none of your runs crashed (duh)\n2) for each run, navigate to `nnUNet_results/DATASET_NAME` and take a look at the `inference_information.json` file. \nDoes it make sense? If so: NICE!\n\nOnce the integration test is completed you can delete all the temporary files associated with it by running:\n\n```commandline\npython nnunetv2/tests/integration_tests/cleanup_integration_test.py\n```"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/tests/integration_tests/run_integration_test.sh",
    "content": "\n\nnnUNetv2_train $1 3d_fullres 0 -tr nnUNetTrainer_5epochs --npz\nnnUNetv2_train $1 3d_fullres 1 -tr nnUNetTrainer_5epochs --npz\nnnUNetv2_train $1 3d_fullres 2 -tr nnUNetTrainer_5epochs --npz\nnnUNetv2_train $1 3d_fullres 3 -tr nnUNetTrainer_5epochs --npz\nnnUNetv2_train $1 3d_fullres 4 -tr nnUNetTrainer_5epochs --npz\n\nnnUNetv2_train $1 2d 0 -tr nnUNetTrainer_5epochs --npz\nnnUNetv2_train $1 2d 1 -tr nnUNetTrainer_5epochs --npz\nnnUNetv2_train $1 2d 2 -tr nnUNetTrainer_5epochs --npz\nnnUNetv2_train $1 2d 3 -tr nnUNetTrainer_5epochs --npz\nnnUNetv2_train $1 2d 4 -tr nnUNetTrainer_5epochs --npz\n\nnnUNetv2_train $1 3d_lowres 0 -tr nnUNetTrainer_5epochs --npz\nnnUNetv2_train $1 3d_lowres 1 -tr nnUNetTrainer_5epochs --npz\nnnUNetv2_train $1 3d_lowres 2 -tr nnUNetTrainer_5epochs --npz\nnnUNetv2_train $1 3d_lowres 3 -tr nnUNetTrainer_5epochs --npz\nnnUNetv2_train $1 3d_lowres 4 -tr nnUNetTrainer_5epochs --npz\n\nnnUNetv2_train $1 3d_cascade_fullres 0 -tr nnUNetTrainer_5epochs --npz\nnnUNetv2_train $1 3d_cascade_fullres 1 -tr nnUNetTrainer_5epochs --npz\nnnUNetv2_train $1 3d_cascade_fullres 2 -tr nnUNetTrainer_5epochs --npz\nnnUNetv2_train $1 3d_cascade_fullres 3 -tr nnUNetTrainer_5epochs --npz\nnnUNetv2_train $1 3d_cascade_fullres 4 -tr nnUNetTrainer_5epochs --npz\n\npython nnunetv2/tests/integration_tests/run_integration_test_bestconfig_inference.py -d $1"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/tests/integration_tests/run_integration_test_bestconfig_inference.py",
    "content": "import argparse\n\nimport torch\nfrom batchgenerators.utilities.file_and_folder_operations import join, load_pickle\n\nfrom nnunetv2.ensembling.ensemble import ensemble_folders\nfrom nnunetv2.evaluation.find_best_configuration import find_best_configuration, \\\n    dumb_trainer_config_plans_to_trained_models_dict\nfrom nnunetv2.inference.predict_from_raw_data import nnUNetPredictor\nfrom nnunetv2.paths import nnUNet_raw, nnUNet_results\nfrom nnunetv2.postprocessing.remove_connected_components import apply_postprocessing_to_folder\nfrom nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name\nfrom nnunetv2.utilities.file_path_utilities import get_output_folder\n\n\nif __name__ == '__main__':\n    \"\"\"\n    Predicts the imagesTs folder with the best configuration and applies postprocessing\n    \"\"\"\n    torch.set_num_threads(1)\n    torch.set_num_interop_threads(1)\n\n    parser = argparse.ArgumentParser()\n    parser.add_argument('-d', type=int, help='dataset id')\n    args = parser.parse_args()\n    d = args.d\n\n    dataset_name = maybe_convert_to_dataset_name(d)\n    source_dir = join(nnUNet_raw, dataset_name, 'imagesTs')\n    target_dir_base = join(nnUNet_results, dataset_name)\n\n    models = dumb_trainer_config_plans_to_trained_models_dict(['nnUNetTrainer_5epochs'],\n                                                              ['2d',\n                                                               '3d_lowres',\n                                                               '3d_cascade_fullres',\n                                                               '3d_fullres'],\n                                                              ['nnUNetPlans'])\n    ret = find_best_configuration(d, models, allow_ensembling=True, num_processes=8, overwrite=True,\n                                  folds=(0, 1, 2, 3, 4), strict=True)\n\n    has_ensemble = len(ret['best_model_or_ensemble']['selected_model_or_models']) > 1\n\n  
  # we don't use all folds to speed stuff up\n    used_folds = (0, 3)\n    output_folders = []\n    for im in ret['best_model_or_ensemble']['selected_model_or_models']:\n        output_dir = join(target_dir_base, f\"pred_{im['configuration']}\")\n        model_folder = get_output_folder(d, im['trainer'], im['plans_identifier'], im['configuration'])\n        # note that if the best model is the enseble of 3d_lowres and 3d cascade then 3d_lowres will be predicted\n        # twice (once standalone and once to generate the predictions for the cascade) because we don't reuse the\n        # prediction here. Proper way would be to check for that and\n        # then give the output of 3d_lowres inference to the folder_with_segs_from_prev_stage kwarg in\n        # predict_from_raw_data. Since we allow for\n        # dynamically setting 'previous_stage' in the plans I am too lazy to implement this here. This is just an\n        # integration test after all. Take a closer look at how this in handled in predict_from_raw_data\n        predictor = nnUNetPredictor(verbose=False, allow_tqdm=False)\n        predictor.initialize_from_trained_model_folder(model_folder, used_folds)\n        predictor.predict_from_files(source_dir, output_dir, has_ensemble, overwrite=True)\n        # predict_from_raw_data(list_of_lists_or_source_folder=source_dir, output_folder=output_dir,\n        #                       model_training_output_dir=model_folder, use_folds=used_folds,\n        #                       save_probabilities=has_ensemble, verbose=False, overwrite=True)\n        output_folders.append(output_dir)\n\n    # if we have an ensemble, we need to ensemble the results\n    if has_ensemble:\n        ensemble_folders(output_folders, join(target_dir_base, 'ensemble_predictions'), save_merged_probabilities=False)\n        folder_for_pp = join(target_dir_base, 'ensemble_predictions')\n    else:\n        folder_for_pp = output_folders[0]\n\n    # apply postprocessing\n    pp_fns, pp_fn_kwargs 
= load_pickle(ret['best_model_or_ensemble']['postprocessing_file'])\n    apply_postprocessing_to_folder(folder_for_pp, join(target_dir_base, 'ensemble_predictions_postprocessed'),\n                                   pp_fns,\n                                   pp_fn_kwargs, plans_file_or_dict=ret['best_model_or_ensemble']['some_plans_file'])\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/tests/integration_tests/run_integration_test_trainingOnly_DDP.sh",
    "content": "nnUNetv2_train $1 3d_fullres 0 -tr nnUNetTrainer_10epochs -num_gpus 2\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/data_augmentation/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/data_augmentation/compute_initial_patch_size.py",
    "content": "import numpy as np\n\n\ndef get_patch_size(final_patch_size, rot_x, rot_y, rot_z, scale_range):\n    if isinstance(rot_x, (tuple, list)):\n        rot_x = max(np.abs(rot_x))\n    if isinstance(rot_y, (tuple, list)):\n        rot_y = max(np.abs(rot_y))\n    if isinstance(rot_z, (tuple, list)):\n        rot_z = max(np.abs(rot_z))\n    rot_x = min(90 / 360 * 2. * np.pi, rot_x)\n    rot_y = min(90 / 360 * 2. * np.pi, rot_y)\n    rot_z = min(90 / 360 * 2. * np.pi, rot_z)\n    from batchgenerators.augmentations.utils import rotate_coords_3d, rotate_coords_2d\n    coords = np.array(final_patch_size)\n    final_shape = np.copy(coords)\n    if len(coords) == 3:\n        final_shape = np.max(np.vstack((np.abs(rotate_coords_3d(coords, rot_x, 0, 0)), final_shape)), 0)\n        final_shape = np.max(np.vstack((np.abs(rotate_coords_3d(coords, 0, rot_y, 0)), final_shape)), 0)\n        final_shape = np.max(np.vstack((np.abs(rotate_coords_3d(coords, 0, 0, rot_z)), final_shape)), 0)\n    elif len(coords) == 2:\n        final_shape = np.max(np.vstack((np.abs(rotate_coords_2d(coords, rot_x)), final_shape)), 0)\n    final_shape /= min(scale_range)\n    return final_shape.astype(int)\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/data_augmentation/custom_transforms/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/data_augmentation/custom_transforms/cascade_transforms.py",
    "content": "from typing import Union, List, Tuple, Callable\n\nimport numpy as np\nfrom acvl_utils.morphology.morphology_helper import label_with_component_sizes\nfrom batchgenerators.transforms.abstract_transforms import AbstractTransform\nfrom skimage.morphology import ball\nfrom skimage.morphology.binary import binary_erosion, binary_dilation, binary_closing, binary_opening\n\n\nclass MoveSegAsOneHotToData(AbstractTransform):\n    def __init__(self, index_in_origin: int, all_labels: Union[Tuple[int, ...], List[int]],\n                 key_origin=\"seg\", key_target=\"data\", remove_from_origin=True):\n        \"\"\"\n        Takes data_dict[seg][:, index_in_origin], converts it to one hot encoding and appends it to\n        data_dict[key_target]. Optionally removes index_in_origin from data_dict[seg].\n        \"\"\"\n        self.remove_from_origin = remove_from_origin\n        self.all_labels = all_labels\n        self.key_target = key_target\n        self.key_origin = key_origin\n        self.index_in_origin = index_in_origin\n\n    def __call__(self, **data_dict):\n        seg = data_dict[self.key_origin][:, self.index_in_origin:self.index_in_origin+1]\n\n        seg_onehot = np.zeros((seg.shape[0], len(self.all_labels), *seg.shape[2:]),\n                              dtype=data_dict[self.key_target].dtype)\n        for i, l in enumerate(self.all_labels):\n            seg_onehot[:, i][seg[:, 0] == l] = 1\n\n        data_dict[self.key_target] = np.concatenate((data_dict[self.key_target], seg_onehot), 1)\n\n        if self.remove_from_origin:\n            remaining_channels = [i for i in range(data_dict[self.key_origin].shape[1]) if i != self.index_in_origin]\n            data_dict[self.key_origin] = data_dict[self.key_origin][:, remaining_channels]\n\n        return data_dict\n\n\nclass RemoveRandomConnectedComponentFromOneHotEncodingTransform(AbstractTransform):\n    def __init__(self, channel_idx: Union[int, List[int]], key: str = \"data\", 
p_per_sample: float = 0.2,\n                 fill_with_other_class_p: float = 0.25,\n                 dont_do_if_covers_more_than_x_percent: float = 0.25, p_per_label: float = 1):\n        \"\"\"\n        Randomly removes connected components in the specified channel_idx of data_dict[key]. Only considers components\n        smaller than dont_do_if_covers_more_than_X_percent of the sample. Also has the option of simulating\n        misclassification as another class (fill_with_other_class_p)\n        \"\"\"\n        self.p_per_label = p_per_label\n        self.dont_do_if_covers_more_than_x_percent = dont_do_if_covers_more_than_x_percent\n        self.fill_with_other_class_p = fill_with_other_class_p\n        self.p_per_sample = p_per_sample\n        self.key = key\n        if not isinstance(channel_idx, (list, tuple)):\n            channel_idx = [channel_idx]\n        self.channel_idx = channel_idx\n\n    def __call__(self, **data_dict):\n        data = data_dict.get(self.key)\n        for b in range(data.shape[0]):\n            if np.random.uniform() < self.p_per_sample:\n                for c in self.channel_idx:\n                    if np.random.uniform() < self.p_per_label:\n                        # print(np.unique(data[b, c])) ## should be [0, 1]\n                        workon = data[b, c].astype(bool)\n                        if not np.any(workon):\n                            continue\n                        num_voxels = np.prod(workon.shape, dtype=np.uint64)\n                        lab, component_sizes = label_with_component_sizes(workon.astype(bool))\n                        if len(component_sizes) > 0:\n                            valid_component_ids = [i for i, j in component_sizes.items() if j <\n                                                   num_voxels*self.dont_do_if_covers_more_than_x_percent]\n                            # print('RemoveRandomConnectedComponentFromOneHotEncodingTransform', c,\n                            # np.unique(data[b, 
c]), len(component_sizes), valid_component_ids,\n                            # len(valid_component_ids))\n                            if len(valid_component_ids) > 0:\n                                random_component = np.random.choice(valid_component_ids)\n                                data[b, c][lab == random_component] = 0\n                                if np.random.uniform() < self.fill_with_other_class_p:\n                                    other_ch = [i for i in self.channel_idx if i != c]\n                                    if len(other_ch) > 0:\n                                        other_class = np.random.choice(other_ch)\n                                        data[b, other_class][lab == random_component] = 1\n        data_dict[self.key] = data\n        return data_dict\n\n\nclass ApplyRandomBinaryOperatorTransform(AbstractTransform):\n    def __init__(self,\n                 channel_idx: Union[int, List[int], Tuple[int, ...]],\n                 p_per_sample: float = 0.3,\n                 any_of_these: Tuple[Callable] = (binary_dilation, binary_erosion, binary_closing, binary_opening),\n                 key: str = \"data\",\n                 strel_size: Tuple[int, int] = (1, 10),\n                 p_per_label: float = 1):\n        \"\"\"\n        Applies random binary operations (specified by any_of_these) with random ball size (radius is uniformly sampled\n        from interval strel_size) to specified channels. 
Expects the channel_idx to correspond to a hone hot encoded\n        segmentation (see for example MoveSegAsOneHotToData)\n        \"\"\"\n        self.p_per_label = p_per_label\n        self.strel_size = strel_size\n        self.key = key\n        self.any_of_these = any_of_these\n        self.p_per_sample = p_per_sample\n\n        if not isinstance(channel_idx, (list, tuple)):\n            channel_idx = [channel_idx]\n        self.channel_idx = channel_idx\n\n    def __call__(self, **data_dict):\n        for b in range(data_dict[self.key].shape[0]):\n            if np.random.uniform() < self.p_per_sample:\n                # this needs to be applied in random order to the channels\n                np.random.shuffle(self.channel_idx)\n                for c in self.channel_idx:\n                    if np.random.uniform() < self.p_per_label:\n                        operation = np.random.choice(self.any_of_these)\n                        selem = ball(np.random.uniform(*self.strel_size))\n                        workon = data_dict[self.key][b, c].astype(bool)\n                        if not np.any(workon):\n                            continue\n                        # print(np.unique(workon))\n                        res = operation(workon, selem).astype(data_dict[self.key].dtype)\n                        # print('ApplyRandomBinaryOperatorTransform', c, operation, np.sum(workon), np.sum(res))\n                        data_dict[self.key][b, c] = res\n\n                        # if class was added, we need to remove it in ALL other channels to keep one hot encoding\n                        # properties\n                        other_ch = [i for i in self.channel_idx if i != c]\n                        if len(other_ch) > 0:\n                            was_added_mask = (res - workon) > 0\n                            for oc in other_ch:\n                                data_dict[self.key][b, oc][was_added_mask] = 0\n                            # if class was removed, 
leave it at background\n        return data_dict\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/data_augmentation/custom_transforms/deep_supervision_donwsampling.py",
    "content": "from typing import Tuple, Union, List\n\nfrom batchgenerators.augmentations.utils import resize_segmentation\nfrom batchgenerators.transforms.abstract_transforms import AbstractTransform\nimport numpy as np\n\n\nclass DownsampleSegForDSTransform2(AbstractTransform):\n    '''\n    data_dict['output_key'] will be a list of segmentations scaled according to ds_scales\n    '''\n    def __init__(self, ds_scales: Union[List, Tuple],\n                 order: int = 0, input_key: str = \"seg\",\n                 output_key: str = \"seg\", axes: Tuple[int] = None):\n        \"\"\"\n        Downscales data_dict[input_key] according to ds_scales. Each entry in ds_scales specified one deep supervision\n        output and its resolution relative to the original data, for example 0.25 specifies 1/4 of the original shape.\n        ds_scales can also be a tuple of tuples, for example ((1, 1, 1), (0.5, 0.5, 0.5)) to specify the downsampling\n        for each axis independently\n        \"\"\"\n        self.axes = axes\n        self.output_key = output_key\n        self.input_key = input_key\n        self.order = order\n        self.ds_scales = ds_scales\n\n    def __call__(self, **data_dict):\n        if self.axes is None:\n            axes = list(range(2, data_dict[self.input_key].ndim))\n        else:\n            axes = self.axes\n\n        output = []\n        for s in self.ds_scales:\n            if not isinstance(s, (tuple, list)):\n                s = [s] * len(axes)\n            else:\n                assert len(s) == len(axes), f'If ds_scales is a tuple for each resolution (one downsampling factor ' \\\n                                            f'for each axis) then the number of entried in that tuple (here ' \\\n                                            f'{len(s)}) must be the same as the number of axes (here {len(axes)}).'\n\n            if all([i == 1 for i in s]):\n                output.append(data_dict[self.input_key])\n            else:\n          
      new_shape = np.array(data_dict[self.input_key].shape).astype(float)\n                for i, a in enumerate(axes):\n                    new_shape[a] *= s[i]\n                new_shape = np.round(new_shape).astype(int)\n                out_seg = np.zeros(new_shape, dtype=data_dict[self.input_key].dtype)\n                for b in range(data_dict[self.input_key].shape[0]):\n                    for c in range(data_dict[self.input_key].shape[1]):\n                        out_seg[b, c] = resize_segmentation(data_dict[self.input_key][b, c], new_shape[2:], self.order)\n                output.append(out_seg)\n        data_dict[self.output_key] = output\n        return data_dict\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/data_augmentation/custom_transforms/limited_length_multithreaded_augmenter.py",
    "content": "from batchgenerators.dataloading.nondet_multi_threaded_augmenter import NonDetMultiThreadedAugmenter\n\n\nclass LimitedLenWrapper(NonDetMultiThreadedAugmenter):\n    def __init__(self, my_imaginary_length, *args, **kwargs):\n        super().__init__(*args, **kwargs)\n        self.len = my_imaginary_length\n\n    def __len__(self):\n        return self.len\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/data_augmentation/custom_transforms/manipulating_data_dict.py",
    "content": "from batchgenerators.transforms.abstract_transforms import AbstractTransform\n\n\nclass RemoveKeyTransform(AbstractTransform):\n    def __init__(self, key_to_remove: str):\n        self.key_to_remove = key_to_remove\n\n    def __call__(self, **data_dict):\n        _ = data_dict.pop(self.key_to_remove, None)\n        return data_dict\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/data_augmentation/custom_transforms/masking.py",
    "content": "from typing import List\n\nfrom batchgenerators.transforms.abstract_transforms import AbstractTransform\n\n\nclass MaskTransform(AbstractTransform):\n    def __init__(self, apply_to_channels: List[int], mask_idx_in_seg: int = 0, set_outside_to: int = 0,\n                 data_key: str = \"data\", seg_key: str = \"seg\"):\n        \"\"\"\n        Sets everything outside the mask to 0. CAREFUL! outside is defined as < 0, not =0 (in the Mask)!!!\n        \"\"\"\n        self.apply_to_channels = apply_to_channels\n        self.seg_key = seg_key\n        self.data_key = data_key\n        self.set_outside_to = set_outside_to\n        self.mask_idx_in_seg = mask_idx_in_seg\n\n    def __call__(self, **data_dict):\n        mask = data_dict[self.seg_key][:, self.mask_idx_in_seg] < 0\n        for c in self.apply_to_channels:\n            data_dict[self.data_key][:, c][mask] = self.set_outside_to\n        return data_dict\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/data_augmentation/custom_transforms/region_based_training.py",
    "content": "from typing import List, Tuple, Union\n\nfrom batchgenerators.transforms.abstract_transforms import AbstractTransform\nimport numpy as np\n\n\nclass ConvertSegmentationToRegionsTransform(AbstractTransform):\n    def __init__(self, regions: Union[List, Tuple],\n                 seg_key: str = \"seg\", output_key: str = \"seg\", seg_channel: int = 0):\n        \"\"\"\n        regions are tuple of tuples where each inner tuple holds the class indices that are merged into one region,\n        example:\n        regions= ((1, 2), (2, )) will result in 2 regions: one covering the region of labels 1&2 and the other just 2\n        :param regions:\n        :param seg_key:\n        :param output_key:\n        \"\"\"\n        self.seg_channel = seg_channel\n        self.output_key = output_key\n        self.seg_key = seg_key\n        self.regions = regions\n\n    def __call__(self, **data_dict):\n        seg = data_dict.get(self.seg_key)\n        num_regions = len(self.regions)\n        if seg is not None:\n            seg_shp = seg.shape\n            output_shape = list(seg_shp)\n            output_shape[1] = num_regions\n            region_output = np.zeros(output_shape, dtype=seg.dtype)\n            for b in range(seg_shp[0]):\n                for region_id, region_source_labels in enumerate(self.regions):\n                    if not isinstance(region_source_labels, (list, tuple)):\n                        region_source_labels = (region_source_labels, )\n                    for label_value in region_source_labels:\n                        region_output[b, region_id][seg[b, self.seg_channel] == label_value] = 1\n            data_dict[self.output_key] = region_output\n        return data_dict\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/data_augmentation/custom_transforms/transforms_for_dummy_2d.py",
    "content": "from typing import Tuple, Union, List\n\nfrom batchgenerators.transforms.abstract_transforms import AbstractTransform\n\n\nclass Convert3DTo2DTransform(AbstractTransform):\n    def __init__(self, apply_to_keys: Union[List[str], Tuple[str]] = ('data', 'seg')):\n        \"\"\"\n        Transforms a 5D array (b, c, x, y, z) to a 4D array (b, c * x, y, z) by overloading the color channel\n        \"\"\"\n        self.apply_to_keys = apply_to_keys\n\n    def __call__(self, **data_dict):\n        for k in self.apply_to_keys:\n            shp = data_dict[k].shape\n            assert len(shp) == 5, 'This transform only works on 3D data, so expects 5D tensor (b, c, x, y, z) as input.'\n            data_dict[k] = data_dict[k].reshape((shp[0], shp[1] * shp[2], shp[3], shp[4]))\n            shape_key = f'orig_shape_{k}'\n            assert shape_key not in data_dict.keys(), f'Convert3DTo2DTransform needs to store the original shape. ' \\\n                                                      f'It does that using the {shape_key} key. That key is ' \\\n                                                      f'already taken. Bummer.'\n            data_dict[shape_key] = shp\n        return data_dict\n\n\nclass Convert2DTo3DTransform(AbstractTransform):\n    def __init__(self, apply_to_keys: Union[List[str], Tuple[str]] = ('data', 'seg')):\n        \"\"\"\n        Reverts Convert3DTo2DTransform by transforming a 4D array (b, c * x, y, z) back to 5D  (b, c, x, y, z)\n        \"\"\"\n        self.apply_to_keys = apply_to_keys\n\n    def __call__(self, **data_dict):\n        for k in self.apply_to_keys:\n            shape_key = f'orig_shape_{k}'\n            assert shape_key in data_dict.keys(), f'Did not find key {shape_key} in data_dict. Shitty. 
' \\\n                                                  f'Convert2DTo3DTransform only works in tandem with ' \\\n                                                  f'Convert3DTo2DTransform and you probably forgot to add ' \\\n                                                  f'Convert3DTo2DTransform to your pipeline. (Convert3DTo2DTransform ' \\\n                                                  f'is where the missing key is generated)'\n            original_shape = data_dict[shape_key]\n            current_shape = data_dict[k].shape\n            data_dict[k] = data_dict[k].reshape((original_shape[0], original_shape[1], original_shape[2],\n                                                 current_shape[-2], current_shape[-1]))\n        return data_dict\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/dataloading/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/dataloading/base_data_loader.py",
    "content": "from typing import Union, Tuple\n\nfrom batchgenerators.dataloading.data_loader import DataLoader\nimport numpy as np\nfrom batchgenerators.utilities.file_and_folder_operations import *\nfrom nnunetv2.training.dataloading.nnunet_dataset import nnUNetDataset\nfrom nnunetv2.utilities.label_handling.label_handling import LabelManager\n\n\nclass nnUNetDataLoaderBase(DataLoader):\n    def __init__(self,\n                 data: nnUNetDataset,\n                 batch_size: int,\n                 patch_size: Union[List[int], Tuple[int, ...], np.ndarray],\n                 final_patch_size: Union[List[int], Tuple[int, ...], np.ndarray],\n                 label_manager: LabelManager,\n                 oversample_foreground_percent: float = 0.0,\n                 sampling_probabilities: Union[List[int], Tuple[int, ...], np.ndarray] = None,\n                 pad_sides: Union[List[int], Tuple[int, ...], np.ndarray] = None,\n                 probabilistic_oversampling: bool = False):\n        super().__init__(data, batch_size, 1, None, True, False, True, sampling_probabilities)\n        assert isinstance(data, nnUNetDataset), 'nnUNetDataLoaderBase only supports dictionaries as data'\n        self.indices = list(data.keys())\n\n        self.oversample_foreground_percent = oversample_foreground_percent\n        self.final_patch_size = final_patch_size\n        self.patch_size = patch_size\n        self.list_of_keys = list(self._data.keys())\n        # need_to_pad denotes by how much we need to pad the data so that if we sample a patch of size final_patch_size\n        # (which is what the network will get) these patches will also cover the border of the images\n        self.need_to_pad = (np.array(patch_size) - np.array(final_patch_size)).astype(int)\n        if pad_sides is not None:\n            if not isinstance(pad_sides, np.ndarray):\n                pad_sides = np.array(pad_sides)\n            self.need_to_pad += pad_sides\n        self.num_channels = None\n  
      self.pad_sides = pad_sides\n        self.data_shape, self.seg_shape = self.determine_shapes()\n        self.sampling_probabilities = sampling_probabilities\n        self.annotated_classes_key = tuple(label_manager.all_labels)\n        self.has_ignore = label_manager.has_ignore_label\n        self.get_do_oversample = self._oversample_last_XX_percent if not probabilistic_oversampling \\\n            else self._probabilistic_oversampling\n\n    def _oversample_last_XX_percent(self, sample_idx: int) -> bool:\n        \"\"\"\n        determines whether sample sample_idx in a minibatch needs to be guaranteed foreground\n        \"\"\"\n        return not sample_idx < round(self.batch_size * (1 - self.oversample_foreground_percent))\n\n    def _probabilistic_oversampling(self, sample_idx: int) -> bool:\n        # print('YEAH BOIIIIII')\n        return np.random.uniform() < self.oversample_foreground_percent\n\n    def determine_shapes(self):\n        # load one case\n        data, seg, properties = self._data.load_case(self.indices[0])\n        num_color_channels = data.shape[0]\n\n        data_shape = (self.batch_size, num_color_channels, *self.patch_size)\n        seg_shape = (self.batch_size, seg.shape[0], *self.patch_size)\n        return data_shape, seg_shape\n\n    def get_bbox(self, data_shape: np.ndarray, force_fg: bool, class_locations: Union[dict, None],\n                 overwrite_class: Union[int, Tuple[int, ...]] = None, verbose: bool = False):\n        # in dataloader 2d we need to select the slice prior to this and also modify the class_locations to only have\n        # locations for the given slice\n        need_to_pad = self.need_to_pad.copy()\n        dim = len(data_shape)\n\n        for d in range(dim):\n            # if case_all_data.shape + need_to_pad is still < patch size we need to pad more! 
We pad on both sides\n            # always\n            if need_to_pad[d] + data_shape[d] < self.patch_size[d]:\n                need_to_pad[d] = self.patch_size[d] - data_shape[d]\n\n        # we can now choose the bbox from -need_to_pad // 2 to shape - patch_size + need_to_pad // 2. Here we\n        # define what the upper and lower bound can be to then sample form them with np.random.randint\n        lbs = [- need_to_pad[i] // 2 for i in range(dim)]\n        ubs = [data_shape[i] + need_to_pad[i] // 2 + need_to_pad[i] % 2 - self.patch_size[i] for i in range(dim)]\n\n        # if not force_fg then we can just sample the bbox randomly from lb and ub. Else we need to make sure we get\n        # at least one of the foreground classes in the patch\n        if not force_fg and not self.has_ignore:\n            bbox_lbs = [np.random.randint(lbs[i], ubs[i] + 1) for i in range(dim)]\n            # print('I want a random location')\n        else:\n            if not force_fg and self.has_ignore:\n                selected_class = self.annotated_classes_key\n                if len(class_locations[selected_class]) == 0:\n                    # no annotated pixels in this case. Not good. But we can hardly skip it here\n                    print('Warning! No annotated pixels in image!')\n                    selected_class = None\n                # print(f'I have ignore labels and want to pick a labeled area. annotated_classes_key: {self.annotated_classes_key}')\n            elif force_fg:\n                assert class_locations is not None, 'if force_fg is set class_locations cannot be None'\n                if overwrite_class is not None:\n                    assert overwrite_class in class_locations.keys(), 'desired class (\"overwrite_class\") does not ' \\\n                                                                      'have class_locations (missing key)'\n                # this saves us a np.unique. Preprocessing already did that for all cases. 
Neat.\n                # class_locations keys can also be tuple\n                eligible_classes_or_regions = [i for i in class_locations.keys() if len(class_locations[i]) > 0]\n\n                # if we have annotated_classes_key locations and other classes are present, remove the annotated_classes_key from the list\n                # strange formulation needed to circumvent\n                # ValueError: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n                tmp = [i == self.annotated_classes_key if isinstance(i, tuple) else False for i in eligible_classes_or_regions]\n                if any(tmp):\n                    if len(eligible_classes_or_regions) > 1:\n                        eligible_classes_or_regions.pop(np.where(tmp)[0][0])\n\n                if len(eligible_classes_or_regions) == 0:\n                    # this only happens if some image does not contain foreground voxels at all\n                    selected_class = None\n                    if verbose:\n                        print('case does not contain any foreground classes')\n                else:\n                    # I hate myself. Future me aint gonna be happy to read this\n                    # 2022_11_25: had to read it today. 
Wasn't too bad\n                    selected_class = eligible_classes_or_regions[np.random.choice(len(eligible_classes_or_regions))] if \\\n                        (overwrite_class is None or (overwrite_class not in eligible_classes_or_regions)) else overwrite_class\n                # print(f'I want to have foreground, selected class: {selected_class}')\n            else:\n                raise RuntimeError('lol what!?')\n            voxels_of_that_class = class_locations[selected_class] if selected_class is not None else None\n\n            if voxels_of_that_class is not None and len(voxels_of_that_class) > 0:\n                selected_voxel = voxels_of_that_class[np.random.choice(len(voxels_of_that_class))]\n                # selected voxel is center voxel. Subtract half the patch size to get lower bbox voxel.\n                # Make sure it is within the bounds of lb and ub\n                # i + 1 because we have first dimension 0!\n                bbox_lbs = [max(lbs[i], selected_voxel[i + 1] - self.patch_size[i] // 2) for i in range(dim)]\n            else:\n                # If the image does not contain any foreground classes, we fall back to random cropping\n                bbox_lbs = [np.random.randint(lbs[i], ubs[i] + 1) for i in range(dim)]\n\n        bbox_ubs = [bbox_lbs[i] + self.patch_size[i] for i in range(dim)]\n\n        return bbox_lbs, bbox_ubs\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/dataloading/data_loader_2d.py",
    "content": "import numpy as np\nfrom nnunetv2.training.dataloading.base_data_loader import nnUNetDataLoaderBase\nfrom nnunetv2.training.dataloading.nnunet_dataset import nnUNetDataset\n\n\nclass nnUNetDataLoader2D(nnUNetDataLoaderBase):\n    def generate_train_batch(self):\n        selected_keys = self.get_indices()\n        # preallocate memory for data and seg\n        data_all = np.zeros(self.data_shape, dtype=np.float32)\n        seg_all = np.zeros(self.seg_shape, dtype=np.int16)\n        case_properties = []\n\n        for j, current_key in enumerate(selected_keys):\n            # oversampling foreground will improve stability of model training, especially if many patches are empty\n            # (Lung for example)\n            force_fg = self.get_do_oversample(j)\n            data, seg, properties = self._data.load_case(current_key)\n            case_properties.append(properties)\n\n            # select a class/region first, then a slice where this class is present, then crop to that area\n            if not force_fg:\n                if self.has_ignore:\n                    selected_class_or_region = self.annotated_classes_key\n                else:\n                    selected_class_or_region = None\n            else:\n                # filter out all classes that are not present here\n                eligible_classes_or_regions = [i for i in properties['class_locations'].keys() if len(properties['class_locations'][i]) > 0]\n\n                # if we have annotated_classes_key locations and other classes are present, remove the annotated_classes_key from the list\n                # strange formulation needed to circumvent\n                # ValueError: The truth value of an array with more than one element is ambiguous. 
Use a.any() or a.all()\n                tmp = [i == self.annotated_classes_key if isinstance(i, tuple) else False for i in eligible_classes_or_regions]\n                if any(tmp):\n                    if len(eligible_classes_or_regions) > 1:\n                        eligible_classes_or_regions.pop(np.where(tmp)[0][0])\n\n                selected_class_or_region = eligible_classes_or_regions[np.random.choice(len(eligible_classes_or_regions))] if \\\n                    len(eligible_classes_or_regions) > 0 else None\n            if selected_class_or_region is not None:\n                selected_slice = np.random.choice(properties['class_locations'][selected_class_or_region][:, 1])\n            else:\n                selected_slice = np.random.choice(len(data[0]))\n\n            data = data[:, selected_slice]\n            seg = seg[:, selected_slice]\n\n            # the line of death lol\n            # this needs to be a separate variable because we could otherwise permanently overwrite\n            # properties['class_locations']\n            # selected_class_or_region is:\n            # - None if we do not have an ignore label and force_fg is False OR if force_fg is True but there is no foreground in the image\n            # - A tuple of all (non-ignore) labels if there is an ignore label and force_fg is False\n            # - a class or region if force_fg is True\n            class_locations = {\n                selected_class_or_region: properties['class_locations'][selected_class_or_region][properties['class_locations'][selected_class_or_region][:, 1] == selected_slice][:, (0, 2, 3)]\n            } if (selected_class_or_region is not None) else None\n\n            # print(properties)\n            shape = data.shape[1:]\n            dim = len(shape)\n            bbox_lbs, bbox_ubs = self.get_bbox(shape, force_fg if selected_class_or_region is not None else None,\n                                               class_locations, 
overwrite_class=selected_class_or_region)\n\n            # whoever wrote this knew what he was doing (hint: it was me). We first crop the data to the region of the\n            # bbox that actually lies within the data. This will result in a smaller array which is then faster to pad.\n            # valid_bbox is just the coord that lied within the data cube. It will be padded to match the patch size\n            # later\n            valid_bbox_lbs = [max(0, bbox_lbs[i]) for i in range(dim)]\n            valid_bbox_ubs = [min(shape[i], bbox_ubs[i]) for i in range(dim)]\n\n            # At this point you might ask yourself why we would treat seg differently from seg_from_previous_stage.\n            # Why not just concatenate them here and forget about the if statements? Well that's because segneeds to\n            # be padded with -1 constant whereas seg_from_previous_stage needs to be padded with 0s (we could also\n            # remove label -1 in the data augmentation but this way it is less error prone)\n            this_slice = tuple([slice(0, data.shape[0])] + [slice(i, j) for i, j in zip(valid_bbox_lbs, valid_bbox_ubs)])\n            data = data[this_slice]\n\n            this_slice = tuple([slice(0, seg.shape[0])] + [slice(i, j) for i, j in zip(valid_bbox_lbs, valid_bbox_ubs)])\n            seg = seg[this_slice]\n\n            padding = [(-min(0, bbox_lbs[i]), max(bbox_ubs[i] - shape[i], 0)) for i in range(dim)]\n            data_all[j] = np.pad(data, ((0, 0), *padding), 'constant', constant_values=0)\n            seg_all[j] = np.pad(seg, ((0, 0), *padding), 'constant', constant_values=-1)\n\n        return {'data': data_all, 'seg': seg_all, 'properties': case_properties, 'keys': selected_keys}\n\n\nif __name__ == '__main__':\n    folder = '/media/fabian/data/nnUNet_preprocessed/Dataset004_Hippocampus/2d'\n    ds = nnUNetDataset(folder, None, 1000)  # this should not load the properties!\n    dl = nnUNetDataLoader2D(ds, 366, (65, 65), (56, 40), 0.33, None, 
None)\n    a = next(dl)\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/dataloading/data_loader_3d.py",
    "content": "import numpy as np\nfrom nnunetv2.training.dataloading.base_data_loader import nnUNetDataLoaderBase\nfrom nnunetv2.training.dataloading.nnunet_dataset import nnUNetDataset\n\n\nclass nnUNetDataLoader3D(nnUNetDataLoaderBase):\n    def generate_train_batch(self):\n        selected_keys = self.get_indices()\n        # preallocate memory for data and seg\n        data_all = np.zeros(self.data_shape, dtype=np.float32)\n        seg_all = np.zeros(self.seg_shape, dtype=np.int16)\n        case_properties = []\n\n        for j, i in enumerate(selected_keys):\n            # oversampling foreground will improve stability of model training, especially if many patches are empty\n            # (Lung for example)\n            force_fg = self.get_do_oversample(j)\n\n            data, seg, properties = self._data.load_case(i)\n            case_properties.append(properties)\n\n            # If we are doing the cascade then the segmentation from the previous stage will already have been loaded by\n            # self._data.load_case(i) (see nnUNetDataset.load_case)\n            shape = data.shape[1:]\n            dim = len(shape)\n            bbox_lbs, bbox_ubs = self.get_bbox(shape, force_fg, properties['class_locations'])\n\n            # whoever wrote this knew what he was doing (hint: it was me). We first crop the data to the region of the\n            # bbox that actually lies within the data. This will result in a smaller array which is then faster to pad.\n            # valid_bbox is just the coord that lied within the data cube. It will be padded to match the patch size\n            # later\n            valid_bbox_lbs = [max(0, bbox_lbs[i]) for i in range(dim)]\n            valid_bbox_ubs = [min(shape[i], bbox_ubs[i]) for i in range(dim)]\n\n            # At this point you might ask yourself why we would treat seg differently from seg_from_previous_stage.\n            # Why not just concatenate them here and forget about the if statements? 
Well that's because segneeds to\n            # be padded with -1 constant whereas seg_from_previous_stage needs to be padded with 0s (we could also\n            # remove label -1 in the data augmentation but this way it is less error prone)\n            this_slice = tuple([slice(0, data.shape[0])] + [slice(i, j) for i, j in zip(valid_bbox_lbs, valid_bbox_ubs)])\n            data = data[this_slice]\n\n            this_slice = tuple([slice(0, seg.shape[0])] + [slice(i, j) for i, j in zip(valid_bbox_lbs, valid_bbox_ubs)])\n            seg = seg[this_slice]\n\n            padding = [(-min(0, bbox_lbs[i]), max(bbox_ubs[i] - shape[i], 0)) for i in range(dim)]\n            data_all[j] = np.pad(data, ((0, 0), *padding), 'constant', constant_values=0)\n            seg_all[j] = np.pad(seg, ((0, 0), *padding), 'constant', constant_values=-1)\n\n        return {'data': data_all, 'seg': seg_all, 'properties': case_properties, 'keys': selected_keys}\n\n\nif __name__ == '__main__':\n    folder = '/media/fabian/data/nnUNet_preprocessed/Dataset002_Heart/3d_fullres'\n    ds = nnUNetDataset(folder, 0)  # this should not load the properties!\n    dl = nnUNetDataLoader3D(ds, 5, (16, 16, 16), (16, 16, 16), 0.33, None, None)\n    a = next(dl)\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/dataloading/nnunet_dataset.py",
    "content": "import os\nfrom typing import List\n\nimport numpy as np\nimport shutil\n\nfrom batchgenerators.utilities.file_and_folder_operations import join, load_pickle, isfile\nfrom nnunetv2.training.dataloading.utils import get_case_identifiers\n\n\nclass nnUNetDataset(object):\n    def __init__(self, folder: str, case_identifiers: List[str] = None,\n                 num_images_properties_loading_threshold: int = 0,\n                 folder_with_segs_from_previous_stage: str = None):\n        \"\"\"\n        This does not actually load the dataset. It merely creates a dictionary where the keys are training case names and\n        the values are dictionaries containing the relevant information for that case.\n        dataset[training_case] -> info\n        Info has the following key:value pairs:\n        - dataset[case_identifier]['properties']['data_file'] -> the full path to the npz file associated with the training case\n        - dataset[case_identifier]['properties']['properties_file'] -> the pkl file containing the case properties\n\n        In addition, if the total number of cases is < num_images_properties_loading_threshold we load all the pickle files\n        (containing auxiliary information). This is done for small datasets so that we don't spend too much CPU time on\n        reading pkl files on the fly during training. However, for large datasets storing all the aux info (which also\n        contains locations of foreground voxels in the images) can cause too much RAM utilization. In that\n        case is it better to load on the fly.\n\n        If properties are loaded into the RAM, the info dicts each will have an additional entry:\n        - dataset[case_identifier]['properties'] -> pkl file content\n\n        IMPORTANT! THIS CLASS ITSELF IS READ-ONLY. YOU CANNOT ADD KEY:VALUE PAIRS WITH nnUNetDataset[key] = value\n        USE THIS INSTEAD:\n        nnUNetDataset.dataset[key] = value\n        (not sure why you'd want to do that though. 
So don't do it)\n        \"\"\"\n        super().__init__()\n        # print('loading dataset')\n        if case_identifiers is None:\n            case_identifiers = get_case_identifiers(folder)\n        case_identifiers.sort()\n\n        self.dataset = {}\n        for c in case_identifiers:\n            self.dataset[c] = {}\n            self.dataset[c]['data_file'] = join(folder, f\"{c}.npz\")\n            self.dataset[c]['properties_file'] = join(folder, f\"{c}.pkl\")\n            if folder_with_segs_from_previous_stage is not None:\n                self.dataset[c]['seg_from_prev_stage_file'] = join(folder_with_segs_from_previous_stage, f\"{c}.npz\")\n\n        if len(case_identifiers) <= num_images_properties_loading_threshold:\n            for i in self.dataset.keys():\n                self.dataset[i]['properties'] = load_pickle(self.dataset[i]['properties_file'])\n\n        self.keep_files_open = ('nnUNet_keep_files_open' in os.environ.keys()) and \\\n                               (os.environ['nnUNet_keep_files_open'].lower() in ('true', '1', 't'))\n        # print(f'nnUNetDataset.keep_files_open: {self.keep_files_open}')\n\n    def __getitem__(self, key):\n        ret = {**self.dataset[key]}\n        if 'properties' not in ret.keys():\n            ret['properties'] = load_pickle(ret['properties_file'])\n        return ret\n\n    def __setitem__(self, key, value):\n        return self.dataset.__setitem__(key, value)\n\n    def keys(self):\n        return self.dataset.keys()\n\n    def __len__(self):\n        return self.dataset.__len__()\n\n    def items(self):\n        return self.dataset.items()\n\n    def values(self):\n        return self.dataset.values()\n\n    def load_case(self, key):\n        entry = self[key]\n        if 'open_data_file' in entry.keys():\n            data = entry['open_data_file']\n            # print('using open data file')\n        elif isfile(entry['data_file'][:-4] + \".npy\"):\n            data = np.load(entry['data_file'][:-4] 
+ \".npy\", 'r')\n            if self.keep_files_open:\n                self.dataset[key]['open_data_file'] = data\n                # print('saving open data file')\n        else:\n            data = np.load(entry['data_file'])['data']\n\n        if 'open_seg_file' in entry.keys():\n            seg = entry['open_seg_file']\n            # print('using open data file')\n        elif isfile(entry['data_file'][:-4] + \"_seg.npy\"):\n            seg = np.load(entry['data_file'][:-4] + \"_seg.npy\", 'r')\n            if self.keep_files_open:\n                self.dataset[key]['open_seg_file'] = seg\n                # print('saving open seg file')\n        else:\n            seg = np.load(entry['data_file'])['seg']\n\n        if 'seg_from_prev_stage_file' in entry.keys():\n            if isfile(entry['seg_from_prev_stage_file'][:-4] + \".npy\"):\n                seg_prev = np.load(entry['seg_from_prev_stage_file'][:-4] + \".npy\", 'r')\n            else:\n                seg_prev = np.load(entry['seg_from_prev_stage_file'])['seg']\n            seg = np.vstack((seg, seg_prev[None]))\n\n        return data, seg, entry['properties']\n\n\nif __name__ == '__main__':\n    # this is a mini test. Todo: We can move this to tests in the future (requires simulated dataset)\n\n    folder = '/media/fabian/data/nnUNet_preprocessed/Dataset003_Liver/3d_lowres'\n    ds = nnUNetDataset(folder, num_images_properties_loading_threshold=0) # this should not load the properties!\n    # this SHOULD HAVE the properties\n    ks = ds['liver_0'].keys()\n    assert 'properties' in ks\n    # amazing. 
I am the best.\n\n    # this should have the properties\n    ds = nnUNetDataset(folder, num_images_properties_loading_threshold=1000)\n    # now rename the properties file so that it does not exist anymore\n    shutil.move(join(folder, 'liver_0.pkl'), join(folder, 'liver_XXX.pkl'))\n    # now we should still be able to access the properties because they have already been loaded\n    ks = ds['liver_0'].keys()\n    assert 'properties' in ks\n    # move file back\n    shutil.move(join(folder, 'liver_XXX.pkl'), join(folder, 'liver_0.pkl'))\n\n    # this should not have the properties\n    ds = nnUNetDataset(folder, num_images_properties_loading_threshold=0)\n    # now rename the properties file so that it does not exist anymore\n    shutil.move(join(folder, 'liver_0.pkl'), join(folder, 'liver_XXX.pkl'))\n    # now this should crash\n    try:\n        ks = ds['liver_0'].keys()\n        raise RuntimeError('we should not have come here')\n    except FileNotFoundError:\n        print('all good')\n        # move file back\n        shutil.move(join(folder, 'liver_XXX.pkl'), join(folder, 'liver_0.pkl'))\n\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/dataloading/utils.py",
    "content": "from __future__ import annotations\nimport multiprocessing\nimport os\nfrom typing import List\nfrom pathlib import Path\nfrom warnings import warn\n\nimport numpy as np\nfrom batchgenerators.utilities.file_and_folder_operations import isfile, subfiles\nfrom nnunetv2.configuration import default_num_processes\n\n\ndef find_broken_image_and_labels(\n    path_to_data_dir: str | Path,\n) -> tuple[set[str], set[str]]:\n    \"\"\"\n    Iterates through all numpys and tries to read them once to see if a ValueError is raised.\n    If so, the case id is added to the respective set and returned for potential fixing.\n\n    :path_to_data_dir: Path/str to the preprocessed directory containing the npys and npzs.\n    :returns: Tuple of a set containing the case ids of the broken npy images and a set of the case ids of broken npy segmentations. \n    \"\"\"\n    content = os.listdir(path_to_data_dir)\n    unique_ids = [c[:-4] for c in content if c.endswith(\".npz\")]\n    failed_data_ids = set()\n    failed_seg_ids = set()\n    for unique_id in unique_ids:\n        # Try reading data\n        try:\n            np.load(path_to_data_dir / (unique_id + \".npy\"), \"r\")\n        except ValueError:\n            failed_data_ids.add(unique_id)\n        # Try reading seg\n        try:\n            np.load(path_to_data_dir / (unique_id + \"_seg.npy\"), \"r\")\n        except ValueError:\n            failed_seg_ids.add(unique_id)\n\n    return failed_data_ids, failed_seg_ids\n\n\ndef try_fix_broken_npy(path_do_data_dir: Path, case_ids: set[str], fix_image: bool):\n    \"\"\" \n    Receives broken case ids and tries to fix them by re-extracting the npz file (up to 5 times).\n\n    :param case_ids: Set of case ids that are broken.\n    :param path_do_data_dir: Path to the preprocessed directory containing the npys and npzs.\n    :raises ValueError: If the npy file could not be unpacked after 5 tries. 
--\n    \"\"\"\n    for case_id in case_ids:\n        for i in range(5):\n            try:\n                key = \"data\" if fix_image else \"seg\"\n                suffix = \".npy\" if fix_image else \"_seg.npy\"\n                read_npz = np.load(path_do_data_dir / (case_id + \".npz\"), \"r\")[key]\n                np.save(path_do_data_dir / (case_id + suffix), read_npz)\n                # Try loading the just saved image.\n                np.load(path_do_data_dir / (case_id + suffix), \"r\")\n                break\n            except ValueError:\n                if i == 4:\n                    raise ValueError(\n                        f\"Could not unpack {case_id + suffix} after 5 tries!\"\n                    )\n                continue\n\n\ndef verify_or_stratify_npys(path_to_data_dir: str | Path) -> None:\n    \"\"\"\n    This re-reads the npy files after unpacking. Should there be a loading issue with any, it will try to unpack this file again and overwrites the existing.\n    If the new file does not get saved correctly 5 times, it will raise an error with the file name to the user. Does the same for images and segmentations.\n    :param path_to_data_dir: Path to the preprocessed directory containing the npys and npzs.\n    :raises ValueError: If the npy file could not be unpacked after 5 tries. 
--\n      Otherwise an obscured error will be raised later during training (depending when the broken file is sampled)\n    \"\"\"\n    path_to_data_dir = Path(path_to_data_dir)\n    # Check for broken image and segmentation npys\n    failed_data_ids, failed_seg_ids = find_broken_image_and_labels(path_to_data_dir)\n\n    if len(failed_data_ids) != 0 or len(failed_seg_ids) != 0:\n        warn(\n            f\"Found {len(failed_data_ids)} faulty data npys and {len(failed_seg_ids)}!\\n\"\n            + f\"Faulty images: {failed_data_ids}; Faulty segmentations: {failed_seg_ids})\\n\"\n            + \"Trying to fix them now.\"\n        )\n        # Try to fix the broken npys by reextracting the npz. If that fails, raise error\n        try_fix_broken_npy(path_to_data_dir, failed_data_ids, fix_image=True)\n        try_fix_broken_npy(path_to_data_dir, failed_seg_ids, fix_image=False)\n\n\ndef _convert_to_npy(npz_file: str, unpack_segmentation: bool = True, overwrite_existing: bool = False) -> None:\n    try:\n        a = np.load(npz_file)  # inexpensive, no compression is done here. 
This just reads metadata\n        if overwrite_existing or not isfile(npz_file[:-3] + \"npy\"):\n            np.save(npz_file[:-3] + \"npy\", a['data'])\n        if unpack_segmentation and (overwrite_existing or not isfile(npz_file[:-4] + \"_seg.npy\")):\n            np.save(npz_file[:-4] + \"_seg.npy\", a['seg'])\n    except KeyboardInterrupt:\n        if isfile(npz_file[:-3] + \"npy\"):\n            os.remove(npz_file[:-3] + \"npy\")\n        if isfile(npz_file[:-4] + \"_seg.npy\"):\n            os.remove(npz_file[:-4] + \"_seg.npy\")\n        raise KeyboardInterrupt\n\n\ndef unpack_dataset(folder: str, unpack_segmentation: bool = True, overwrite_existing: bool = False,\n                   num_processes: int = default_num_processes):\n    \"\"\"\n    all npz files in this folder belong to the dataset, unpack them all\n    \"\"\"\n    with multiprocessing.get_context(\"spawn\").Pool(num_processes) as p:\n        npz_files = subfiles(folder, True, None, \".npz\", True)\n        p.starmap(_convert_to_npy, zip(npz_files,\n                                       [unpack_segmentation] * len(npz_files),\n                                       [overwrite_existing] * len(npz_files))\n                  )\n\n\ndef get_case_identifiers(folder: str) -> List[str]:\n    \"\"\"\n    finds all npz files in the given folder and reconstructs the training case names from them\n    \"\"\"\n    case_identifiers = [i[:-4] for i in os.listdir(folder) if i.endswith(\"npz\") and (i.find(\"segFromPrevStage\") == -1)]\n    return case_identifiers\n\n\nif __name__ == '__main__':\n    unpack_dataset('/media/fabian/data/nnUNet_preprocessed/Dataset002_Heart/2d')"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/logging/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/logging/nnunet_logger.py",
    "content": "import matplotlib\nfrom batchgenerators.utilities.file_and_folder_operations import join\n\nmatplotlib.use('agg')\nimport seaborn as sns\nimport matplotlib.pyplot as plt\n\n\nclass nnUNetLogger(object):\n    \"\"\"\n    This class is really trivial. Don't expect cool functionality here. This is my makeshift solution to problems\n    arising from out-of-sync epoch numbers and numbers of logged loss values. It also simplifies the trainer class a\n    little\n\n    YOU MUST LOG EXACTLY ONE VALUE PER EPOCH FOR EACH OF THE LOGGING ITEMS! DONT FUCK IT UP\n    \"\"\"\n    def __init__(self, verbose: bool = False):\n        self.my_fantastic_logging = {\n            'mean_fg_dice': list(),\n            'ema_fg_dice': list(),\n            'dice_per_class_or_region': list(),\n            'train_losses': list(),\n            'val_losses': list(),\n            'lrs': list(),\n            'epoch_start_timestamps': list(),\n            'epoch_end_timestamps': list()\n        }\n        self.verbose = verbose\n        # shut up, this logging is great\n\n    def log(self, key, value, epoch: int):\n        \"\"\"\n        sometimes shit gets messed up. We try to catch that here\n        \"\"\"\n        assert key in self.my_fantastic_logging.keys() and isinstance(self.my_fantastic_logging[key], list), \\\n            'This function is only intended to log stuff to lists and to have one entry per epoch'\n\n        if self.verbose: print(f'logging {key}: {value} for epoch {epoch}')\n\n        if len(self.my_fantastic_logging[key]) < (epoch + 1):\n            self.my_fantastic_logging[key].append(value)\n        else:\n            assert len(self.my_fantastic_logging[key]) == (epoch + 1), 'something went horribly wrong. My logging ' \\\n                                                                       'lists length is off by more than 1'\n            print(f'maybe some logging issue!? 
logging {key} and {value}')\n            self.my_fantastic_logging[key][epoch] = value\n\n        # handle the ema_fg_dice special case! It is automatically logged when we add a new mean_fg_dice\n        if key == 'mean_fg_dice':\n            new_ema_pseudo_dice = self.my_fantastic_logging['ema_fg_dice'][epoch - 1] * 0.9 + 0.1 * value \\\n                if len(self.my_fantastic_logging['ema_fg_dice']) > 0 else value\n            self.log('ema_fg_dice', new_ema_pseudo_dice, epoch)\n\n    def plot_progress_png(self, output_folder):\n        # we infer the epoch form our internal logging\n        epoch = min([len(i) for i in self.my_fantastic_logging.values()]) - 1  # lists of epoch 0 have len 1\n        sns.set(font_scale=2.5)\n        fig, ax_all = plt.subplots(3, 1, figsize=(30, 54))\n        # regular progress.png as we are used to from previous nnU-Net versions\n        ax = ax_all[0]\n        ax2 = ax.twinx()\n        x_values = list(range(epoch + 1))\n        ax.plot(x_values, self.my_fantastic_logging['train_losses'][:epoch + 1], color='b', ls='-', label=\"loss_tr\", linewidth=4)\n        ax.plot(x_values, self.my_fantastic_logging['val_losses'][:epoch + 1], color='r', ls='-', label=\"loss_val\", linewidth=4)\n        ax2.plot(x_values, self.my_fantastic_logging['mean_fg_dice'][:epoch + 1], color='g', ls='dotted', label=\"pseudo dice\",\n                 linewidth=3)\n        ax2.plot(x_values, self.my_fantastic_logging['ema_fg_dice'][:epoch + 1], color='g', ls='-', label=\"pseudo dice (mov. 
avg.)\",\n                 linewidth=4)\n        ax.set_xlabel(\"epoch\")\n        ax.set_ylabel(\"loss\")\n        ax2.set_ylabel(\"pseudo dice\")\n        ax.legend(loc=(0, 1))\n        ax2.legend(loc=(0.2, 1))\n\n        # epoch times to see whether the training speed is consistent (inconsistent means there are other jobs\n        # clogging up the system)\n        ax = ax_all[1]\n        ax.plot(x_values, [i - j for i, j in zip(self.my_fantastic_logging['epoch_end_timestamps'][:epoch + 1],\n                                                 self.my_fantastic_logging['epoch_start_timestamps'])][:epoch + 1], color='b',\n                ls='-', label=\"epoch duration\", linewidth=4)\n        ylim = [0] + [ax.get_ylim()[1]]\n        ax.set(ylim=ylim)\n        ax.set_xlabel(\"epoch\")\n        ax.set_ylabel(\"time [s]\")\n        ax.legend(loc=(0, 1))\n\n        # learning rate\n        ax = ax_all[2]\n        ax.plot(x_values, self.my_fantastic_logging['lrs'][:epoch + 1], color='b', ls='-', label=\"learning rate\", linewidth=4)\n        ax.set_xlabel(\"epoch\")\n        ax.set_ylabel(\"learning rate\")\n        ax.legend(loc=(0, 1))\n\n        plt.tight_layout()\n\n        fig.savefig(join(output_folder, \"progress.png\"))\n        plt.close()\n\n    def get_checkpoint(self):\n        return self.my_fantastic_logging\n\n    def load_checkpoint(self, checkpoint: dict):\n        self.my_fantastic_logging = checkpoint\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/loss/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/loss/compound_losses.py",
    "content": "import torch\nfrom nnunetv2.training.loss.dice import SoftDiceLoss, MemoryEfficientSoftDiceLoss\nfrom nnunetv2.training.loss.robust_ce_loss import RobustCrossEntropyLoss, TopKLoss\nfrom nnunetv2.utilities.helpers import softmax_helper_dim1\nfrom torch import nn\n\n\nclass DC_and_CE_loss(nn.Module):\n    def __init__(self, soft_dice_kwargs, ce_kwargs, weight_ce=1, weight_dice=1, ignore_label=None,\n                 dice_class=SoftDiceLoss):\n        \"\"\"\n        Weights for CE and Dice do not need to sum to one. You can set whatever you want.\n        :param soft_dice_kwargs:\n        :param ce_kwargs:\n        :param aggregate:\n        :param square_dice:\n        :param weight_ce:\n        :param weight_dice:\n        \"\"\"\n        super(DC_and_CE_loss, self).__init__()\n        if ignore_label is not None:\n            ce_kwargs['ignore_index'] = ignore_label\n\n        self.weight_dice = weight_dice\n        self.weight_ce = weight_ce\n        self.ignore_label = ignore_label\n\n        self.ce = RobustCrossEntropyLoss(**ce_kwargs)\n        self.dc = dice_class(apply_nonlin=softmax_helper_dim1, **soft_dice_kwargs)\n\n    def forward(self, net_output: torch.Tensor, target: torch.Tensor):\n        \"\"\"\n        target must be b, c, x, y(, z) with c=1\n        :param net_output:\n        :param target:\n        :return:\n        \"\"\"\n        if self.ignore_label is not None:\n            assert target.shape[1] == 1, 'ignore label is not implemented for one hot encoded target variables ' \\\n                                         '(DC_and_CE_loss)'\n            mask = target != self.ignore_label\n            # remove ignore label from target, replace with one of the known labels. 
It doesn't matter because we\n            # ignore gradients in those areas anyway\n            target_dice = torch.where(mask, target, 0)\n            num_fg = mask.sum()\n        else:\n            target_dice = target\n            mask = None\n\n        dc_loss = self.dc(net_output, target_dice, loss_mask=mask) \\\n            if self.weight_dice != 0 else 0\n        ce_loss = self.ce(net_output, target[:, 0]) \\\n            if self.weight_ce != 0 and (self.ignore_label is None or num_fg > 0) else 0\n\n        result = self.weight_ce * ce_loss + self.weight_dice * dc_loss\n        return result\n\n\nclass DC_and_BCE_loss(nn.Module):\n    def __init__(self, bce_kwargs, soft_dice_kwargs, weight_ce=1, weight_dice=1, use_ignore_label: bool = False,\n                 dice_class=MemoryEfficientSoftDiceLoss):\n        \"\"\"\n        DO NOT APPLY NONLINEARITY IN YOUR NETWORK!\n\n        target mut be one hot encoded\n        IMPORTANT: We assume use_ignore_label is located in target[:, -1]!!!\n\n        :param soft_dice_kwargs:\n        :param bce_kwargs:\n        :param aggregate:\n        \"\"\"\n        super(DC_and_BCE_loss, self).__init__()\n        if use_ignore_label:\n            bce_kwargs['reduction'] = 'none'\n\n        self.weight_dice = weight_dice\n        self.weight_ce = weight_ce\n        self.use_ignore_label = use_ignore_label\n\n        self.ce = nn.BCEWithLogitsLoss(**bce_kwargs)\n        self.dc = dice_class(apply_nonlin=torch.sigmoid, **soft_dice_kwargs)\n\n    def forward(self, net_output: torch.Tensor, target: torch.Tensor):\n        if self.use_ignore_label:\n            # target is one hot encoded here. 
invert it so that it is True wherever we can compute the loss\n            mask = (1 - target[:, -1:]).bool()\n            # remove ignore channel now that we have the mask\n            target_regions = torch.clone(target[:, :-1])\n        else:\n            target_regions = target\n            mask = None\n\n        dc_loss = self.dc(net_output, target_regions, loss_mask=mask)\n        if mask is not None:\n            ce_loss = (self.ce(net_output, target_regions) * mask).sum() / torch.clip(mask.sum(), min=1e-8)\n        else:\n            ce_loss = self.ce(net_output, target_regions)\n        result = self.weight_ce * ce_loss + self.weight_dice * dc_loss\n        return result\n\n\nclass DC_and_topk_loss(nn.Module):\n    def __init__(self, soft_dice_kwargs, ce_kwargs, weight_ce=1, weight_dice=1, ignore_label=None):\n        \"\"\"\n        Weights for CE and Dice do not need to sum to one. You can set whatever you want.\n        :param soft_dice_kwargs:\n        :param ce_kwargs:\n        :param aggregate:\n        :param square_dice:\n        :param weight_ce:\n        :param weight_dice:\n        \"\"\"\n        super().__init__()\n        if ignore_label is not None:\n            ce_kwargs['ignore_index'] = ignore_label\n\n        self.weight_dice = weight_dice\n        self.weight_ce = weight_ce\n        self.ignore_label = ignore_label\n\n        self.ce = TopKLoss(**ce_kwargs)\n        self.dc = SoftDiceLoss(apply_nonlin=softmax_helper_dim1, **soft_dice_kwargs)\n\n    def forward(self, net_output: torch.Tensor, target: torch.Tensor):\n        \"\"\"\n        target must be b, c, x, y(, z) with c=1\n        :param net_output:\n        :param target:\n        :return:\n        \"\"\"\n        if self.ignore_label is not None:\n            assert target.shape[1] == 1, 'ignore label is not implemented for one hot encoded target variables ' \\\n                                         '(DC_and_CE_loss)'\n            mask = (target != self.ignore_label).bool()\n 
           # remove ignore label from target, replace with one of the known labels. It doesn't matter because we\n            # ignore gradients in those areas anyway\n            target_dice = torch.clone(target)\n            target_dice[target == self.ignore_label] = 0\n            num_fg = mask.sum()\n        else:\n            target_dice = target\n            mask = None\n\n        dc_loss = self.dc(net_output, target_dice, loss_mask=mask) \\\n            if self.weight_dice != 0 else 0\n        ce_loss = self.ce(net_output, target) \\\n            if self.weight_ce != 0 and (self.ignore_label is None or num_fg > 0) else 0\n\n        result = self.weight_ce * ce_loss + self.weight_dice * dc_loss\n        return result\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/loss/deep_supervision.py",
    "content": "import torch\nfrom torch import nn\n\n\nclass DeepSupervisionWrapper(nn.Module):\n    def __init__(self, loss, weight_factors=None):\n        \"\"\"\n        Wraps a loss function so that it can be applied to multiple outputs. Forward accepts an arbitrary number of\n        inputs. Each input is expected to be a tuple/list. Each tuple/list must have the same length. The loss is then\n        applied to each entry like this:\n        l = w0 * loss(input0[0], input1[0], ...) +  w1 * loss(input0[1], input1[1], ...) + ...\n        If weights are None, all w will be 1.\n        \"\"\"\n        super(DeepSupervisionWrapper, self).__init__()\n        assert any([x != 0 for x in weight_factors]), \"At least one weight factor should be != 0.0\"\n        self.weight_factors = tuple(weight_factors)\n        self.loss = loss\n\n    def forward(self, *args):\n        assert all([isinstance(i, (tuple, list)) for i in args]), \\\n            f\"all args must be either tuple or list, got {[type(i) for i in args]}\"\n        # we could check for equal lengths here as well, but we really shouldn't overdo it with checks because\n        # this code is executed a lot of times!\n\n        if self.weight_factors is None:\n            weights = (1, ) * len(args[0])\n        else:\n            weights = self.weight_factors\n\n        return sum([weights[i] * self.loss(*inputs) for i, inputs in enumerate(zip(*args)) if weights[i] != 0.0])\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/loss/dice.py",
    "content": "from typing import Callable\n\nimport torch\nfrom nnunetv2.utilities.ddp_allgather import AllGatherGrad\nfrom torch import nn\n\n\nclass SoftDiceLoss(nn.Module):\n    def __init__(self, apply_nonlin: Callable = None, batch_dice: bool = False, do_bg: bool = True, smooth: float = 1.,\n                 ddp: bool = True, clip_tp: float = None):\n        \"\"\"\n        \"\"\"\n        super(SoftDiceLoss, self).__init__()\n\n        self.do_bg = do_bg\n        self.batch_dice = batch_dice\n        self.apply_nonlin = apply_nonlin\n        self.smooth = smooth\n        self.clip_tp = clip_tp\n        self.ddp = ddp\n\n    def forward(self, x, y, loss_mask=None):\n        shp_x = x.shape\n\n        if self.batch_dice:\n            axes = [0] + list(range(2, len(shp_x)))\n        else:\n            axes = list(range(2, len(shp_x)))\n\n        if self.apply_nonlin is not None:\n            x = self.apply_nonlin(x)\n\n        tp, fp, fn, _ = get_tp_fp_fn_tn(x, y, axes, loss_mask, False)\n\n        if self.ddp and self.batch_dice:\n            tp = AllGatherGrad.apply(tp).sum(0)\n            fp = AllGatherGrad.apply(fp).sum(0)\n            fn = AllGatherGrad.apply(fn).sum(0)\n\n        if self.clip_tp is not None:\n            tp = torch.clip(tp, min=self.clip_tp , max=None)\n\n        nominator = 2 * tp\n        denominator = 2 * tp + fp + fn\n\n        dc = (nominator + self.smooth) / (torch.clip(denominator + self.smooth, 1e-8))\n\n        if not self.do_bg:\n            if self.batch_dice:\n                dc = dc[1:]\n            else:\n                dc = dc[:, 1:]\n        dc = dc.mean()\n\n        return -dc\n\n\nclass MemoryEfficientSoftDiceLoss(nn.Module):\n    def __init__(self, apply_nonlin: Callable = None, batch_dice: bool = False, do_bg: bool = True, smooth: float = 1.,\n                 ddp: bool = True):\n        \"\"\"\n        saves 1.6 GB on Dataset017 3d_lowres\n        \"\"\"\n        super(MemoryEfficientSoftDiceLoss, 
self).__init__()\n\n        self.do_bg = do_bg\n        self.batch_dice = batch_dice\n        self.apply_nonlin = apply_nonlin\n        self.smooth = smooth\n        self.ddp = ddp\n\n    def forward(self, x, y, loss_mask=None):\n        if self.apply_nonlin is not None:\n            x = self.apply_nonlin(x)\n\n        # make everything shape (b, c)\n        axes = tuple(range(2, x.ndim))\n\n        with torch.no_grad():\n            if x.ndim != y.ndim:\n                y = y.view((y.shape[0], 1, *y.shape[1:]))\n\n            if x.shape == y.shape:\n                # if this is the case then gt is probably already a one hot encoding\n                y_onehot = y\n            else:\n                y_onehot = torch.zeros(x.shape, device=x.device, dtype=torch.bool)\n                y_onehot.scatter_(1, y.long(), 1)\n\n            if not self.do_bg:\n                y_onehot = y_onehot[:, 1:]\n\n            sum_gt = y_onehot.sum(axes) if loss_mask is None else (y_onehot * loss_mask).sum(axes)\n\n        # this one MUST be outside the with torch.no_grad(): context. 
Otherwise no gradients for you\n        if not self.do_bg:\n            x = x[:, 1:]\n\n        if loss_mask is None:\n            intersect = (x * y_onehot).sum(axes)\n            sum_pred = x.sum(axes)\n        else:\n            intersect = (x * y_onehot * loss_mask).sum(axes)\n            sum_pred = (x * loss_mask).sum(axes)\n\n        if self.batch_dice:\n            if self.ddp:\n                intersect = AllGatherGrad.apply(intersect).sum(0)\n                sum_pred = AllGatherGrad.apply(sum_pred).sum(0)\n                sum_gt = AllGatherGrad.apply(sum_gt).sum(0)\n\n            intersect = intersect.sum(0)\n            sum_pred = sum_pred.sum(0)\n            sum_gt = sum_gt.sum(0)\n\n        dc = (2 * intersect + self.smooth) / (torch.clip(sum_gt + sum_pred + self.smooth, 1e-8))\n\n        dc = dc.mean()\n        return -dc\n\n\ndef get_tp_fp_fn_tn(net_output, gt, axes=None, mask=None, square=False):\n    \"\"\"\n    net_output must be (b, c, x, y(, z)))\n    gt must be a label map (shape (b, 1, x, y(, z)) OR shape (b, x, y(, z))) or one hot encoding (b, c, x, y(, z))\n    if mask is provided it must have shape (b, 1, x, y(, z)))\n    :param net_output:\n    :param gt:\n    :param axes: can be (, ) = no summation\n    :param mask: mask must be 1 for valid pixels and 0 for invalid pixels\n    :param square: if True then fp, tp and fn will be squared before summation\n    :return:\n    \"\"\"\n    if axes is None:\n        axes = tuple(range(2, net_output.ndim))\n\n    with torch.no_grad():\n        if net_output.ndim != gt.ndim:\n            gt = gt.view((gt.shape[0], 1, *gt.shape[1:]))\n\n        if net_output.shape == gt.shape:\n            # if this is the case then gt is probably already a one hot encoding\n            y_onehot = gt\n        else:\n            y_onehot = torch.zeros(net_output.shape, device=net_output.device)\n            y_onehot.scatter_(1, gt.long(), 1)\n\n    tp = net_output * y_onehot\n    fp = net_output * (1 - y_onehot)\n    fn 
= (1 - net_output) * y_onehot\n    tn = (1 - net_output) * (1 - y_onehot)\n\n    if mask is not None:\n        with torch.no_grad():\n            mask_here = torch.tile(mask, (1, tp.shape[1], *[1 for _ in range(2, tp.ndim)]))\n        tp *= mask_here\n        fp *= mask_here\n        fn *= mask_here\n        tn *= mask_here\n        # benchmark whether tiling the mask would be faster (torch.tile). It probably is for large batch sizes\n        # OK it barely makes a difference but the implementation above is a tiny bit faster + uses less vram\n        # (using nnUNetv2_train 998 3d_fullres 0)\n        # tp = torch.stack(tuple(x_i * mask[:, 0] for x_i in torch.unbind(tp, dim=1)), dim=1)\n        # fp = torch.stack(tuple(x_i * mask[:, 0] for x_i in torch.unbind(fp, dim=1)), dim=1)\n        # fn = torch.stack(tuple(x_i * mask[:, 0] for x_i in torch.unbind(fn, dim=1)), dim=1)\n        # tn = torch.stack(tuple(x_i * mask[:, 0] for x_i in torch.unbind(tn, dim=1)), dim=1)\n\n    if square:\n        tp = tp ** 2\n        fp = fp ** 2\n        fn = fn ** 2\n        tn = tn ** 2\n\n    if len(axes) > 0:\n        tp = tp.sum(dim=axes, keepdim=False)\n        fp = fp.sum(dim=axes, keepdim=False)\n        fn = fn.sum(dim=axes, keepdim=False)\n        tn = tn.sum(dim=axes, keepdim=False)\n\n    return tp, fp, fn, tn\n\n\nif __name__ == '__main__':\n    from nnunetv2.utilities.helpers import softmax_helper_dim1\n    pred = torch.rand((2, 3, 32, 32, 32))\n    ref = torch.randint(0, 3, (2, 32, 32, 32))\n\n    dl_old = SoftDiceLoss(apply_nonlin=softmax_helper_dim1, batch_dice=True, do_bg=False, smooth=0, ddp=False)\n    dl_new = MemoryEfficientSoftDiceLoss(apply_nonlin=softmax_helper_dim1, batch_dice=True, do_bg=False, smooth=0, ddp=False)\n    res_old = dl_old(pred, ref)\n    res_new = dl_new(pred, ref)\n    print(res_old, res_new)\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/loss/robust_ce_loss.py",
    "content": "import torch\nfrom torch import nn, Tensor\nimport numpy as np\n\n\nclass RobustCrossEntropyLoss(nn.CrossEntropyLoss):\n    \"\"\"\n    this is just a compatibility layer because my target tensor is float and has an extra dimension\n\n    input must be logits, not probabilities!\n    \"\"\"\n    def forward(self, input: Tensor, target: Tensor) -> Tensor:\n        if target.ndim == input.ndim:\n            assert target.shape[1] == 1\n            target = target[:, 0]\n        return super().forward(input, target.long())\n\n\nclass TopKLoss(RobustCrossEntropyLoss):\n    \"\"\"\n    input must be logits, not probabilities!\n    \"\"\"\n    def __init__(self, weight=None, ignore_index: int = -100, k: float = 10, label_smoothing: float = 0):\n        self.k = k\n        super(TopKLoss, self).__init__(weight, False, ignore_index, reduce=False, label_smoothing=label_smoothing)\n\n    def forward(self, inp, target):\n        target = target[:, 0].long()\n        res = super(TopKLoss, self).forward(inp, target)\n        num_voxels = np.prod(res.shape, dtype=np.int64)\n        res, _ = torch.topk(res.view((-1, )), int(num_voxels * self.k / 100), sorted=False)\n        return res.mean()\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/lr_scheduler/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/lr_scheduler/polylr.py",
    "content": "from torch.optim.lr_scheduler import _LRScheduler\n\n\nclass PolyLRScheduler(_LRScheduler):\n    def __init__(self, optimizer, initial_lr: float, max_steps: int, exponent: float = 0.9, current_step: int = None):\n        self.optimizer = optimizer\n        self.initial_lr = initial_lr\n        self.max_steps = max_steps\n        self.exponent = exponent\n        self.ctr = 0\n        super().__init__(optimizer, current_step if current_step is not None else -1, False)\n\n    def step(self, current_step=None):\n        if current_step is None or current_step == -1:\n            current_step = self.ctr\n            self.ctr += 1\n\n        new_lr = self.initial_lr * (1 - current_step / self.max_steps) ** self.exponent\n        for param_group in self.optimizer.param_groups:\n            param_group['lr'] = new_lr\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/nnUNetTrainer.py",
    "content": "import inspect\nimport multiprocessing\nimport os\nimport shutil\nimport sys\nimport warnings\nfrom copy import deepcopy\nfrom datetime import datetime\nfrom time import time, sleep\nfrom typing import Union, Tuple, List\n\nimport numpy as np\nimport torch\nfrom batchgenerators.dataloading.single_threaded_augmenter import SingleThreadedAugmenter\nfrom batchgenerators.transforms.abstract_transforms import AbstractTransform, Compose\nfrom batchgenerators.transforms.color_transforms import BrightnessMultiplicativeTransform, \\\n    ContrastAugmentationTransform, GammaTransform\nfrom batchgenerators.transforms.noise_transforms import GaussianNoiseTransform, GaussianBlurTransform\nfrom batchgenerators.transforms.resample_transforms import SimulateLowResolutionTransform\nfrom batchgenerators.transforms.spatial_transforms import SpatialTransform, MirrorTransform\nfrom batchgenerators.transforms.utility_transforms import RemoveLabelTransform, RenameTransform, NumpyToTensor\nfrom batchgenerators.utilities.file_and_folder_operations import join, load_json, isfile, save_json, maybe_mkdir_p\nfrom torch._dynamo import OptimizedModule\n\nfrom nnunetv2.configuration import ANISO_THRESHOLD, default_num_processes\nfrom nnunetv2.evaluation.evaluate_predictions import compute_metrics_on_folder\nfrom nnunetv2.inference.export_prediction import export_prediction_from_logits, resample_and_save\nfrom nnunetv2.inference.predict_from_raw_data import nnUNetPredictor\nfrom nnunetv2.inference.sliding_window_prediction import compute_gaussian\nfrom nnunetv2.paths import nnUNet_preprocessed, nnUNet_results\nfrom nnunetv2.training.data_augmentation.compute_initial_patch_size import get_patch_size\nfrom nnunetv2.training.data_augmentation.custom_transforms.cascade_transforms import MoveSegAsOneHotToData, \\\n    ApplyRandomBinaryOperatorTransform, RemoveRandomConnectedComponentFromOneHotEncodingTransform\nfrom 
nnunetv2.training.data_augmentation.custom_transforms.deep_supervision_donwsampling import \\\n    DownsampleSegForDSTransform2\nfrom nnunetv2.training.data_augmentation.custom_transforms.limited_length_multithreaded_augmenter import \\\n    LimitedLenWrapper\nfrom nnunetv2.training.data_augmentation.custom_transforms.masking import MaskTransform\nfrom nnunetv2.training.data_augmentation.custom_transforms.region_based_training import \\\n    ConvertSegmentationToRegionsTransform\nfrom nnunetv2.training.data_augmentation.custom_transforms.transforms_for_dummy_2d import Convert2DTo3DTransform, \\\n    Convert3DTo2DTransform\nfrom nnunetv2.training.dataloading.data_loader_2d import nnUNetDataLoader2D\nfrom nnunetv2.training.dataloading.data_loader_3d import nnUNetDataLoader3D\nfrom nnunetv2.training.dataloading.nnunet_dataset import nnUNetDataset\nfrom nnunetv2.training.dataloading.utils import get_case_identifiers, unpack_dataset\nfrom nnunetv2.training.logging.nnunet_logger import nnUNetLogger\nfrom nnunetv2.training.loss.compound_losses import DC_and_CE_loss, DC_and_BCE_loss\nfrom nnunetv2.training.loss.deep_supervision import DeepSupervisionWrapper\nfrom nnunetv2.training.loss.dice import get_tp_fp_fn_tn, MemoryEfficientSoftDiceLoss\nfrom nnunetv2.training.lr_scheduler.polylr import PolyLRScheduler\nfrom nnunetv2.utilities.collate_outputs import collate_outputs\nfrom nnunetv2.utilities.default_n_proc_DA import get_allowed_n_proc_DA\nfrom nnunetv2.utilities.file_path_utilities import check_workers_alive_and_busy\nfrom nnunetv2.utilities.get_network_from_plans import get_network_from_plans\nfrom nnunetv2.utilities.helpers import empty_cache, dummy_context\nfrom nnunetv2.utilities.label_handling.label_handling import convert_labelmap_to_one_hot, determine_num_input_channels\nfrom nnunetv2.utilities.plans_handling.plans_handler import PlansManager, ConfigurationManager\nfrom sklearn.model_selection import KFold\nfrom torch import autocast, nn\nfrom torch import 
distributed as dist\nfrom torch.cuda import device_count\nfrom torch.cuda.amp import GradScaler\nfrom torch.nn.parallel import DistributedDataParallel as DDP\n\n\nclass nnUNetTrainer(object):\n    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 device: torch.device = torch.device('cuda')):\n        # From https://grugbrain.dev/. Worth a read ya big brains ;-)\n\n        # apex predator of grug is complexity\n        # complexity bad\n        # say again:\n        # complexity very bad\n        # you say now:\n        # complexity very, very bad\n        # given choice between complexity or one on one against t-rex, grug take t-rex: at least grug see t-rex\n        # complexity is spirit demon that enter codebase through well-meaning but ultimately very clubbable non grug-brain developers and project managers who not fear complexity spirit demon or even know about sometime\n        # one day code base understandable and grug can get work done, everything good!\n        # next day impossible: complexity demon spirit has entered code and very dangerous situation!\n\n        # OK OK I am guilty. But I tried.\n        # https://www.osnews.com/images/comics/wtfm.jpg\n        # https://i.pinimg.com/originals/26/b2/50/26b250a738ea4abc7a5af4d42ad93af0.jpg\n\n        self.is_ddp = dist.is_available() and dist.is_initialized()\n        self.local_rank = 0 if not self.is_ddp else dist.get_rank()\n\n        self.device = device\n\n        # print what device we are using\n        if self.is_ddp:  # implicitly it's clear that we use cuda in this case\n            print(f\"I am local rank {self.local_rank}. {device_count()} GPUs are available. 
The world size is \"\n                  f\"{dist.get_world_size()}.\"\n                  f\"Setting device to {self.device}\")\n            self.device = torch.device(type='cuda', index=self.local_rank)\n        else:\n            if self.device.type == 'cuda':\n                # we might want to let the user pick this but for now please pick the correct GPU with CUDA_VISIBLE_DEVICES=X\n                self.device = torch.device(type='cuda', index=0)\n            print(f\"Using device: {self.device}\")\n\n        # loading and saving this class for continuing from checkpoint should not happen based on pickling. This\n        # would also pickle the network etc. Bad, bad. Instead we just reinstantiate and then load the checkpoint we\n        # need. So let's save the init args\n        self.my_init_kwargs = {}\n        for k in inspect.signature(self.__init__).parameters.keys():\n            self.my_init_kwargs[k] = locals()[k]\n\n        ###  Saving all the init args into class variables for later access\n        self.plans_manager = PlansManager(plans)\n        self.configuration_manager = self.plans_manager.get_configuration(configuration)\n        self.configuration_name = configuration\n        self.dataset_json = dataset_json\n        self.fold = fold\n        self.unpack_dataset = unpack_dataset\n\n        ### Setting all the folder names. 
We need to make sure things don't crash in case we are just running\n        # inference and some of the folders may not be defined!\n        self.preprocessed_dataset_folder_base = join(nnUNet_preprocessed, self.plans_manager.dataset_name) \\\n            if nnUNet_preprocessed is not None else None\n        self.output_folder_base = join(nnUNet_results, self.plans_manager.dataset_name,\n                                       self.__class__.__name__ + '__' + self.plans_manager.plans_name + \"__\" + configuration) \\\n            if nnUNet_results is not None else None\n        self.output_folder = join(self.output_folder_base, f'fold_{fold}')\n\n        self.preprocessed_dataset_folder = join(self.preprocessed_dataset_folder_base,\n                                                self.configuration_manager.data_identifier)\n        # unlike the previous nnunet folder_with_segs_from_previous_stage is now part of the plans. For now it has to\n        # be a different configuration in the same plans\n        # IMPORTANT! the mapping must be bijective, so lowres must point to fullres and vice versa (using\n        # \"previous_stage\" and \"next_stage\"). 
Otherwise it won't work!\n        self.is_cascaded = self.configuration_manager.previous_stage_name is not None\n        self.folder_with_segs_from_previous_stage = \\\n            join(nnUNet_results, self.plans_manager.dataset_name,\n                 self.__class__.__name__ + '__' + self.plans_manager.plans_name + \"__\" +\n                 self.configuration_manager.previous_stage_name, 'predicted_next_stage', self.configuration_name) \\\n                if self.is_cascaded else None\n\n        ### Some hyperparameters for you to fiddle with\n        self.initial_lr = 1e-2\n        self.weight_decay = 3e-5\n        self.oversample_foreground_percent = 0.33\n        self.num_iterations_per_epoch = 250\n        self.num_val_iterations_per_epoch = 50\n        self.num_epochs = 1000\n        self.current_epoch = 0\n        self.enable_deep_supervision = True\n\n        ### Dealing with labels/regions\n        self.label_manager = self.plans_manager.get_label_manager(dataset_json)\n        # labels can either be a list of int (regular training) or a list of tuples of int (region-based training)\n        # needed for predictions. We do sigmoid in case of (overlapping) regions\n\n        self.num_input_channels = None  # -> self.initialize()\n        self.network = None  # -> self._get_network()\n        self.optimizer = self.lr_scheduler = None  # -> self.initialize\n        self.grad_scaler = GradScaler() if self.device.type == 'cuda' else None\n        self.loss = None  # -> self.initialize\n\n        ### Simple logging. Don't take that away from me!\n        # initialize log file. This is just our log for the print statements etc. 
Not to be confused with lightning\n        # logging\n        timestamp = datetime.now()\n        maybe_mkdir_p(self.output_folder)\n        self.log_file = join(self.output_folder, \"training_log_%d_%d_%d_%02.0d_%02.0d_%02.0d.txt\" %\n                             (timestamp.year, timestamp.month, timestamp.day, timestamp.hour, timestamp.minute,\n                              timestamp.second))\n        self.logger = nnUNetLogger()\n\n        ### placeholders\n        self.dataloader_train = self.dataloader_val = None  # see on_train_start\n\n        ### initializing stuff for remembering things and such\n        self._best_ema = None\n\n        ### inference things\n        self.inference_allowed_mirroring_axes = None  # this variable is set in\n        # self.configure_rotation_dummyDA_mirroring_and_inital_patch_size and will be saved in checkpoints\n\n        ### checkpoint saving stuff\n        self.save_every = 50\n        self.disable_checkpointing = False\n\n        ## DDP batch size and oversampling can differ between workers and needs adaptation\n        # we need to change the batch size in DDP because we don't use any of those distributed samplers\n        self._set_batch_size_and_oversample()\n\n        self.was_initialized = False\n\n        self.print_to_log_file(\"\\n#######################################################################\\n\"\n                               \"Please cite the following paper when using nnU-Net:\\n\"\n                               \"Isensee, F., Jaeger, P. F., Kohl, S. A., Petersen, J., & Maier-Hein, K. H. (2021). \"\n                               \"nnU-Net: a self-configuring method for deep learning-based biomedical image segmentation. 
\"\n                               \"Nature methods, 18(2), 203-211.\\n\"\n                               \"#######################################################################\\n\",\n                               also_print_to_console=True, add_timestamp=False)\n\n    def initialize(self):\n        if not self.was_initialized:\n            self.num_input_channels = determine_num_input_channels(self.plans_manager, self.configuration_manager,\n                                                                   self.dataset_json)\n\n            self.network = self.build_network_architecture(\n                self.plans_manager,\n                self.dataset_json,\n                self.configuration_manager,\n                self.num_input_channels,\n                self.enable_deep_supervision,\n            ).to(self.device)\n            # compile network for free speedup\n            if self._do_i_compile():\n                self.print_to_log_file('Compiling network...')\n                self.network = torch.compile(self.network)\n\n            self.optimizer, self.lr_scheduler = self.configure_optimizers()\n            # if ddp, wrap in DDP wrapper\n            if self.is_ddp:\n                self.network = torch.nn.SyncBatchNorm.convert_sync_batchnorm(self.network)\n                self.network = DDP(self.network, device_ids=[self.local_rank])\n\n            self.loss = self._build_loss()\n            self.was_initialized = True\n        else:\n            raise RuntimeError(\"You have called self.initialize even though the trainer was already initialized. 
\"\n                               \"That should not happen.\")\n\n    def _do_i_compile(self):\n        return ('nnUNet_compile' in os.environ.keys()) and (os.environ['nnUNet_compile'].lower() in ('true', '1', 't'))\n\n    def _save_debug_information(self):\n        # saving some debug information\n        if self.local_rank == 0:\n            dct = {}\n            for k in self.__dir__():\n                if not k.startswith(\"__\"):\n                    if not callable(getattr(self, k)) or k in ['loss', ]:\n                        dct[k] = str(getattr(self, k))\n                    elif k in ['network', ]:\n                        dct[k] = str(getattr(self, k).__class__.__name__)\n                    else:\n                        # print(k)\n                        pass\n                if k in ['dataloader_train', 'dataloader_val']:\n                    if hasattr(getattr(self, k), 'generator'):\n                        dct[k + '.generator'] = str(getattr(self, k).generator)\n                    if hasattr(getattr(self, k), 'num_processes'):\n                        dct[k + '.num_processes'] = str(getattr(self, k).num_processes)\n                    if hasattr(getattr(self, k), 'transform'):\n                        dct[k + '.transform'] = str(getattr(self, k).transform)\n            import subprocess\n            hostname = subprocess.getoutput(['hostname'])\n            dct['hostname'] = hostname\n            torch_version = torch.__version__\n            if self.device.type == 'cuda':\n                gpu_name = torch.cuda.get_device_name()\n                dct['gpu_name'] = gpu_name\n                cudnn_version = torch.backends.cudnn.version()\n            else:\n                cudnn_version = 'None'\n            dct['device'] = str(self.device)\n            dct['torch_version'] = torch_version\n            dct['cudnn_version'] = cudnn_version\n            save_json(dct, join(self.output_folder, \"debug.json\"))\n\n    @staticmethod\n    def 
build_network_architecture(plans_manager: PlansManager,\n                                   dataset_json,\n                                   configuration_manager: ConfigurationManager,\n                                   num_input_channels,\n                                   enable_deep_supervision: bool = True) -> nn.Module:\n        \"\"\"\n        This is where you build the architecture according to the plans. There is no obligation to use\n        get_network_from_plans, this is just a utility we use for the nnU-Net default architectures. You can do what\n        you want. Even ignore the plans and just return something static (as long as it can process the requested\n        patch size)\n        but don't bug us with your bugs arising from fiddling with this :-P\n        This is the function that is called in inference as well! This is needed so that all network architecture\n        variants can be loaded at inference time (inference will use the same nnUNetTrainer that was used for\n        training, so if you change the network architecture during training by deriving a new trainer class then\n        inference will know about it).\n\n        If you need to know how many segmentation outputs your custom architecture needs to have, use the following snippet:\n        > label_manager = plans_manager.get_label_manager(dataset_json)\n        > label_manager.num_segmentation_heads\n        (why so complicated? -> We can have either classical training (classes) or regions. If we have regions,\n        the number of outputs is != the number of classes. Also there is the ignore label for which no output\n        should be generated. 
label_manager takes care of all that for you.)\n\n        \"\"\"\n        return get_network_from_plans(plans_manager, dataset_json, configuration_manager,\n                                      num_input_channels, deep_supervision=enable_deep_supervision)\n\n    def _get_deep_supervision_scales(self):\n        if self.enable_deep_supervision:\n            deep_supervision_scales = list(list(i) for i in 1 / np.cumprod(np.vstack(\n                self.configuration_manager.pool_op_kernel_sizes), axis=0))[:-1]\n        else:\n            deep_supervision_scales = None  # for train and val_transforms\n        return deep_supervision_scales\n\n    def _set_batch_size_and_oversample(self):\n        if not self.is_ddp:\n            # set batch size to what the plan says, leave oversample untouched\n            self.batch_size = self.configuration_manager.batch_size\n        else:\n            # batch size is distributed over DDP workers and we need to change oversample_percent for each worker\n            batch_sizes = []\n            oversample_percents = []\n\n            world_size = dist.get_world_size()\n            my_rank = dist.get_rank()\n\n            global_batch_size = self.configuration_manager.batch_size\n            assert global_batch_size >= world_size, 'Cannot run DDP if the batch size is smaller than the number of ' \\\n                                                    'GPUs... 
Duh.'\n\n            batch_size_per_GPU = np.ceil(global_batch_size / world_size).astype(int)\n\n            for rank in range(world_size):\n                if (rank + 1) * batch_size_per_GPU > global_batch_size:\n                    batch_size = batch_size_per_GPU - ((rank + 1) * batch_size_per_GPU - global_batch_size)\n                else:\n                    batch_size = batch_size_per_GPU\n\n                batch_sizes.append(batch_size)\n\n                sample_id_low = 0 if len(batch_sizes) == 0 else np.sum(batch_sizes[:-1])\n                sample_id_high = np.sum(batch_sizes)\n\n                if sample_id_high / global_batch_size < (1 - self.oversample_foreground_percent):\n                    oversample_percents.append(0.0)\n                elif sample_id_low / global_batch_size > (1 - self.oversample_foreground_percent):\n                    oversample_percents.append(1.0)\n                else:\n                    percent_covered_by_this_rank = sample_id_high / global_batch_size - sample_id_low / global_batch_size\n                    oversample_percent_here = 1 - (((1 - self.oversample_foreground_percent) -\n                                                    sample_id_low / global_batch_size) / percent_covered_by_this_rank)\n                    oversample_percents.append(oversample_percent_here)\n\n            print(\"worker\", my_rank, \"oversample\", oversample_percents[my_rank])\n            print(\"worker\", my_rank, \"batch_size\", batch_sizes[my_rank])\n            # self.print_to_log_file(\"worker\", my_rank, \"oversample\", oversample_percents[my_rank])\n            # self.print_to_log_file(\"worker\", my_rank, \"batch_size\", batch_sizes[my_rank])\n\n            self.batch_size = batch_sizes[my_rank]\n            self.oversample_foreground_percent = oversample_percents[my_rank]\n\n    def _build_loss(self):\n        if self.label_manager.has_regions:\n            loss = DC_and_BCE_loss({},\n                                   
{'batch_dice': self.configuration_manager.batch_dice,\n                                    'do_bg': True, 'smooth': 1e-5, 'ddp': self.is_ddp},\n                                   use_ignore_label=self.label_manager.ignore_label is not None,\n                                   dice_class=MemoryEfficientSoftDiceLoss)\n        else:\n            loss = DC_and_CE_loss({'batch_dice': self.configuration_manager.batch_dice,\n                                   'smooth': 1e-5, 'do_bg': False, 'ddp': self.is_ddp}, {}, weight_ce=1, weight_dice=1,\n                                  ignore_label=self.label_manager.ignore_label, dice_class=MemoryEfficientSoftDiceLoss)\n\n        # we give each output a weight which decreases exponentially (division by 2) as the resolution decreases\n        # this gives higher resolution outputs more weight in the loss\n\n        if self.enable_deep_supervision:\n            deep_supervision_scales = self._get_deep_supervision_scales()\n            weights = np.array([1 / (2**i) for i in range(len(deep_supervision_scales))])\n            weights[-1] = 0\n\n            # we don't use the lowest 2 outputs. Normalize weights so that they sum to 1\n            weights = weights / weights.sum()\n            # now wrap the loss\n            loss = DeepSupervisionWrapper(loss, weights)\n        return loss\n\n    def configure_rotation_dummyDA_mirroring_and_inital_patch_size(self):\n        \"\"\"\n        This function is stupid and certainly one of the weakest spots of this implementation. 
Not entirely sure how we can fix it.\n        \"\"\"\n        patch_size = self.configuration_manager.patch_size\n        dim = len(patch_size)\n        # todo rotation should be defined dynamically based on patch size (more isotropic patch sizes = more rotation)\n        if dim == 2:\n            do_dummy_2d_data_aug = False\n            # todo revisit this parametrization\n            if max(patch_size) / min(patch_size) > 1.5:\n                rotation_for_DA = {\n                    'x': (-15. / 360 * 2. * np.pi, 15. / 360 * 2. * np.pi),\n                    'y': (0, 0),\n                    'z': (0, 0)\n                }\n            else:\n                rotation_for_DA = {\n                    'x': (-180. / 360 * 2. * np.pi, 180. / 360 * 2. * np.pi),\n                    'y': (0, 0),\n                    'z': (0, 0)\n                }\n            mirror_axes = (0, 1)\n        elif dim == 3:\n            # todo this is not ideal. We could also have patch_size (64, 16, 128) in which case a full 180deg 2d rot would be bad\n            # order of the axes is determined by spacing, not image size\n            do_dummy_2d_data_aug = (max(patch_size) / patch_size[0]) > ANISO_THRESHOLD\n            if do_dummy_2d_data_aug:\n                # why do we rotate 180 deg here all the time? We should also restrict it\n                rotation_for_DA = {\n                    'x': (-180. / 360 * 2. * np.pi, 180. / 360 * 2. * np.pi),\n                    'y': (0, 0),\n                    'z': (0, 0)\n                }\n            else:\n                rotation_for_DA = {\n                    'x': (-30. / 360 * 2. * np.pi, 30. / 360 * 2. * np.pi),\n                    'y': (-30. / 360 * 2. * np.pi, 30. / 360 * 2. * np.pi),\n                    'z': (-30. / 360 * 2. * np.pi, 30. / 360 * 2. * np.pi),\n                }\n            mirror_axes = (0, 1, 2)\n        else:\n            raise RuntimeError()\n\n        # todo this function is stupid. 
It doesn't even use the correct scale range (we keep things as they were in the\n        #  old nnunet for now)\n        initial_patch_size = get_patch_size(patch_size[-dim:],\n                                            *rotation_for_DA.values(),\n                                            (0.85, 1.25))\n        if do_dummy_2d_data_aug:\n            initial_patch_size[0] = patch_size[0]\n\n        self.print_to_log_file(f'do_dummy_2d_data_aug: {do_dummy_2d_data_aug}')\n        self.inference_allowed_mirroring_axes = mirror_axes\n\n        return rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, mirror_axes\n\n    def print_to_log_file(self, *args, also_print_to_console=True, add_timestamp=True):\n        if self.local_rank == 0:\n            timestamp = time()\n            dt_object = datetime.fromtimestamp(timestamp)\n\n            if add_timestamp:\n                args = (f\"{dt_object}:\", *args)\n\n            successful = False\n            max_attempts = 5\n            ctr = 0\n            while not successful and ctr < max_attempts:\n                try:\n                    with open(self.log_file, 'a+') as f:\n                        for a in args:\n                            f.write(str(a))\n                            f.write(\" \")\n                        f.write(\"\\n\")\n                    successful = True\n                except IOError:\n                    print(f\"{datetime.fromtimestamp(timestamp)}: failed to log: \", sys.exc_info())\n                    sleep(0.5)\n                    ctr += 1\n            if also_print_to_console:\n                print(*args)\n        elif also_print_to_console:\n            print(*args)\n\n    def print_plans(self):\n        if self.local_rank == 0:\n            dct = deepcopy(self.plans_manager.plans)\n            del dct['configurations']\n            self.print_to_log_file(f\"\\nThis is the configuration used by this \"\n                                   f\"training:\\nConfiguration name: 
{self.configuration_name}\\n\",\n                                   self.configuration_manager, '\\n', add_timestamp=False)\n            self.print_to_log_file('These are the global plan.json settings:\\n', dct, '\\n', add_timestamp=False)\n\n    def configure_optimizers(self):\n        optimizer = torch.optim.SGD(self.network.parameters(), self.initial_lr, weight_decay=self.weight_decay,\n                                    momentum=0.99, nesterov=True)\n        lr_scheduler = PolyLRScheduler(optimizer, self.initial_lr, self.num_epochs)\n        return optimizer, lr_scheduler\n\n    def plot_network_architecture(self):\n        if self._do_i_compile():\n            self.print_to_log_file(\"Unable to plot network architecture: nnUNet_compile is enabled!\")\n            return\n\n        if self.local_rank == 0:\n            try:\n                # raise NotImplementedError('hiddenlayer no longer works and we do not have a viable alternative :-(')\n                # pip install git+https://github.com/saugatkandel/hiddenlayer.git\n\n                # from torchviz import make_dot\n                # # not viable.\n                # make_dot(tuple(self.network(torch.rand((1, self.num_input_channels,\n                #                                         *self.configuration_manager.patch_size),\n                #                                        device=self.device)))).render(\n                #     join(self.output_folder, \"network_architecture.pdf\"), format='pdf')\n                # self.optimizer.zero_grad()\n\n                # broken.\n\n                import hiddenlayer as hl\n                g = hl.build_graph(self.network,\n                                   torch.rand((1, self.num_input_channels,\n                                               *self.configuration_manager.patch_size),\n                                              device=self.device),\n                                   transforms=None)\n                
g.save(join(self.output_folder, \"network_architecture.pdf\"))\n                del g\n            except Exception as e:\n                self.print_to_log_file(\"Unable to plot network architecture:\")\n                self.print_to_log_file(e)\n\n                # self.print_to_log_file(\"\\nprinting the network instead:\\n\")\n                # self.print_to_log_file(self.network)\n                # self.print_to_log_file(\"\\n\")\n            finally:\n                empty_cache(self.device)\n\n    def do_split(self):\n        \"\"\"\n        The default split is a 5 fold CV on all available training cases. nnU-Net will create a split (it is seeded,\n        so always the same) and save it as splits_final.pkl file in the preprocessed data directory.\n        Sometimes you may want to create your own split for various reasons. For this you will need to create your own\n        splits_final.pkl file. If this file is present, nnU-Net is going to use it and whatever splits are defined in\n        it. You can create as many splits in this file as you want. 
Note that if you define only 4 splits (fold 0-3)\n        and then set fold=4 when training (that would be the fifth split), nnU-Net will print a warning and proceed to\n        use a random 80:20 data split.\n        :return:\n        \"\"\"\n        if self.fold == \"all\":\n            # if fold==all then we use all images for training and validation\n            case_identifiers = get_case_identifiers(self.preprocessed_dataset_folder)\n            tr_keys = case_identifiers\n            val_keys = tr_keys\n        else:\n            splits_file = join(self.preprocessed_dataset_folder_base, \"splits_final.json\")\n            dataset = nnUNetDataset(self.preprocessed_dataset_folder, case_identifiers=None,\n                                    num_images_properties_loading_threshold=0,\n                                    folder_with_segs_from_previous_stage=self.folder_with_segs_from_previous_stage)\n            # if the split file does not exist we need to create it\n            if not isfile(splits_file):\n                self.print_to_log_file(\"Creating new 5-fold cross-validation split...\")\n                splits = []\n                all_keys_sorted = np.sort(list(dataset.keys()))\n                kfold = KFold(n_splits=5, shuffle=True, random_state=12345)\n                for i, (train_idx, test_idx) in enumerate(kfold.split(all_keys_sorted)):\n                    train_keys = np.array(all_keys_sorted)[train_idx]\n                    test_keys = np.array(all_keys_sorted)[test_idx]\n                    splits.append({})\n                    splits[-1]['train'] = list(train_keys)\n                    splits[-1]['val'] = list(test_keys)\n                save_json(splits, splits_file)\n\n            else:\n                self.print_to_log_file(\"Using splits from existing split file:\", splits_file)\n                splits = load_json(splits_file)\n                self.print_to_log_file(f\"The split file contains {len(splits)} splits.\")\n\n            
self.print_to_log_file(\"Desired fold for training: %d\" % self.fold)\n            if self.fold < len(splits):\n                tr_keys = splits[self.fold]['train']\n                val_keys = splits[self.fold]['val']\n                self.print_to_log_file(\"This split has %d training and %d validation cases.\"\n                                       % (len(tr_keys), len(val_keys)))\n            else:\n                self.print_to_log_file(\"INFO: You requested fold %d for training but splits \"\n                                       \"contain only %d folds. I am now creating a \"\n                                       \"random (but seeded) 80:20 split!\" % (self.fold, len(splits)))\n                # if we request a fold that is not in the split file, create a random 80:20 split\n                rnd = np.random.RandomState(seed=12345 + self.fold)\n                keys = np.sort(list(dataset.keys()))\n                idx_tr = rnd.choice(len(keys), int(len(keys) * 0.8), replace=False)\n                idx_val = [i for i in range(len(keys)) if i not in idx_tr]\n                tr_keys = [keys[i] for i in idx_tr]\n                val_keys = [keys[i] for i in idx_val]\n                self.print_to_log_file(\"This random 80:20 split has %d training and %d validation cases.\"\n                                       % (len(tr_keys), len(val_keys)))\n            if any([i in val_keys for i in tr_keys]):\n                self.print_to_log_file('WARNING: Some validation cases are also in the training set. Please check the '\n                                       'splits.json or ignore if this is intentional.')\n        return tr_keys, val_keys\n\n    def get_tr_and_val_datasets(self):\n        # create dataset split\n        tr_keys, val_keys = self.do_split()\n\n        # load the datasets for training and validation. 
Note that we always draw random samples so we really don't\n        # care about distributing training cases across GPUs.\n        dataset_tr = nnUNetDataset(self.preprocessed_dataset_folder, tr_keys,\n                                   folder_with_segs_from_previous_stage=self.folder_with_segs_from_previous_stage,\n                                   num_images_properties_loading_threshold=0)\n        dataset_val = nnUNetDataset(self.preprocessed_dataset_folder, val_keys,\n                                    folder_with_segs_from_previous_stage=self.folder_with_segs_from_previous_stage,\n                                    num_images_properties_loading_threshold=0)\n        return dataset_tr, dataset_val\n\n    def get_dataloaders(self):\n        # we use the patch size to determine whether we need 2D or 3D dataloaders. We also use it to determine whether\n        # we need to use dummy 2D augmentation (in case of 3D training) and what our initial patch size should be\n        patch_size = self.configuration_manager.patch_size\n        dim = len(patch_size)\n\n        # needed for deep supervision: how much do we need to downscale the segmentation targets for the different\n        # outputs?\n\n        deep_supervision_scales = self._get_deep_supervision_scales()\n\n        (\n            rotation_for_DA,\n            do_dummy_2d_data_aug,\n            initial_patch_size,\n            mirror_axes,\n        ) = self.configure_rotation_dummyDA_mirroring_and_inital_patch_size()\n\n        # training pipeline\n        tr_transforms = self.get_training_transforms(\n            patch_size, rotation_for_DA, deep_supervision_scales, mirror_axes, do_dummy_2d_data_aug,\n            order_resampling_data=3, order_resampling_seg=1,\n            use_mask_for_norm=self.configuration_manager.use_mask_for_norm,\n            is_cascaded=self.is_cascaded, foreground_labels=self.label_manager.foreground_labels,\n            regions=self.label_manager.foreground_regions if 
self.label_manager.has_regions else None,\n            ignore_label=self.label_manager.ignore_label)\n\n        # validation pipeline\n        val_transforms = self.get_validation_transforms(deep_supervision_scales,\n                                                        is_cascaded=self.is_cascaded,\n                                                        foreground_labels=self.label_manager.foreground_labels,\n                                                        regions=self.label_manager.foreground_regions if\n                                                        self.label_manager.has_regions else None,\n                                                        ignore_label=self.label_manager.ignore_label)\n\n        dl_tr, dl_val = self.get_plain_dataloaders(initial_patch_size, dim)\n\n        allowed_num_processes = get_allowed_n_proc_DA()\n        if allowed_num_processes == 0:\n            mt_gen_train = SingleThreadedAugmenter(dl_tr, tr_transforms)\n            mt_gen_val = SingleThreadedAugmenter(dl_val, val_transforms)\n        else:\n            mt_gen_train = LimitedLenWrapper(self.num_iterations_per_epoch, data_loader=dl_tr, transform=tr_transforms,\n                                             num_processes=allowed_num_processes, num_cached=6, seeds=None,\n                                             pin_memory=self.device.type == 'cuda', wait_time=0.02)\n            mt_gen_val = LimitedLenWrapper(self.num_val_iterations_per_epoch, data_loader=dl_val,\n                                           transform=val_transforms, num_processes=max(1, allowed_num_processes // 2),\n                                           num_cached=3, seeds=None, pin_memory=self.device.type == 'cuda',\n                                           wait_time=0.02)\n        return mt_gen_train, mt_gen_val\n\n    def get_plain_dataloaders(self, initial_patch_size: Tuple[int, ...], dim: int):\n        dataset_tr, dataset_val = self.get_tr_and_val_datasets()\n\n        if dim == 
2:\n            dl_tr = nnUNetDataLoader2D(dataset_tr, self.batch_size,\n                                       initial_patch_size,\n                                       self.configuration_manager.patch_size,\n                                       self.label_manager,\n                                       oversample_foreground_percent=self.oversample_foreground_percent,\n                                       sampling_probabilities=None, pad_sides=None)\n            dl_val = nnUNetDataLoader2D(dataset_val, self.batch_size,\n                                        self.configuration_manager.patch_size,\n                                        self.configuration_manager.patch_size,\n                                        self.label_manager,\n                                        oversample_foreground_percent=self.oversample_foreground_percent,\n                                        sampling_probabilities=None, pad_sides=None)\n        else:\n            dl_tr = nnUNetDataLoader3D(dataset_tr, self.batch_size,\n                                       initial_patch_size,\n                                       self.configuration_manager.patch_size,\n                                       self.label_manager,\n                                       oversample_foreground_percent=self.oversample_foreground_percent,\n                                       sampling_probabilities=None, pad_sides=None)\n            dl_val = nnUNetDataLoader3D(dataset_val, self.batch_size,\n                                        self.configuration_manager.patch_size,\n                                        self.configuration_manager.patch_size,\n                                        self.label_manager,\n                                        oversample_foreground_percent=self.oversample_foreground_percent,\n                                        sampling_probabilities=None, pad_sides=None)\n        return dl_tr, dl_val\n\n    @staticmethod\n    def get_training_transforms(\n        
patch_size: Union[np.ndarray, Tuple[int]],\n        rotation_for_DA: dict,\n        deep_supervision_scales: Union[List, Tuple, None],\n        mirror_axes: Tuple[int, ...],\n        do_dummy_2d_data_aug: bool,\n        order_resampling_data: int = 3,\n        order_resampling_seg: int = 1,\n        border_val_seg: int = -1,\n        use_mask_for_norm: List[bool] = None,\n        is_cascaded: bool = False,\n        foreground_labels: Union[Tuple[int, ...], List[int]] = None,\n        regions: List[Union[List[int], Tuple[int, ...], int]] = None,\n        ignore_label: int = None,\n    ) -> AbstractTransform:\n        tr_transforms = []\n        if do_dummy_2d_data_aug:\n            ignore_axes = (0,)\n            tr_transforms.append(Convert3DTo2DTransform())\n            patch_size_spatial = patch_size[1:]\n        else:\n            patch_size_spatial = patch_size\n            ignore_axes = None\n\n        tr_transforms.append(SpatialTransform(\n            patch_size_spatial, patch_center_dist_from_border=None,\n            do_elastic_deform=False, alpha=(0, 0), sigma=(0, 0),\n            do_rotation=True, angle_x=rotation_for_DA['x'], angle_y=rotation_for_DA['y'], angle_z=rotation_for_DA['z'],\n            p_rot_per_axis=1,  # todo experiment with this\n            do_scale=True, scale=(0.7, 1.4),\n            border_mode_data=\"constant\", border_cval_data=0, order_data=order_resampling_data,\n            border_mode_seg=\"constant\", border_cval_seg=border_val_seg, order_seg=order_resampling_seg,\n            random_crop=False,  # random cropping is part of our dataloaders\n            p_el_per_sample=0, p_scale_per_sample=0.2, p_rot_per_sample=0.2,\n            independent_scale_for_each_axis=False  # todo experiment with this\n        ))\n\n        if do_dummy_2d_data_aug:\n            tr_transforms.append(Convert2DTo3DTransform())\n\n        tr_transforms.append(GaussianNoiseTransform(p_per_sample=0.1))\n        
tr_transforms.append(GaussianBlurTransform((0.5, 1.), different_sigma_per_channel=True, p_per_sample=0.2,\n                                                   p_per_channel=0.5))\n        tr_transforms.append(BrightnessMultiplicativeTransform(multiplier_range=(0.75, 1.25), p_per_sample=0.15))\n        tr_transforms.append(ContrastAugmentationTransform(p_per_sample=0.15))\n        tr_transforms.append(SimulateLowResolutionTransform(zoom_range=(0.5, 1), per_channel=True,\n                                                            p_per_channel=0.5,\n                                                            order_downsample=0, order_upsample=3, p_per_sample=0.25,\n                                                            ignore_axes=ignore_axes))\n        tr_transforms.append(GammaTransform((0.7, 1.5), True, True, retain_stats=True, p_per_sample=0.1))\n        tr_transforms.append(GammaTransform((0.7, 1.5), False, True, retain_stats=True, p_per_sample=0.3))\n\n        if mirror_axes is not None and len(mirror_axes) > 0:\n            tr_transforms.append(MirrorTransform(mirror_axes))\n\n        if use_mask_for_norm is not None and any(use_mask_for_norm):\n            tr_transforms.append(MaskTransform([i for i in range(len(use_mask_for_norm)) if use_mask_for_norm[i]],\n                                               mask_idx_in_seg=0, set_outside_to=0))\n\n        tr_transforms.append(RemoveLabelTransform(-1, 0))\n\n        if is_cascaded:\n            assert foreground_labels is not None, 'We need foreground_labels for cascade augmentations'\n            tr_transforms.append(MoveSegAsOneHotToData(1, foreground_labels, 'seg', 'data'))\n            tr_transforms.append(ApplyRandomBinaryOperatorTransform(\n                channel_idx=list(range(-len(foreground_labels), 0)),\n                p_per_sample=0.4,\n                key=\"data\",\n                strel_size=(1, 8),\n                p_per_label=1))\n            tr_transforms.append(\n                
RemoveRandomConnectedComponentFromOneHotEncodingTransform(\n                    channel_idx=list(range(-len(foreground_labels), 0)),\n                    key=\"data\",\n                    p_per_sample=0.2,\n                    fill_with_other_class_p=0,\n                    dont_do_if_covers_more_than_x_percent=0.15))\n\n        tr_transforms.append(RenameTransform('seg', 'target', True))\n\n        if regions is not None:\n            # the ignore label must also be converted\n            tr_transforms.append(ConvertSegmentationToRegionsTransform(list(regions) + [ignore_label]\n                                                                       if ignore_label is not None else regions,\n                                                                       'target', 'target'))\n\n        if deep_supervision_scales is not None:\n            tr_transforms.append(DownsampleSegForDSTransform2(deep_supervision_scales, 0, input_key='target',\n                                                              output_key='target'))\n        tr_transforms.append(NumpyToTensor(['data', 'target'], 'float'))\n        tr_transforms = Compose(tr_transforms)\n        return tr_transforms\n\n    @staticmethod\n    def get_validation_transforms(\n        deep_supervision_scales: Union[List, Tuple, None],\n        is_cascaded: bool = False,\n        foreground_labels: Union[Tuple[int, ...], List[int]] = None,\n        regions: List[Union[List[int], Tuple[int, ...], int]] = None,\n        ignore_label: int = None,\n    ) -> AbstractTransform:\n        val_transforms = []\n        val_transforms.append(RemoveLabelTransform(-1, 0))\n\n        if is_cascaded:\n            val_transforms.append(MoveSegAsOneHotToData(1, foreground_labels, 'seg', 'data'))\n\n        val_transforms.append(RenameTransform('seg', 'target', True))\n\n        if regions is not None:\n            # the ignore label must also be converted\n            
val_transforms.append(ConvertSegmentationToRegionsTransform(list(regions) + [ignore_label]\n                                                                        if ignore_label is not None else regions,\n                                                                        'target', 'target'))\n\n        if deep_supervision_scales is not None:\n            val_transforms.append(DownsampleSegForDSTransform2(deep_supervision_scales, 0, input_key='target',\n                                                               output_key='target'))\n\n        val_transforms.append(NumpyToTensor(['data', 'target'], 'float'))\n        val_transforms = Compose(val_transforms)\n        return val_transforms\n\n    def set_deep_supervision_enabled(self, enabled: bool):\n        \"\"\"\n        This function is specific for the default architecture in nnU-Net. If you change the architecture, there are\n        chances you need to change this as well!\n        \"\"\"\n        if self.is_ddp:\n            self.network.module.decoder.deep_supervision = enabled\n        else:\n            self.network.decoder.deep_supervision = enabled\n\n    def on_train_start(self):\n        if not self.was_initialized:\n            self.initialize()\n\n        maybe_mkdir_p(self.output_folder)\n\n        # make sure deep supervision is on in the network\n        self.set_deep_supervision_enabled(self.enable_deep_supervision)\n\n        self.print_plans()\n        empty_cache(self.device)\n\n        # maybe unpack\n        if self.unpack_dataset and self.local_rank == 0:\n            self.print_to_log_file('unpacking dataset...')\n            unpack_dataset(self.preprocessed_dataset_folder, unpack_segmentation=True, overwrite_existing=False,\n                           num_processes=max(1, round(get_allowed_n_proc_DA() // 2)))\n            self.print_to_log_file('unpacking done...')\n\n        if self.is_ddp:\n            dist.barrier()\n\n        # dataloaders must be instantiated here because 
they need access to the training data which may not be present\n        # when doing inference\n        self.dataloader_train, self.dataloader_val = self.get_dataloaders()\n\n        # copy plans and dataset.json so that they can be used for restoring everything we need for inference\n        save_json(self.plans_manager.plans, join(self.output_folder_base, 'plans.json'), sort_keys=False)\n        save_json(self.dataset_json, join(self.output_folder_base, 'dataset.json'), sort_keys=False)\n\n        # we don't really need the fingerprint but its still handy to have it with the others\n        shutil.copy(join(self.preprocessed_dataset_folder_base, 'dataset_fingerprint.json'),\n                    join(self.output_folder_base, 'dataset_fingerprint.json'))\n\n        # produces a pdf in output folder\n        self.plot_network_architecture()\n\n        self._save_debug_information()\n\n        # print(f\"batch size: {self.batch_size}\")\n        # print(f\"oversample: {self.oversample_foreground_percent}\")\n\n    def on_train_end(self):\n        # dirty hack because on_epoch_end increments the epoch counter and this is executed afterwards.\n        # This will lead to the wrong current epoch to be stored\n        self.current_epoch -= 1\n        self.save_checkpoint(join(self.output_folder, \"checkpoint_final.pth\"))\n        self.current_epoch += 1\n\n        # now we can delete latest\n        if self.local_rank == 0 and isfile(join(self.output_folder, \"checkpoint_latest.pth\")):\n            os.remove(join(self.output_folder, \"checkpoint_latest.pth\"))\n\n        # shut down dataloaders\n        old_stdout = sys.stdout\n        with open(os.devnull, 'w') as f:\n            sys.stdout = f\n            if self.dataloader_train is not None:\n                self.dataloader_train._finish()\n            if self.dataloader_val is not None:\n                self.dataloader_val._finish()\n            sys.stdout = old_stdout\n\n        empty_cache(self.device)\n        
self.print_to_log_file(\"Training done.\")\n\n    def on_train_epoch_start(self):\n        self.network.train()\n        self.lr_scheduler.step(self.current_epoch)\n        self.print_to_log_file('')\n        self.print_to_log_file(f'Epoch {self.current_epoch}')\n        self.print_to_log_file(\n            f\"Current learning rate: {np.round(self.optimizer.param_groups[0]['lr'], decimals=5)}\")\n        # lrs are the same for all workers so we don't need to gather them in case of DDP training\n        self.logger.log('lrs', self.optimizer.param_groups[0]['lr'], self.current_epoch)\n\n    def train_step(self, batch: dict) -> dict:\n        data = batch['data']\n        target = batch['target']\n\n        data = data.to(self.device, non_blocking=True)\n        if isinstance(target, list):\n            target = [i.to(self.device, non_blocking=True) for i in target]\n        else:\n            target = target.to(self.device, non_blocking=True)\n\n        self.optimizer.zero_grad(set_to_none=True)\n        # Autocast is a little bitch.\n        # If the device_type is 'cpu' then it's slow as heck and needs to be disabled.\n        # If the device_type is 'mps' then it will complain that mps is not implemented, even if enabled=False is set. Whyyyyyyy. 
(this is why we don't make use of enabled=False)\n        # So autocast will only be active if we have a cuda device.\n        with autocast(self.device.type, enabled=True) if self.device.type == 'cuda' else dummy_context():\n            output = self.network(data)\n            # del data\n            l = self.loss(output, target)\n\n        if self.grad_scaler is not None:\n            self.grad_scaler.scale(l).backward()\n            self.grad_scaler.unscale_(self.optimizer)\n            torch.nn.utils.clip_grad_norm_(self.network.parameters(), 12)\n            self.grad_scaler.step(self.optimizer)\n            self.grad_scaler.update()\n        else:\n            l.backward()\n            torch.nn.utils.clip_grad_norm_(self.network.parameters(), 12)\n            self.optimizer.step()\n        return {'loss': l.detach().cpu().numpy()}\n\n    def on_train_epoch_end(self, train_outputs: List[dict]):\n        outputs = collate_outputs(train_outputs)\n\n        if self.is_ddp:\n            losses_tr = [None for _ in range(dist.get_world_size())]\n            dist.all_gather_object(losses_tr, outputs['loss'])\n            loss_here = np.vstack(losses_tr).mean()\n        else:\n            loss_here = np.mean(outputs['loss'])\n\n        self.logger.log('train_losses', loss_here, self.current_epoch)\n\n    def on_validation_epoch_start(self):\n        self.network.eval()\n\n    def validation_step(self, batch: dict) -> dict:\n        data = batch['data']\n        target = batch['target']\n\n        data = data.to(self.device, non_blocking=True)\n        if isinstance(target, list):\n            target = [i.to(self.device, non_blocking=True) for i in target]\n        else:\n            target = target.to(self.device, non_blocking=True)\n\n        # Autocast is a little bitch.\n        # If the device_type is 'cpu' then it's slow as heck and needs to be disabled.\n        # If the device_type is 'mps' then it will complain that mps is not implemented, even if 
enabled=False is set. Whyyyyyyy. (this is why we don't make use of enabled=False)\n        # So autocast will only be active if we have a cuda device.\n        with autocast(self.device.type, enabled=True) if self.device.type == 'cuda' else dummy_context():\n            output = self.network(data)\n            del data\n            l = self.loss(output, target)\n\n        # we only need the output with the highest output resolution (if DS enabled)\n        if self.enable_deep_supervision:\n            output = output[0]\n            target = target[0]\n\n        # the following is needed for online evaluation. Fake dice (green line)\n        axes = [0] + list(range(2, output.ndim))\n\n        if self.label_manager.has_regions:\n            predicted_segmentation_onehot = (torch.sigmoid(output) > 0.5).long()\n        else:\n            # no need for softmax\n            output_seg = output.argmax(1)[:, None]\n            predicted_segmentation_onehot = torch.zeros(output.shape, device=output.device, dtype=torch.float32)\n            predicted_segmentation_onehot.scatter_(1, output_seg, 1)\n            del output_seg\n\n        if self.label_manager.has_ignore_label:\n            if not self.label_manager.has_regions:\n                mask = (target != self.label_manager.ignore_label).float()\n                # CAREFUL that you don't rely on target after this line!\n                target[target == self.label_manager.ignore_label] = 0\n            else:\n                mask = 1 - target[:, -1:]\n                # CAREFUL that you don't rely on target after this line!\n                target = target[:, :-1]\n        else:\n            mask = None\n\n        tp, fp, fn, _ = get_tp_fp_fn_tn(predicted_segmentation_onehot, target, axes=axes, mask=mask)\n\n        tp_hard = tp.detach().cpu().numpy()\n        fp_hard = fp.detach().cpu().numpy()\n        fn_hard = fn.detach().cpu().numpy()\n        if not self.label_manager.has_regions:\n            # if we train with 
regions all segmentation heads predict some kind of foreground. In conventional\n            # (softmax training) there needs tobe one output for the background. We are not interested in the\n            # background Dice\n            # [1:] in order to remove background\n            tp_hard = tp_hard[1:]\n            fp_hard = fp_hard[1:]\n            fn_hard = fn_hard[1:]\n\n        return {'loss': l.detach().cpu().numpy(), 'tp_hard': tp_hard, 'fp_hard': fp_hard, 'fn_hard': fn_hard}\n\n    def on_validation_epoch_end(self, val_outputs: List[dict]):\n        outputs_collated = collate_outputs(val_outputs)\n        tp = np.sum(outputs_collated['tp_hard'], 0)\n        fp = np.sum(outputs_collated['fp_hard'], 0)\n        fn = np.sum(outputs_collated['fn_hard'], 0)\n\n        if self.is_ddp:\n            world_size = dist.get_world_size()\n\n            tps = [None for _ in range(world_size)]\n            dist.all_gather_object(tps, tp)\n            tp = np.vstack([i[None] for i in tps]).sum(0)\n\n            fps = [None for _ in range(world_size)]\n            dist.all_gather_object(fps, fp)\n            fp = np.vstack([i[None] for i in fps]).sum(0)\n\n            fns = [None for _ in range(world_size)]\n            dist.all_gather_object(fns, fn)\n            fn = np.vstack([i[None] for i in fns]).sum(0)\n\n            losses_val = [None for _ in range(world_size)]\n            dist.all_gather_object(losses_val, outputs_collated['loss'])\n            loss_here = np.vstack(losses_val).mean()\n        else:\n            loss_here = np.mean(outputs_collated['loss'])\n\n        global_dc_per_class = [i for i in [2 * i / (2 * i + j + k) for i, j, k in zip(tp, fp, fn)]]\n        mean_fg_dice = np.nanmean(global_dc_per_class)\n        self.logger.log('mean_fg_dice', mean_fg_dice, self.current_epoch)\n        self.logger.log('dice_per_class_or_region', global_dc_per_class, self.current_epoch)\n        self.logger.log('val_losses', loss_here, self.current_epoch)\n\n    def 
on_epoch_start(self):\n        self.logger.log('epoch_start_timestamps', time(), self.current_epoch)\n\n    def on_epoch_end(self):\n        self.logger.log('epoch_end_timestamps', time(), self.current_epoch)\n\n        self.print_to_log_file('train_loss', np.round(self.logger.my_fantastic_logging['train_losses'][-1], decimals=4))\n        self.print_to_log_file('val_loss', np.round(self.logger.my_fantastic_logging['val_losses'][-1], decimals=4))\n        self.print_to_log_file('Pseudo dice', [np.round(i, decimals=4) for i in\n                                               self.logger.my_fantastic_logging['dice_per_class_or_region'][-1]])\n        self.print_to_log_file(\n            f\"Epoch time: {np.round(self.logger.my_fantastic_logging['epoch_end_timestamps'][-1] - self.logger.my_fantastic_logging['epoch_start_timestamps'][-1], decimals=2)} s\")\n\n        # handling periodic checkpointing\n        current_epoch = self.current_epoch\n        if (current_epoch + 1) % self.save_every == 0 and current_epoch != (self.num_epochs - 1):\n            self.save_checkpoint(join(self.output_folder, 'checkpoint_latest.pth'))\n\n        # handle 'best' checkpointing. ema_fg_dice is computed by the logger and can be accessed like this\n        if self._best_ema is None or self.logger.my_fantastic_logging['ema_fg_dice'][-1] > self._best_ema:\n            self._best_ema = self.logger.my_fantastic_logging['ema_fg_dice'][-1]\n            self.print_to_log_file(f\"Yayy! 
New best EMA pseudo Dice: {np.round(self._best_ema, decimals=4)}\")\n            self.save_checkpoint(join(self.output_folder, 'checkpoint_best.pth'))\n\n        if self.local_rank == 0:\n            self.logger.plot_progress_png(self.output_folder)\n\n        self.current_epoch += 1\n\n    def save_checkpoint(self, filename: str) -> None:\n        if self.local_rank == 0:\n            if not self.disable_checkpointing:\n                if self.is_ddp:\n                    mod = self.network.module\n                else:\n                    mod = self.network\n                if isinstance(mod, OptimizedModule):\n                    mod = mod._orig_mod\n\n                checkpoint = {\n                    'network_weights': mod.state_dict(),\n                    'optimizer_state': self.optimizer.state_dict(),\n                    'grad_scaler_state': self.grad_scaler.state_dict() if self.grad_scaler is not None else None,\n                    'logging': self.logger.get_checkpoint(),\n                    '_best_ema': self._best_ema,\n                    'current_epoch': self.current_epoch + 1,\n                    'init_args': self.my_init_kwargs,\n                    'trainer_name': self.__class__.__name__,\n                    'inference_allowed_mirroring_axes': self.inference_allowed_mirroring_axes,\n                }\n                torch.save(checkpoint, filename)\n            else:\n                self.print_to_log_file('No checkpoint written, checkpointing is disabled')\n\n    def load_checkpoint(self, filename_or_checkpoint: Union[dict, str]) -> None:\n        if not self.was_initialized:\n            self.initialize()\n\n        if isinstance(filename_or_checkpoint, str):\n            checkpoint = torch.load(filename_or_checkpoint, map_location=self.device)\n        # if state dict comes from nn.DataParallel but we use non-parallel model here then the state dict keys do not\n        # match. 
Use heuristic to make it match\n        new_state_dict = {}\n        for k, value in checkpoint['network_weights'].items():\n            key = k\n            if key not in self.network.state_dict().keys() and key.startswith('module.'):\n                key = key[7:]\n            new_state_dict[key] = value\n\n        self.my_init_kwargs = checkpoint['init_args']\n        self.current_epoch = checkpoint['current_epoch']\n        self.logger.load_checkpoint(checkpoint['logging'])\n        self._best_ema = checkpoint['_best_ema']\n        self.inference_allowed_mirroring_axes = checkpoint[\n            'inference_allowed_mirroring_axes'] if 'inference_allowed_mirroring_axes' in checkpoint.keys() else self.inference_allowed_mirroring_axes\n\n        # messing with state dict naming schemes. Facepalm.\n        if self.is_ddp:\n            if isinstance(self.network.module, OptimizedModule):\n                self.network.module._orig_mod.load_state_dict(new_state_dict)\n            else:\n                self.network.module.load_state_dict(new_state_dict)\n        else:\n            if isinstance(self.network, OptimizedModule):\n                self.network._orig_mod.load_state_dict(new_state_dict)\n            else:\n                self.network.load_state_dict(new_state_dict)\n        self.optimizer.load_state_dict(checkpoint['optimizer_state'])\n        if self.grad_scaler is not None:\n            if checkpoint['grad_scaler_state'] is not None:\n                self.grad_scaler.load_state_dict(checkpoint['grad_scaler_state'])\n\n    def perform_actual_validation(self, save_probabilities: bool = False):\n        self.set_deep_supervision_enabled(False)\n        self.network.eval()\n\n        predictor = nnUNetPredictor(tile_step_size=0.5, use_gaussian=True, use_mirroring=True,\n                                    perform_everything_on_gpu=True, device=self.device, verbose=False,\n                                    verbose_preprocessing=False, allow_tqdm=False)\n      
  predictor.manual_initialization(self.network, self.plans_manager, self.configuration_manager, None,\n                                        self.dataset_json, self.__class__.__name__,\n                                        self.inference_allowed_mirroring_axes)\n\n        with multiprocessing.get_context(\"spawn\").Pool(default_num_processes) as segmentation_export_pool:\n            worker_list = [i for i in segmentation_export_pool._pool]\n            validation_output_folder = join(self.output_folder, 'validation')\n            maybe_mkdir_p(validation_output_folder)\n\n            # we cannot use self.get_tr_and_val_datasets() here because we might be DDP and then we have to distribute\n            # the validation keys across the workers.\n            _, val_keys = self.do_split()\n            if self.is_ddp:\n                val_keys = val_keys[self.local_rank:: dist.get_world_size()]\n\n            dataset_val = nnUNetDataset(self.preprocessed_dataset_folder, val_keys,\n                                        folder_with_segs_from_previous_stage=self.folder_with_segs_from_previous_stage,\n                                        num_images_properties_loading_threshold=0)\n\n            next_stages = self.configuration_manager.next_stage_names\n\n            if next_stages is not None:\n                _ = [maybe_mkdir_p(join(self.output_folder_base, 'predicted_next_stage', n)) for n in next_stages]\n\n            results = []\n\n            for k in dataset_val.keys():\n                proceed = not check_workers_alive_and_busy(segmentation_export_pool, worker_list, results,\n                                                 allowed_num_queued=2)\n                while not proceed:\n                    sleep(0.1)\n                    proceed = not check_workers_alive_and_busy(segmentation_export_pool, worker_list, results,\n                                                     allowed_num_queued=2)\n\n                self.print_to_log_file(f\"predicting 
{k}\")\n                data, seg, properties = dataset_val.load_case(k)\n\n                if self.is_cascaded:\n                    data = np.vstack((data, convert_labelmap_to_one_hot(seg[-1], self.label_manager.foreground_labels,\n                                                                        output_dtype=data.dtype)))\n                with warnings.catch_warnings():\n                    # ignore 'The given NumPy array is not writable' warning\n                    warnings.simplefilter(\"ignore\")\n                    data = torch.from_numpy(data)\n\n                output_filename_truncated = join(validation_output_folder, k)\n\n                try:\n                    prediction = predictor.predict_sliding_window_return_logits(data)\n                except RuntimeError:\n                    predictor.perform_everything_on_gpu = False\n                    prediction = predictor.predict_sliding_window_return_logits(data)\n                    predictor.perform_everything_on_gpu = True\n\n                prediction = prediction.cpu()\n\n                # this needs to go into background processes\n                results.append(\n                    segmentation_export_pool.starmap_async(\n                        export_prediction_from_logits, (\n                            (prediction, properties, self.configuration_manager, self.plans_manager,\n                             self.dataset_json, output_filename_truncated, save_probabilities),\n                        )\n                    )\n                )\n                # for debug purposes\n                # export_prediction(prediction_for_export, properties, self.configuration, self.plans, self.dataset_json,\n                #              output_filename_truncated, save_probabilities)\n\n                # if needed, export the softmax prediction for the next stage\n                if next_stages is not None:\n                    for n in next_stages:\n                        
next_stage_config_manager = self.plans_manager.get_configuration(n)\n                        expected_preprocessed_folder = join(nnUNet_preprocessed, self.plans_manager.dataset_name,\n                                                            next_stage_config_manager.data_identifier)\n\n                        try:\n                            # we do this so that we can use load_case and do not have to hard code how loading training cases is implemented\n                            tmp = nnUNetDataset(expected_preprocessed_folder, [k],\n                                                num_images_properties_loading_threshold=0)\n                            d, s, p = tmp.load_case(k)\n                        except FileNotFoundError:\n                            self.print_to_log_file(\n                                f\"Predicting next stage {n} failed for case {k} because the preprocessed file is missing! \"\n                                f\"Run the preprocessing for this configuration first!\")\n                            continue\n\n                        target_shape = d.shape[1:]\n                        output_folder = join(self.output_folder_base, 'predicted_next_stage', n)\n                        output_file = join(output_folder, k + '.npz')\n\n                        # resample_and_save(prediction, target_shape, output_file, self.plans_manager, self.configuration_manager, properties,\n                        #                   self.dataset_json)\n                        results.append(segmentation_export_pool.starmap_async(\n                            resample_and_save, (\n                                (prediction, target_shape, output_file, self.plans_manager,\n                                 self.configuration_manager,\n                                 properties,\n                                 self.dataset_json),\n                            )\n                        ))\n\n            _ = [r.get() for r in results]\n\n        if 
self.is_ddp:\n            dist.barrier()\n\n        if self.local_rank == 0:\n            metrics = compute_metrics_on_folder(join(self.preprocessed_dataset_folder_base, 'gt_segmentations'),\n                                                validation_output_folder,\n                                                join(validation_output_folder, 'summary.json'),\n                                                self.plans_manager.image_reader_writer_class(),\n                                                self.dataset_json[\"file_ending\"],\n                                                self.label_manager.foreground_regions if self.label_manager.has_regions else\n                                                self.label_manager.foreground_labels,\n                                                self.label_manager.ignore_label, chill=True)\n            self.print_to_log_file(\"Validation complete\", also_print_to_console=True)\n            self.print_to_log_file(\"Mean Validation Dice: \", (metrics['foreground_mean'][\"Dice\"]), also_print_to_console=True)\n\n        self.set_deep_supervision_enabled(True)\n        compute_gaussian.cache_clear()\n\n    def run_training(self):\n        self.on_train_start()\n\n        for epoch in range(self.current_epoch, self.num_epochs):\n            self.on_epoch_start()\n\n            self.on_train_epoch_start()\n            train_outputs = []\n            for batch_id in range(self.num_iterations_per_epoch):\n                train_outputs.append(self.train_step(next(self.dataloader_train)))\n            self.on_train_epoch_end(train_outputs)\n\n            with torch.no_grad():\n                self.on_validation_epoch_start()\n                val_outputs = []\n                for batch_id in range(self.num_val_iterations_per_epoch):\n                    val_outputs.append(self.validation_step(next(self.dataloader_val)))\n                self.on_validation_epoch_end(val_outputs)\n\n            self.on_epoch_end()\n\n        
self.on_train_end()\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/nnUNetTrainer_swin.py",
    "content": "from nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer\r\nimport torch\r\nfrom dynamic_network_architectures.architectures.unet import ResidualEncoderUNet, PlainConvUNet\r\nfrom dynamic_network_architectures.building_blocks.helper import convert_dim_to_conv_op, get_matching_batchnorm\r\nfrom dynamic_network_architectures.initialization.weight_init import init_last_bn_before_add_to_0, InitWeights_He\r\nfrom nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer\r\nfrom nnunetv2.utilities.plans_handling.plans_handler import ConfigurationManager, PlansManager\r\nfrom torch import nn\r\nfrom nnunetv2.training.nnUNetTrainer.vit import Swin\r\n\r\n\r\nclass nnUNetTrainer_swin(nnUNetTrainer):\r\n    def __init__(\r\n        self,\r\n        plans: dict,\r\n        configuration: str,\r\n        fold: int,\r\n        dataset_json: dict,\r\n        unpack_dataset: bool = True,\r\n        device: torch.device = torch.device(\"cuda\"),\r\n    ):\r\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\r\n        self.enable_deep_supervision = False\r\n        self.num_epochs = 250\r\n\r\n    @staticmethod\r\n    def build_network_architecture(plans_manager: PlansManager,\r\n                                   dataset_json,\r\n                                   configuration_manager: ConfigurationManager,\r\n                                   num_input_channels,\r\n                                   enable_deep_supervision=False) -> nn.Module:\r\n        num_stages = len(configuration_manager.conv_kernel_sizes)\r\n\r\n        dim = len(configuration_manager.conv_kernel_sizes[0])\r\n        conv_op = convert_dim_to_conv_op(dim)\r\n\r\n        label_manager = plans_manager.get_label_manager(dataset_json)\r\n\r\n        segmentation_network_class_name = 'swin'\r\n        mapping = {\r\n            'swin': Swin\r\n        }\r\n\r\n        assert segmentation_network_class_name in mapping.keys(), 'The 
network architecture specified by the plans file ' \\\r\n                                                                  'is non-standard (maybe your own?). You\\'ll have to dive ' \\\r\n                                                                  'into either this ' \\\r\n                                                                  'function (get_network_from_plans) or ' \\\r\n                                                                  'the init of your nnUNetModule to accommodate that.'\r\n        network_class = mapping[segmentation_network_class_name]\r\n\r\n        conv_or_blocks_per_stage = {\r\n            'n_conv_per_stage'\r\n            if network_class != ResidualEncoderUNet else 'n_blocks_per_stage': configuration_manager.n_conv_per_stage_encoder,\r\n            'n_conv_per_stage_decoder': configuration_manager.n_conv_per_stage_decoder\r\n        }\r\n        # network class name!!\r\n        model = network_class(\r\n            input_channels=num_input_channels,\r\n            num_classes=label_manager.num_segmentation_heads,\r\n        )\r\n        model.apply(InitWeights_He(1e-2))\r\n        if network_class == ResidualEncoderUNet:\r\n            model.apply(init_last_bn_before_add_to_0)\r\n        return model\r\n\r\n    def set_deep_supervision_enabled(self, enabled: bool):\r\n        return\r\n\r\n\r\nclass nnUNetTrainer_swin_pre(nnUNetTrainer):\r\n    def __init__(\r\n        self,\r\n        plans: dict,\r\n        configuration: str,\r\n        fold: int,\r\n        dataset_json: dict,\r\n        unpack_dataset: bool = True,\r\n        device: torch.device = torch.device(\"cuda\"),\r\n    ):\r\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\r\n        self.enable_deep_supervision = False\r\n        self.num_epochs = 250\r\n\r\n    @staticmethod\r\n    def build_network_architecture(plans_manager: PlansManager,\r\n                                   dataset_json,\r\n                    
               configuration_manager: ConfigurationManager,\r\n                                   num_input_channels,\r\n                                   enable_deep_supervision: bool = False) -> nn.Module:\r\n        num_stages = len(configuration_manager.conv_kernel_sizes)\r\n\r\n        dim = len(configuration_manager.conv_kernel_sizes[0])\r\n        conv_op = convert_dim_to_conv_op(dim)\r\n\r\n        label_manager = plans_manager.get_label_manager(dataset_json)\r\n\r\n        segmentation_network_class_name = 'swin'\r\n        mapping = {\r\n            'swin': Swin\r\n        }\r\n\r\n        assert segmentation_network_class_name in mapping.keys(), 'The network architecture specified by the plans file ' \\\r\n                                                                  'is non-standard (maybe your own?). You\\'ll have to dive ' \\\r\n                                                                  'into either this ' \\\r\n                                                                  'function (get_network_from_plans) or ' \\\r\n                                                                  'the init of your nnUNetModule to accommodate that.'\r\n        network_class = mapping[segmentation_network_class_name]\r\n\r\n        conv_or_blocks_per_stage = {\r\n            'n_conv_per_stage'\r\n            if network_class != ResidualEncoderUNet else 'n_blocks_per_stage': configuration_manager.n_conv_per_stage_encoder,\r\n            'n_conv_per_stage_decoder': configuration_manager.n_conv_per_stage_decoder\r\n        }\r\n        # network class name!!\r\n        model = network_class(\r\n            input_channels=num_input_channels,\r\n            num_classes=label_manager.num_segmentation_heads,\r\n        )\r\n\r\n        try:\r\n            model_dict = torch.load(\"/home/linshan/VoCo/runs/logs_10k_swinv2_abdomen/current_model.pth\",\r\n                                    map_location=torch.device('cpu'))\r\n            try:\r\n                
state_dict = model_dict  # [\"state_dict\"]\r\n            except:\r\n                state_dict = model_dict[\"state_dict\"]\r\n\r\n            # if mri\r\n            state_dict = delete_patch_embed(state_dict)\r\n\r\n            if \"module.\" in list(state_dict.keys())[0]:\r\n                print(\"Tag 'module.' found in state dict - fixing!\")\r\n                for key in list(state_dict.keys()):\r\n                    state_dict[key.replace(\"module.\", \"\")] = state_dict.pop(key)\r\n            if \"swin_vit\" in list(state_dict.keys())[0]:\r\n                print(\"Tag 'swin_vit' found in state dict - fixing!\")\r\n                for key in list(state_dict.keys()):\r\n                    state_dict[key.replace(\"swin_vit\", \"swinViT\")] = state_dict.pop(key)\r\n            # We now load model weights, setting param `strict` to False, i.e.:\r\n            # this load the encoder weights (Swin-ViT, SSL pre-trained), but leaves\r\n            # the decoder weights untouched (CNN UNet decoder).\r\n            model.load_state_dict(state_dict, strict=False)\r\n            print(\"Using pretrained voco ema self-supervised Swin UNETR backbone weights !\")\r\n        except ValueError:\r\n            raise ValueError(\"Self-supervised pre-trained weights not available\")\r\n\r\n        return model\r\n\r\n    def set_deep_supervision_enabled(self, enabled: bool):\r\n        return\r\n\r\n\r\ndef delete_patch_embed(state_dict):\r\n    for key in list(state_dict.keys()):\r\n        state_dict[key.replace(\"swinViT.patch_embed\", \"bad\")] = state_dict.pop(key)\r\n    for key in list(state_dict.keys()):\r\n        state_dict[key.replace(\"encoder1.layer\", \"bad\")] = state_dict.pop(key)\r\n\r\n    return state_dict\r\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/benchmarking/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/benchmarking/nnUNetTrainerBenchmark_5epochs.py",
    "content": "import torch\nfrom batchgenerators.utilities.file_and_folder_operations import save_json, join, isfile, load_json\n\nfrom nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer\nfrom torch import distributed as dist\n\n\nclass nnUNetTrainerBenchmark_5epochs(nnUNetTrainer):\n    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 device: torch.device = torch.device('cuda')):\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        assert self.fold == 0, \"It makes absolutely no sense to specify a certain fold. Stick with 0 so that we can parse the results.\"\n        self.disable_checkpointing = True\n        self.num_epochs = 5\n        assert torch.cuda.is_available(), \"This only works on GPU\"\n        self.crashed_with_runtime_error = False\n\n    def perform_actual_validation(self, save_probabilities: bool = False):\n        pass\n\n    def save_checkpoint(self, filename: str) -> None:\n        # do not trust people to remember that self.disable_checkpointing must be True for this trainer\n        pass\n\n    def run_training(self):\n        try:\n            super().run_training()\n        except RuntimeError:\n            self.crashed_with_runtime_error = True\n\n    def on_train_end(self):\n        super().on_train_end()\n\n        if not self.is_ddp or self.local_rank == 0:\n            torch_version = torch.__version__\n            cudnn_version = torch.backends.cudnn.version()\n            gpu_name = torch.cuda.get_device_name()\n            if self.crashed_with_runtime_error:\n                fastest_epoch = 'Not enough VRAM!'\n            else:\n                epoch_times = [i - j for i, j in zip(self.logger.my_fantastic_logging['epoch_end_timestamps'],\n                                                     self.logger.my_fantastic_logging['epoch_start_timestamps'])]\n                fastest_epoch = 
min(epoch_times)\n\n            if self.is_ddp:\n                num_gpus = dist.get_world_size()\n            else:\n                num_gpus = 1\n\n            benchmark_result_file = join(self.output_folder, 'benchmark_result.json')\n            if isfile(benchmark_result_file):\n                old_results = load_json(benchmark_result_file)\n            else:\n                old_results = {}\n            # generate some unique key\n            my_key = f\"{cudnn_version}__{torch_version.replace(' ', '')}__{gpu_name.replace(' ', '')}__gpus_{num_gpus}\"\n            old_results[my_key] = {\n                'torch_version': torch_version,\n                'cudnn_version': cudnn_version,\n                'gpu_name': gpu_name,\n                'fastest_epoch': fastest_epoch,\n                'num_gpus': num_gpus,\n            }\n            save_json(old_results,\n                      join(self.output_folder, 'benchmark_result.json'))\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/benchmarking/nnUNetTrainerBenchmark_5epochs_noDataLoading.py",
    "content": "import torch\n\nfrom nnunetv2.training.nnUNetTrainer.variants.benchmarking.nnUNetTrainerBenchmark_5epochs import (\n    nnUNetTrainerBenchmark_5epochs,\n)\nfrom nnunetv2.utilities.label_handling.label_handling import determine_num_input_channels\n\n\nclass nnUNetTrainerBenchmark_5epochs_noDataLoading(nnUNetTrainerBenchmark_5epochs):\n    def __init__(\n        self,\n        plans: dict,\n        configuration: str,\n        fold: int,\n        dataset_json: dict,\n        unpack_dataset: bool = True,\n        device: torch.device = torch.device(\"cuda\"),\n    ):\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        self._set_batch_size_and_oversample()\n        num_input_channels = determine_num_input_channels(\n            self.plans_manager, self.configuration_manager, self.dataset_json\n        )\n        patch_size = self.configuration_manager.patch_size\n        dummy_data = torch.rand((self.batch_size, num_input_channels, *patch_size), device=self.device)\n        if self.enable_deep_supervision:\n            dummy_target = [\n                torch.round(\n                    torch.rand((self.batch_size, 1, *[int(i * j) for i, j in zip(patch_size, k)]), device=self.device)\n                    * max(self.label_manager.all_labels)\n                )\n                for k in self._get_deep_supervision_scales()\n            ]\n        else:\n            raise NotImplementedError(\"This trainer does not support deep supervision\")\n        self.dummy_batch = {\"data\": dummy_data, \"target\": dummy_target}\n\n    def get_dataloaders(self):\n        return None, None\n\n    def run_training(self):\n        try:\n            self.on_train_start()\n\n            for epoch in range(self.current_epoch, self.num_epochs):\n                self.on_epoch_start()\n\n                self.on_train_epoch_start()\n                train_outputs = []\n                for batch_id in 
range(self.num_iterations_per_epoch):\n                    train_outputs.append(self.train_step(self.dummy_batch))\n                self.on_train_epoch_end(train_outputs)\n\n                with torch.no_grad():\n                    self.on_validation_epoch_start()\n                    val_outputs = []\n                    for batch_id in range(self.num_val_iterations_per_epoch):\n                        val_outputs.append(self.validation_step(self.dummy_batch))\n                    self.on_validation_epoch_end(val_outputs)\n\n                self.on_epoch_end()\n\n            self.on_train_end()\n        except RuntimeError:\n            self.crashed_with_runtime_error = True\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/data_augmentation/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/data_augmentation/nnUNetTrainerDA5.py",
    "content": "from typing import List, Union, Tuple\n\nimport numpy as np\nimport torch\nfrom batchgenerators.dataloading.single_threaded_augmenter import SingleThreadedAugmenter\nfrom batchgenerators.transforms.abstract_transforms import AbstractTransform, Compose\nfrom batchgenerators.transforms.color_transforms import BrightnessTransform, ContrastAugmentationTransform, \\\n    GammaTransform\nfrom batchgenerators.transforms.local_transforms import BrightnessGradientAdditiveTransform, LocalGammaTransform\nfrom batchgenerators.transforms.noise_transforms import MedianFilterTransform, GaussianBlurTransform, \\\n    GaussianNoiseTransform, BlankRectangleTransform, SharpeningTransform\nfrom batchgenerators.transforms.resample_transforms import SimulateLowResolutionTransform\nfrom batchgenerators.transforms.spatial_transforms import SpatialTransform, Rot90Transform, TransposeAxesTransform, \\\n    MirrorTransform\nfrom batchgenerators.transforms.utility_transforms import OneOfTransform, RemoveLabelTransform, RenameTransform, \\\n    NumpyToTensor\n\nfrom nnunetv2.configuration import ANISO_THRESHOLD\nfrom nnunetv2.training.data_augmentation.compute_initial_patch_size import get_patch_size\nfrom nnunetv2.training.data_augmentation.custom_transforms.cascade_transforms import MoveSegAsOneHotToData, \\\n    ApplyRandomBinaryOperatorTransform, RemoveRandomConnectedComponentFromOneHotEncodingTransform\nfrom nnunetv2.training.data_augmentation.custom_transforms.deep_supervision_donwsampling import \\\n    DownsampleSegForDSTransform2\nfrom nnunetv2.training.data_augmentation.custom_transforms.limited_length_multithreaded_augmenter import \\\n    LimitedLenWrapper\nfrom nnunetv2.training.data_augmentation.custom_transforms.masking import MaskTransform\nfrom nnunetv2.training.data_augmentation.custom_transforms.region_based_training import \\\n    ConvertSegmentationToRegionsTransform\nfrom nnunetv2.training.data_augmentation.custom_transforms.transforms_for_dummy_2d import 
Convert3DTo2DTransform, \\\n    Convert2DTo3DTransform\nfrom nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer\nfrom nnunetv2.utilities.default_n_proc_DA import get_allowed_n_proc_DA\n\n\nclass nnUNetTrainerDA5(nnUNetTrainer):\n    def configure_rotation_dummyDA_mirroring_and_inital_patch_size(self):\n        \"\"\"\n        This function is stupid and certainly one of the weakest spots of this implementation. Not entirely sure how we can fix it.\n        \"\"\"\n        patch_size = self.configuration_manager.patch_size\n        dim = len(patch_size)\n        # todo rotation should be defined dynamically based on patch size (more isotropic patch sizes = more rotation)\n        if dim == 2:\n            do_dummy_2d_data_aug = False\n            # todo revisit this parametrization\n            if max(patch_size) / min(patch_size) > 1.5:\n                rotation_for_DA = {\n                    'x': (-15. / 360 * 2. * np.pi, 15. / 360 * 2. * np.pi),\n                    'y': (0, 0),\n                    'z': (0, 0)\n                }\n            else:\n                rotation_for_DA = {\n                    'x': (-180. / 360 * 2. * np.pi, 180. / 360 * 2. * np.pi),\n                    'y': (0, 0),\n                    'z': (0, 0)\n                }\n            mirror_axes = (0, 1)\n        elif dim == 3:\n            # todo this is not ideal. We could also have patch_size (64, 16, 128) in which case a full 180deg 2d rot would be bad\n            # order of the axes is determined by spacing, not image size\n            do_dummy_2d_data_aug = (max(patch_size) / patch_size[0]) > ANISO_THRESHOLD\n            if do_dummy_2d_data_aug:\n                # why do we rotate 180 deg here all the time? We should also restrict it\n                rotation_for_DA = {\n                    'x': (-180. / 360 * 2. * np.pi, 180. / 360 * 2. 
* np.pi),\n                    'y': (0, 0),\n                    'z': (0, 0)\n                }\n            else:\n                rotation_for_DA = {\n                    'x': (-30. / 360 * 2. * np.pi, 30. / 360 * 2. * np.pi),\n                    'y': (-30. / 360 * 2. * np.pi, 30. / 360 * 2. * np.pi),\n                    'z': (-30. / 360 * 2. * np.pi, 30. / 360 * 2. * np.pi),\n                }\n            mirror_axes = (0, 1, 2)\n        else:\n            raise RuntimeError()\n\n        # todo this function is stupid. It doesn't even use the correct scale range (we keep things as they were in the\n        #  old nnunet for now)\n        initial_patch_size = get_patch_size(patch_size[-dim:],\n                                            *rotation_for_DA.values(),\n                                            (0.7, 1.43))\n        if do_dummy_2d_data_aug:\n            initial_patch_size[0] = patch_size[0]\n\n        self.print_to_log_file(f'do_dummy_2d_data_aug: {do_dummy_2d_data_aug}')\n        self.inference_allowed_mirroring_axes = mirror_axes\n\n        return rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, mirror_axes\n\n    @staticmethod\n    def get_training_transforms(patch_size: Union[np.ndarray, Tuple[int]],\n                                rotation_for_DA: dict,\n                                deep_supervision_scales: Union[List, Tuple, None],\n                                mirror_axes: Tuple[int, ...],\n                                do_dummy_2d_data_aug: bool,\n                                order_resampling_data: int = 3,\n                                order_resampling_seg: int = 1,\n                                border_val_seg: int = -1,\n                                use_mask_for_norm: List[bool] = None,\n                                is_cascaded: bool = False,\n                                foreground_labels: Union[Tuple[int, ...], List[int]] = None,\n                                regions: List[Union[List[int], 
Tuple[int, ...], int]] = None,\n                                ignore_label: int = None) -> AbstractTransform:\n        matching_axes = np.array([sum([i == j for j in patch_size]) for i in patch_size])\n        valid_axes = list(np.where(matching_axes == np.max(matching_axes))[0])\n\n        tr_transforms = []\n\n        if do_dummy_2d_data_aug:\n            ignore_axes = (0,)\n            tr_transforms.append(Convert3DTo2DTransform())\n            patch_size_spatial = patch_size[1:]\n        else:\n            patch_size_spatial = patch_size\n            ignore_axes = None\n\n        tr_transforms.append(\n            SpatialTransform(\n                patch_size_spatial,\n                patch_center_dist_from_border=None,\n                do_elastic_deform=False,\n                do_rotation=True,\n                angle_x=rotation_for_DA['x'],\n                angle_y=rotation_for_DA['y'],\n                angle_z=rotation_for_DA['z'],\n                p_rot_per_axis=0.5,\n                do_scale=True,\n                scale=(0.7, 1.43),\n                border_mode_data=\"constant\",\n                border_cval_data=0,\n                order_data=order_resampling_data,\n                border_mode_seg=\"constant\",\n                border_cval_seg=-1,\n                order_seg=order_resampling_seg,\n                random_crop=False,\n                p_el_per_sample=0.2,\n                p_scale_per_sample=0.2,\n                p_rot_per_sample=0.4,\n                independent_scale_for_each_axis=True,\n            )\n        )\n\n        if do_dummy_2d_data_aug:\n            tr_transforms.append(Convert2DTo3DTransform())\n\n        if np.any(matching_axes > 1):\n            tr_transforms.append(\n                Rot90Transform(\n                    (0, 1, 2, 3), axes=valid_axes, data_key='data', label_key='seg', p_per_sample=0.5\n                ),\n            )\n\n        if np.any(matching_axes > 1):\n            tr_transforms.append(\n                
TransposeAxesTransform(valid_axes, data_key='data', label_key='seg', p_per_sample=0.5)\n            )\n\n        tr_transforms.append(OneOfTransform([\n            MedianFilterTransform(\n                (2, 8),\n                same_for_each_channel=False,\n                p_per_sample=0.2,\n                p_per_channel=0.5\n            ),\n            GaussianBlurTransform((0.3, 1.5),\n                                  different_sigma_per_channel=True,\n                                  p_per_sample=0.2,\n                                  p_per_channel=0.5)\n        ]))\n\n        tr_transforms.append(GaussianNoiseTransform(p_per_sample=0.1))\n\n        tr_transforms.append(BrightnessTransform(0,\n                                                 0.5,\n                                                 per_channel=True,\n                                                 p_per_sample=0.1,\n                                                 p_per_channel=0.5\n                                                 )\n                             )\n\n        tr_transforms.append(OneOfTransform(\n            [\n                ContrastAugmentationTransform(\n                    contrast_range=(0.5, 2),\n                    preserve_range=True,\n                    per_channel=True,\n                    data_key='data',\n                    p_per_sample=0.2,\n                    p_per_channel=0.5\n                ),\n                ContrastAugmentationTransform(\n                    contrast_range=(0.5, 2),\n                    preserve_range=False,\n                    per_channel=True,\n                    data_key='data',\n                    p_per_sample=0.2,\n                    p_per_channel=0.5\n                ),\n            ]\n        ))\n\n        tr_transforms.append(\n            SimulateLowResolutionTransform(zoom_range=(0.25, 1),\n                                           per_channel=True,\n                                           p_per_channel=0.5,\n          
                                 order_downsample=0,\n                                           order_upsample=3,\n                                           p_per_sample=0.15,\n                                           ignore_axes=ignore_axes\n                                           )\n        )\n\n        tr_transforms.append(\n            GammaTransform((0.7, 1.5), invert_image=True, per_channel=True, retain_stats=True, p_per_sample=0.1))\n        tr_transforms.append(\n            GammaTransform((0.7, 1.5), invert_image=True, per_channel=True, retain_stats=True, p_per_sample=0.1))\n\n        if mirror_axes is not None and len(mirror_axes) > 0:\n            tr_transforms.append(MirrorTransform(mirror_axes))\n\n        tr_transforms.append(\n            BlankRectangleTransform([[max(1, p // 10), p // 3] for p in patch_size],\n                                    rectangle_value=np.mean,\n                                    num_rectangles=(1, 5),\n                                    force_square=False,\n                                    p_per_sample=0.4,\n                                    p_per_channel=0.5\n                                    )\n        )\n\n        tr_transforms.append(\n            BrightnessGradientAdditiveTransform(\n                _brightnessadditive_localgamma_transform_scale,\n                (-0.5, 1.5),\n                max_strength=_brightness_gradient_additive_max_strength,\n                mean_centered=False,\n                same_for_all_channels=False,\n                p_per_sample=0.3,\n                p_per_channel=0.5\n            )\n        )\n\n        tr_transforms.append(\n            LocalGammaTransform(\n                _brightnessadditive_localgamma_transform_scale,\n                (-0.5, 1.5),\n                _local_gamma_gamma,\n                same_for_all_channels=False,\n                p_per_sample=0.3,\n                p_per_channel=0.5\n            )\n        )\n\n        tr_transforms.append(\n          
  SharpeningTransform(\n                strength=(0.1, 1),\n                same_for_each_channel=False,\n                p_per_sample=0.2,\n                p_per_channel=0.5\n            )\n        )\n\n        if use_mask_for_norm is not None and any(use_mask_for_norm):\n            tr_transforms.append(MaskTransform([i for i in range(len(use_mask_for_norm)) if use_mask_for_norm[i]],\n                                               mask_idx_in_seg=0, set_outside_to=0))\n\n        tr_transforms.append(RemoveLabelTransform(-1, 0))\n\n        if is_cascaded:\n            if ignore_label is not None:\n                raise NotImplementedError('ignore label not yet supported in cascade')\n            assert foreground_labels is not None, 'We need all_labels for cascade augmentations'\n            use_labels = [i for i in foreground_labels if i != 0]\n            tr_transforms.append(MoveSegAsOneHotToData(1, use_labels, 'seg', 'data'))\n            tr_transforms.append(ApplyRandomBinaryOperatorTransform(\n                channel_idx=list(range(-len(use_labels), 0)),\n                p_per_sample=0.4,\n                key=\"data\",\n                strel_size=(1, 8),\n                p_per_label=1))\n            tr_transforms.append(\n                RemoveRandomConnectedComponentFromOneHotEncodingTransform(\n                    channel_idx=list(range(-len(use_labels), 0)),\n                    key=\"data\",\n                    p_per_sample=0.2,\n                    fill_with_other_class_p=0,\n                    dont_do_if_covers_more_than_x_percent=0.15))\n\n        tr_transforms.append(RenameTransform('seg', 'target', True))\n\n        if regions is not None:\n            # the ignore label must also be converted\n            tr_transforms.append(ConvertSegmentationToRegionsTransform(list(regions) + [ignore_label]\n                                                                       if ignore_label is not None else regions,\n                                         
                              'target', 'target'))\n\n        if deep_supervision_scales is not None:\n            tr_transforms.append(DownsampleSegForDSTransform2(deep_supervision_scales, 0, input_key='target',\n                                                              output_key='target'))\n        tr_transforms.append(NumpyToTensor(['data', 'target'], 'float'))\n        tr_transforms = Compose(tr_transforms)\n        return tr_transforms\n\n\nclass nnUNetTrainerDA5ord0(nnUNetTrainerDA5):\n    def get_dataloaders(self):\n        \"\"\"\n        changed order_resampling_data, order_resampling_seg\n        \"\"\"\n        # we use the patch size to determine whether we need 2D or 3D dataloaders. We also use it to determine whether\n        # we need to use dummy 2D augmentation (in case of 3D training) and what our initial patch size should be\n        patch_size = self.configuration_manager.patch_size\n        dim = len(patch_size)\n\n        # needed for deep supervision: how much do we need to downscale the segmentation targets for the different\n        # outputs?\n        deep_supervision_scales = self._get_deep_supervision_scales()\n\n        rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, mirror_axes = \\\n            self.configure_rotation_dummyDA_mirroring_and_inital_patch_size()\n\n        # training pipeline\n        tr_transforms = self.get_training_transforms(\n            patch_size, rotation_for_DA, deep_supervision_scales, mirror_axes, do_dummy_2d_data_aug,\n            order_resampling_data=0, order_resampling_seg=0,\n            use_mask_for_norm=self.configuration_manager.use_mask_for_norm,\n            is_cascaded=self.is_cascaded, foreground_labels=self.label_manager.all_labels,\n            regions=self.label_manager.foreground_regions if self.label_manager.has_regions else None,\n            ignore_label=self.label_manager.ignore_label)\n\n        # validation pipeline\n        val_transforms = 
self.get_validation_transforms(deep_supervision_scales,\n                                                        is_cascaded=self.is_cascaded,\n                                                        foreground_labels=self.label_manager.all_labels,\n                                                        regions=self.label_manager.foreground_regions if\n                                                        self.label_manager.has_regions else None,\n                                                        ignore_label=self.label_manager.ignore_label)\n\n        dl_tr, dl_val = self.get_plain_dataloaders(initial_patch_size, dim)\n\n        allowed_num_processes = get_allowed_n_proc_DA()\n        if allowed_num_processes == 0:\n            mt_gen_train = SingleThreadedAugmenter(dl_tr, tr_transforms)\n            mt_gen_val = SingleThreadedAugmenter(dl_val, val_transforms)\n        else:\n            mt_gen_train = LimitedLenWrapper(self.num_iterations_per_epoch, dl_tr, tr_transforms,\n                                             allowed_num_processes, 6, None, True, 0.02)\n            mt_gen_val = LimitedLenWrapper(self.num_val_iterations_per_epoch, dl_val, val_transforms,\n                                           max(1, allowed_num_processes // 2), 3, None, True, 0.02)\n\n        return mt_gen_train, mt_gen_val\n\n\ndef _brightnessadditive_localgamma_transform_scale(x, y):\n    return np.exp(np.random.uniform(np.log(x[y] // 6), np.log(x[y])))\n\n\ndef _brightness_gradient_additive_max_strength(_x, _y):\n    return np.random.uniform(-5, -1) if np.random.uniform() < 0.5 else np.random.uniform(1, 5)\n\n\ndef _local_gamma_gamma():\n    return np.random.uniform(0.01, 0.8) if np.random.uniform() < 0.5 else np.random.uniform(1.5, 4)\n\n\nclass nnUNetTrainerDA5Segord0(nnUNetTrainerDA5):\n    def get_dataloaders(self):\n        \"\"\"\n        changed order_resampling_data, order_resampling_seg\n        \"\"\"\n        # we use the patch size to determine whether we need 
2D or 3D dataloaders. We also use it to determine whether\n        # we need to use dummy 2D augmentation (in case of 3D training) and what our initial patch size should be\n        patch_size = self.configuration_manager.patch_size\n        dim = len(patch_size)\n\n        # needed for deep supervision: how much do we need to downscale the segmentation targets for the different\n        # outputs?\n        deep_supervision_scales = self._get_deep_supervision_scales()\n\n        rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, mirror_axes = \\\n            self.configure_rotation_dummyDA_mirroring_and_inital_patch_size()\n\n        # training pipeline\n        tr_transforms = self.get_training_transforms(\n            patch_size, rotation_for_DA, deep_supervision_scales, mirror_axes, do_dummy_2d_data_aug,\n            order_resampling_data=3, order_resampling_seg=0,\n            use_mask_for_norm=self.configuration_manager.use_mask_for_norm,\n            is_cascaded=self.is_cascaded, foreground_labels=self.label_manager.all_labels,\n            regions=self.label_manager.foreground_regions if self.label_manager.has_regions else None,\n            ignore_label=self.label_manager.ignore_label)\n\n        # validation pipeline\n        val_transforms = self.get_validation_transforms(deep_supervision_scales,\n                                                        is_cascaded=self.is_cascaded,\n                                                        foreground_labels=self.label_manager.all_labels,\n                                                        regions=self.label_manager.foreground_regions if\n                                                        self.label_manager.has_regions else None,\n                                                        ignore_label=self.label_manager.ignore_label)\n\n        dl_tr, dl_val = self.get_plain_dataloaders(initial_patch_size, dim)\n\n        allowed_num_processes = get_allowed_n_proc_DA()\n        if 
allowed_num_processes == 0:\n            mt_gen_train = SingleThreadedAugmenter(dl_tr, tr_transforms)\n            mt_gen_val = SingleThreadedAugmenter(dl_val, val_transforms)\n        else:\n            mt_gen_train = LimitedLenWrapper(self.num_iterations_per_epoch, dl_tr, tr_transforms,\n                                             allowed_num_processes, 6, None, True, 0.02)\n            mt_gen_val = LimitedLenWrapper(self.num_val_iterations_per_epoch, dl_val, val_transforms,\n                                           max(1, allowed_num_processes // 2), 3, None, True, 0.02)\n\n        return mt_gen_train, mt_gen_val\n\n\nclass nnUNetTrainerDA5_10epochs(nnUNetTrainerDA5):\n    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 device: torch.device = torch.device('cuda')):\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        self.num_epochs = 10\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/data_augmentation/nnUNetTrainerDAOrd0.py",
    "content": "from batchgenerators.dataloading.single_threaded_augmenter import SingleThreadedAugmenter\n\nfrom nnunetv2.training.data_augmentation.custom_transforms.limited_length_multithreaded_augmenter import \\\n    LimitedLenWrapper\nfrom nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer\nfrom nnunetv2.utilities.default_n_proc_DA import get_allowed_n_proc_DA\n\n\nclass nnUNetTrainerDAOrd0(nnUNetTrainer):\n    def get_dataloaders(self):\n        \"\"\"\n        changed order_resampling_data, order_resampling_seg\n        \"\"\"\n        # we use the patch size to determine whether we need 2D or 3D dataloaders. We also use it to determine whether\n        # we need to use dummy 2D augmentation (in case of 3D training) and what our initial patch size should be\n        patch_size = self.configuration_manager.patch_size\n        dim = len(patch_size)\n\n        # needed for deep supervision: how much do we need to downscale the segmentation targets for the different\n        # outputs?\n        deep_supervision_scales = self._get_deep_supervision_scales()\n\n        rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, mirror_axes = \\\n            self.configure_rotation_dummyDA_mirroring_and_inital_patch_size()\n\n        # training pipeline\n        tr_transforms = self.get_training_transforms(\n            patch_size, rotation_for_DA, deep_supervision_scales, mirror_axes, do_dummy_2d_data_aug,\n            order_resampling_data=0, order_resampling_seg=0,\n            use_mask_for_norm=self.configuration_manager.use_mask_for_norm,\n            is_cascaded=self.is_cascaded, foreground_labels=self.label_manager.all_labels,\n            regions=self.label_manager.foreground_regions if self.label_manager.has_regions else None,\n            ignore_label=self.label_manager.ignore_label)\n\n        # validation pipeline\n        val_transforms = self.get_validation_transforms(deep_supervision_scales,\n                                            
            is_cascaded=self.is_cascaded,\n                                                        foreground_labels=self.label_manager.all_labels,\n                                                        regions=self.label_manager.foreground_regions if\n                                                        self.label_manager.has_regions else None,\n                                                        ignore_label=self.label_manager.ignore_label)\n\n        dl_tr, dl_val = self.get_plain_dataloaders(initial_patch_size, dim)\n\n        allowed_num_processes = get_allowed_n_proc_DA()\n        if allowed_num_processes == 0:\n            mt_gen_train = SingleThreadedAugmenter(dl_tr, tr_transforms)\n            mt_gen_val = SingleThreadedAugmenter(dl_val, val_transforms)\n        else:\n            mt_gen_train = LimitedLenWrapper(self.num_iterations_per_epoch, dl_tr, tr_transforms,\n                                             allowed_num_processes, 6, None, True, 0.02)\n            mt_gen_val = LimitedLenWrapper(self.num_val_iterations_per_epoch, dl_val, val_transforms,\n                                           max(1, allowed_num_processes // 2), 3, None, True, 0.02)\n\n        return mt_gen_train, mt_gen_val\n\n\nclass nnUNetTrainer_DASegOrd0(nnUNetTrainer):\n    def get_dataloaders(self):\n        \"\"\"\n        changed order_resampling_data, order_resampling_seg\n        \"\"\"\n        # we use the patch size to determine whether we need 2D or 3D dataloaders. 
We also use it to determine whether\n        # we need to use dummy 2D augmentation (in case of 3D training) and what our initial patch size should be\n        patch_size = self.configuration_manager.patch_size\n        dim = len(patch_size)\n\n        # needed for deep supervision: how much do we need to downscale the segmentation targets for the different\n        # outputs?\n        deep_supervision_scales = self._get_deep_supervision_scales()\n\n        rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, mirror_axes = \\\n            self.configure_rotation_dummyDA_mirroring_and_inital_patch_size()\n\n        # training pipeline\n        tr_transforms = self.get_training_transforms(\n            patch_size, rotation_for_DA, deep_supervision_scales, mirror_axes, do_dummy_2d_data_aug,\n            order_resampling_data=3, order_resampling_seg=0,\n            use_mask_for_norm=self.configuration_manager.use_mask_for_norm,\n            is_cascaded=self.is_cascaded, foreground_labels=self.label_manager.all_labels,\n            regions=self.label_manager.foreground_regions if self.label_manager.has_regions else None,\n            ignore_label=self.label_manager.ignore_label)\n\n        # validation pipeline\n        val_transforms = self.get_validation_transforms(deep_supervision_scales,\n                                                        is_cascaded=self.is_cascaded,\n                                                        foreground_labels=self.label_manager.all_labels,\n                                                        regions=self.label_manager.foreground_regions if\n                                                        self.label_manager.has_regions else None,\n                                                        ignore_label=self.label_manager.ignore_label)\n\n        dl_tr, dl_val = self.get_plain_dataloaders(initial_patch_size, dim)\n\n        allowed_num_processes = get_allowed_n_proc_DA()\n        if allowed_num_processes == 0:\n      
      mt_gen_train = SingleThreadedAugmenter(dl_tr, tr_transforms)\n            mt_gen_val = SingleThreadedAugmenter(dl_val, val_transforms)\n        else:\n            mt_gen_train = LimitedLenWrapper(self.num_iterations_per_epoch, dl_tr, tr_transforms,\n                                             allowed_num_processes, 6, None, True, 0.02)\n            mt_gen_val = LimitedLenWrapper(self.num_val_iterations_per_epoch, dl_val, val_transforms,\n                                           max(1, allowed_num_processes // 2), 3, None, True, 0.02)\n\n        return mt_gen_train, mt_gen_val\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/data_augmentation/nnUNetTrainerNoDA.py",
    "content": "from typing import Union, Tuple, List\n\nfrom batchgenerators.transforms.abstract_transforms import AbstractTransform\n\nfrom nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer\nimport numpy as np\n\n\nclass nnUNetTrainerNoDA(nnUNetTrainer):\n    @staticmethod\n    def get_training_transforms(patch_size: Union[np.ndarray, Tuple[int]],\n                                rotation_for_DA: dict,\n                                deep_supervision_scales: Union[List, Tuple, None],\n                                mirror_axes: Tuple[int, ...],\n                                do_dummy_2d_data_aug: bool,\n                                order_resampling_data: int = 1,\n                                order_resampling_seg: int = 0,\n                                border_val_seg: int = -1,\n                                use_mask_for_norm: List[bool] = None,\n                                is_cascaded: bool = False,\n                                foreground_labels: Union[Tuple[int, ...], List[int]] = None,\n                                regions: List[Union[List[int], Tuple[int, ...], int]] = None,\n                                ignore_label: int = None) -> AbstractTransform:\n        return nnUNetTrainer.get_validation_transforms(deep_supervision_scales, is_cascaded, foreground_labels,\n                                                       regions, ignore_label)\n\n    def get_plain_dataloaders(self, initial_patch_size: Tuple[int, ...], dim: int):\n        return super().get_plain_dataloaders(\n            initial_patch_size=self.configuration_manager.patch_size,\n            dim=dim\n        )\n\n    def configure_rotation_dummyDA_mirroring_and_inital_patch_size(self):\n        # we need to disable mirroring here so that no mirroring will be applied in inferene!\n        rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, mirror_axes = \\\n            super().configure_rotation_dummyDA_mirroring_and_inital_patch_size()\n        
mirror_axes = None\n        self.inference_allowed_mirroring_axes = None\n        return rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, mirror_axes\n\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/data_augmentation/nnUNetTrainerNoMirroring.py",
    "content": "from nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer\n\n\nclass nnUNetTrainerNoMirroring(nnUNetTrainer):\n    def configure_rotation_dummyDA_mirroring_and_inital_patch_size(self):\n        rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, mirror_axes = \\\n            super().configure_rotation_dummyDA_mirroring_and_inital_patch_size()\n        mirror_axes = None\n        self.inference_allowed_mirroring_axes = None\n        return rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, mirror_axes\n\n\nclass nnUNetTrainer_onlyMirror01(nnUNetTrainer):\n    \"\"\"\n    Only mirrors along spatial axes 0 and 1 for 3D and 0 for 2D\n    \"\"\"\n    def configure_rotation_dummyDA_mirroring_and_inital_patch_size(self):\n        rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, mirror_axes = \\\n            super().configure_rotation_dummyDA_mirroring_and_inital_patch_size()\n        patch_size = self.configuration_manager.patch_size\n        dim = len(patch_size)\n        if dim == 2:\n            mirror_axes = (0, )\n        else:\n            mirror_axes = (0, 1)\n        self.inference_allowed_mirroring_axes = mirror_axes\n        return rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, mirror_axes\n\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/loss/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/loss/nnUNetTrainerCELoss.py",
    "content": "import torch\nfrom nnunetv2.training.loss.deep_supervision import DeepSupervisionWrapper\nfrom nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer\nfrom nnunetv2.training.loss.robust_ce_loss import RobustCrossEntropyLoss\nimport numpy as np\n\n\nclass nnUNetTrainerCELoss(nnUNetTrainer):\n    def _build_loss(self):\n        assert not self.label_manager.has_regions, \"regions not supported by this trainer\"\n        loss = RobustCrossEntropyLoss(\n            weight=None, ignore_index=self.label_manager.ignore_label if self.label_manager.has_ignore_label else -100\n        )\n\n        # we give each output a weight which decreases exponentially (division by 2) as the resolution decreases\n        # this gives higher resolution outputs more weight in the loss\n        if self.enable_deep_supervision:\n            deep_supervision_scales = self._get_deep_supervision_scales()\n            weights = np.array([1 / (2**i) for i in range(len(deep_supervision_scales))])\n            weights[-1] = 0\n\n            # we don't use the lowest 2 outputs. Normalize weights so that they sum to 1\n            weights = weights / weights.sum()\n            # now wrap the loss\n            loss = DeepSupervisionWrapper(loss, weights)\n        return loss\n\n\nclass nnUNetTrainerCELoss_5epochs(nnUNetTrainerCELoss):\n    def __init__(\n        self,\n        plans: dict,\n        configuration: str,\n        fold: int,\n        dataset_json: dict,\n        unpack_dataset: bool = True,\n        device: torch.device = torch.device(\"cuda\"),\n    ):\n        \"\"\"used for debugging plans etc\"\"\"\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        self.num_epochs = 5\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/loss/nnUNetTrainerDiceLoss.py",
    "content": "import numpy as np\nimport torch\n\nfrom nnunetv2.training.loss.compound_losses import DC_and_BCE_loss, DC_and_CE_loss\nfrom nnunetv2.training.loss.deep_supervision import DeepSupervisionWrapper\nfrom nnunetv2.training.loss.dice import MemoryEfficientSoftDiceLoss\nfrom nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer\nfrom nnunetv2.utilities.helpers import softmax_helper_dim1\n\n\nclass nnUNetTrainerDiceLoss(nnUNetTrainer):\n    def _build_loss(self):\n        loss = MemoryEfficientSoftDiceLoss(**{'batch_dice': self.configuration_manager.batch_dice,\n                                    'do_bg': self.label_manager.has_regions, 'smooth': 1e-5, 'ddp': self.is_ddp},\n                            apply_nonlin=torch.sigmoid if self.label_manager.has_regions else softmax_helper_dim1)\n\n        if self.enable_deep_supervision:\n            deep_supervision_scales = self._get_deep_supervision_scales()\n\n            # we give each output a weight which decreases exponentially (division by 2) as the resolution decreases\n            # this gives higher resolution outputs more weight in the loss\n            weights = np.array([1 / (2 ** i) for i in range(len(deep_supervision_scales))])\n            weights[-1] = 0\n\n            # we don't use the lowest 2 outputs. 
Normalize weights so that they sum to 1\n            weights = weights / weights.sum()\n            # now wrap the loss\n            loss = DeepSupervisionWrapper(loss, weights)\n        return loss\n\n\nclass nnUNetTrainerDiceCELoss_noSmooth(nnUNetTrainer):\n    def _build_loss(self):\n        # set smooth to 0\n        if self.label_manager.has_regions:\n            loss = DC_and_BCE_loss({},\n                                   {'batch_dice': self.configuration_manager.batch_dice,\n                                    'do_bg': True, 'smooth': 0, 'ddp': self.is_ddp},\n                                   use_ignore_label=self.label_manager.ignore_label is not None,\n                                   dice_class=MemoryEfficientSoftDiceLoss)\n        else:\n            loss = DC_and_CE_loss({'batch_dice': self.configuration_manager.batch_dice,\n                                   'smooth': 0, 'do_bg': False, 'ddp': self.is_ddp}, {}, weight_ce=1, weight_dice=1,\n                                  ignore_label=self.label_manager.ignore_label,\n                                  dice_class=MemoryEfficientSoftDiceLoss)\n\n        if self.enable_deep_supervision:\n            deep_supervision_scales = self._get_deep_supervision_scales()\n\n            # we give each output a weight which decreases exponentially (division by 2) as the resolution decreases\n            # this gives higher resolution outputs more weight in the loss\n            weights = np.array([1 / (2 ** i) for i in range(len(deep_supervision_scales))])\n            weights[-1] = 0\n\n            # we don't use the lowest 2 outputs. Normalize weights so that they sum to 1\n            weights = weights / weights.sum()\n            # now wrap the loss\n            loss = DeepSupervisionWrapper(loss, weights)\n        return loss\n\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/loss/nnUNetTrainerTopkLoss.py",
    "content": "from nnunetv2.training.loss.compound_losses import DC_and_topk_loss\nfrom nnunetv2.training.loss.deep_supervision import DeepSupervisionWrapper\nfrom nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer\nimport numpy as np\nfrom nnunetv2.training.loss.robust_ce_loss import TopKLoss\n\n\nclass nnUNetTrainerTopk10Loss(nnUNetTrainer):\n    def _build_loss(self):\n        assert not self.label_manager.has_regions, \"regions not supported by this trainer\"\n        loss = TopKLoss(\n            ignore_index=self.label_manager.ignore_label if self.label_manager.has_ignore_label else -100, k=10\n        )\n\n        if self.enable_deep_supervision:\n            deep_supervision_scales = self._get_deep_supervision_scales()\n\n            # we give each output a weight which decreases exponentially (division by 2) as the resolution decreases\n            # this gives higher resolution outputs more weight in the loss\n            weights = np.array([1 / (2**i) for i in range(len(deep_supervision_scales))])\n            weights[-1] = 0\n\n            # we don't use the lowest 2 outputs. 
Normalize weights so that they sum to 1\n            weights = weights / weights.sum()\n            # now wrap the loss\n            loss = DeepSupervisionWrapper(loss, weights)\n        return loss\n\n\nclass nnUNetTrainerTopk10LossLS01(nnUNetTrainer):\n    def _build_loss(self):\n        assert not self.label_manager.has_regions, \"regions not supported by this trainer\"\n        loss = TopKLoss(\n            ignore_index=self.label_manager.ignore_label if self.label_manager.has_ignore_label else -100,\n            k=10,\n            label_smoothing=0.1,\n        )\n\n        if self.enable_deep_supervision:\n            deep_supervision_scales = self._get_deep_supervision_scales()\n\n            # we give each output a weight which decreases exponentially (division by 2) as the resolution decreases\n            # this gives higher resolution outputs more weight in the loss\n            weights = np.array([1 / (2**i) for i in range(len(deep_supervision_scales))])\n            weights[-1] = 0\n\n            # we don't use the lowest 2 outputs. 
Normalize weights so that they sum to 1\n            weights = weights / weights.sum()\n            # now wrap the loss\n            loss = DeepSupervisionWrapper(loss, weights)\n        return loss\n\n\nclass nnUNetTrainerDiceTopK10Loss(nnUNetTrainer):\n    def _build_loss(self):\n        assert not self.label_manager.has_regions, \"regions not supported by this trainer\"\n        loss = DC_and_topk_loss(\n            {\"batch_dice\": self.configuration_manager.batch_dice, \"smooth\": 1e-5, \"do_bg\": False, \"ddp\": self.is_ddp},\n            {\"k\": 10, \"label_smoothing\": 0.0},\n            weight_ce=1,\n            weight_dice=1,\n            ignore_label=self.label_manager.ignore_label,\n        )\n        if self.enable_deep_supervision:\n            deep_supervision_scales = self._get_deep_supervision_scales()\n\n            # we give each output a weight which decreases exponentially (division by 2) as the resolution decreases\n            # this gives higher resolution outputs more weight in the loss\n            weights = np.array([1 / (2**i) for i in range(len(deep_supervision_scales))])\n            weights[-1] = 0\n\n            # we don't use the lowest 2 outputs. Normalize weights so that they sum to 1\n            weights = weights / weights.sum()\n            # now wrap the loss\n            loss = DeepSupervisionWrapper(loss, weights)\n        return loss\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/lr_schedule/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/lr_schedule/nnUNetTrainerCosAnneal.py",
    "content": "import torch\nfrom torch.optim.lr_scheduler import CosineAnnealingLR\n\nfrom nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer\n\n\nclass nnUNetTrainerCosAnneal(nnUNetTrainer):\n    def configure_optimizers(self):\n        optimizer = torch.optim.SGD(self.network.parameters(), self.initial_lr, weight_decay=self.weight_decay,\n                                    momentum=0.99, nesterov=True)\n        lr_scheduler = CosineAnnealingLR(optimizer, T_max=self.num_epochs)\n        return optimizer, lr_scheduler\n\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/network_architecture/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/network_architecture/nnUNetTrainerBN.py",
    "content": "from dynamic_network_architectures.architectures.unet import ResidualEncoderUNet, PlainConvUNet\nfrom dynamic_network_architectures.building_blocks.helper import convert_dim_to_conv_op, get_matching_batchnorm\nfrom dynamic_network_architectures.initialization.weight_init import init_last_bn_before_add_to_0, InitWeights_He\nfrom nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer\nfrom nnunetv2.utilities.plans_handling.plans_handler import ConfigurationManager, PlansManager\nfrom torch import nn\n\n\nclass nnUNetTrainerBN(nnUNetTrainer):\n    @staticmethod\n    def build_network_architecture(plans_manager: PlansManager,\n                                   dataset_json,\n                                   configuration_manager: ConfigurationManager,\n                                   num_input_channels,\n                                   enable_deep_supervision: bool = True) -> nn.Module:\n        num_stages = len(configuration_manager.conv_kernel_sizes)\n\n        dim = len(configuration_manager.conv_kernel_sizes[0])\n        conv_op = convert_dim_to_conv_op(dim)\n\n        label_manager = plans_manager.get_label_manager(dataset_json)\n\n        segmentation_network_class_name = configuration_manager.UNet_class_name\n        mapping = {\n            'PlainConvUNet': PlainConvUNet,\n            'ResidualEncoderUNet': ResidualEncoderUNet\n        }\n        kwargs = {\n            'PlainConvUNet': {\n                'conv_bias': True,\n                'norm_op': get_matching_batchnorm(conv_op),\n                'norm_op_kwargs': {'eps': 1e-5, 'affine': True},\n                'dropout_op': None, 'dropout_op_kwargs': None,\n                'nonlin': nn.LeakyReLU, 'nonlin_kwargs': {'inplace': True},\n            },\n            'ResidualEncoderUNet': {\n                'conv_bias': True,\n                'norm_op': get_matching_batchnorm(conv_op),\n                'norm_op_kwargs': {'eps': 1e-5, 'affine': True},\n                
'dropout_op': None, 'dropout_op_kwargs': None,\n                'nonlin': nn.LeakyReLU, 'nonlin_kwargs': {'inplace': True},\n            }\n        }\n        assert segmentation_network_class_name in mapping.keys(), 'The network architecture specified by the plans file ' \\\n                                                                  'is non-standard (maybe your own?). Yo\\'ll have to dive ' \\\n                                                                  'into either this ' \\\n                                                                  'function (get_network_from_plans) or ' \\\n                                                                  'the init of your nnUNetModule to accommodate that.'\n        network_class = mapping[segmentation_network_class_name]\n\n        conv_or_blocks_per_stage = {\n            'n_conv_per_stage'\n            if network_class != ResidualEncoderUNet else 'n_blocks_per_stage': configuration_manager.n_conv_per_stage_encoder,\n            'n_conv_per_stage_decoder': configuration_manager.n_conv_per_stage_decoder\n        }\n        # network class name!!\n        model = network_class(\n            input_channels=num_input_channels,\n            n_stages=num_stages,\n            features_per_stage=[min(configuration_manager.UNet_base_num_features * 2 ** i,\n                                    configuration_manager.unet_max_num_features) for i in range(num_stages)],\n            conv_op=conv_op,\n            kernel_sizes=configuration_manager.conv_kernel_sizes,\n            strides=configuration_manager.pool_op_kernel_sizes,\n            num_classes=label_manager.num_segmentation_heads,\n            deep_supervision=enable_deep_supervision,\n            **conv_or_blocks_per_stage,\n            **kwargs[segmentation_network_class_name]\n        )\n        model.apply(InitWeights_He(1e-2))\n        if network_class == ResidualEncoderUNet:\n            model.apply(init_last_bn_before_add_to_0)\n        return model\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/network_architecture/nnUNetTrainerNoDeepSupervision.py",
    "content": "from nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer\nimport torch\n\n\nclass nnUNetTrainerNoDeepSupervision(nnUNetTrainer):\n    def __init__(\n        self,\n        plans: dict,\n        configuration: str,\n        fold: int,\n        dataset_json: dict,\n        unpack_dataset: bool = True,\n        device: torch.device = torch.device(\"cuda\"),\n    ):\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        self.enable_deep_supervision = False\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/optimizer/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/optimizer/nnUNetTrainerAdam.py",
    "content": "import torch\nfrom torch.optim import Adam, AdamW\n\nfrom nnunetv2.training.lr_scheduler.polylr import PolyLRScheduler\nfrom nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer\n\n\nclass nnUNetTrainerAdam(nnUNetTrainer):\n    def configure_optimizers(self):\n        optimizer = AdamW(self.network.parameters(),\n                          lr=self.initial_lr,\n                          weight_decay=self.weight_decay,\n                          amsgrad=True)\n        # optimizer = torch.optim.SGD(self.network.parameters(), self.initial_lr, weight_decay=self.weight_decay,\n        #                             momentum=0.99, nesterov=True)\n        lr_scheduler = PolyLRScheduler(optimizer, self.initial_lr, self.num_epochs)\n        return optimizer, lr_scheduler\n\n\nclass nnUNetTrainerVanillaAdam(nnUNetTrainer):\n    def configure_optimizers(self):\n        optimizer = Adam(self.network.parameters(),\n                         lr=self.initial_lr,\n                         weight_decay=self.weight_decay)\n        # optimizer = torch.optim.SGD(self.network.parameters(), self.initial_lr, weight_decay=self.weight_decay,\n        #                             momentum=0.99, nesterov=True)\n        lr_scheduler = PolyLRScheduler(optimizer, self.initial_lr, self.num_epochs)\n        return optimizer, lr_scheduler\n\n\nclass nnUNetTrainerVanillaAdam1en3(nnUNetTrainerVanillaAdam):\n    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 device: torch.device = torch.device('cuda')):\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        self.initial_lr = 1e-3\n\n\nclass nnUNetTrainerVanillaAdam3en4(nnUNetTrainerVanillaAdam):\n    # https://twitter.com/karpathy/status/801621764144971776?lang=en\n    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 
device: torch.device = torch.device('cuda')):\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        self.initial_lr = 3e-4\n\n\nclass nnUNetTrainerAdam1en3(nnUNetTrainerAdam):\n    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 device: torch.device = torch.device('cuda')):\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        self.initial_lr = 1e-3\n\n\nclass nnUNetTrainerAdam3en4(nnUNetTrainerAdam):\n    # https://twitter.com/karpathy/status/801621764144971776?lang=en\n    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 device: torch.device = torch.device('cuda')):\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        self.initial_lr = 3e-4\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/optimizer/nnUNetTrainerAdan.py",
    "content": "import torch\n\nfrom nnunetv2.training.lr_scheduler.polylr import PolyLRScheduler\nfrom nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer\nfrom torch.optim.lr_scheduler import CosineAnnealingLR\ntry:\n    from adan_pytorch import Adan\nexcept ImportError:\n    Adan = None\n\n\nclass nnUNetTrainerAdan(nnUNetTrainer):\n    def configure_optimizers(self):\n        if Adan is None:\n            raise RuntimeError('This trainer requires adan_pytorch to be installed, install with \"pip install adan-pytorch\"')\n        optimizer = Adan(self.network.parameters(),\n                         lr=self.initial_lr,\n                         # betas=(0.02, 0.08, 0.01), defaults\n                         weight_decay=self.weight_decay)\n        # optimizer = torch.optim.SGD(self.network.parameters(), self.initial_lr, weight_decay=self.weight_decay,\n        #                             momentum=0.99, nesterov=True)\n        lr_scheduler = PolyLRScheduler(optimizer, self.initial_lr, self.num_epochs)\n        return optimizer, lr_scheduler\n\n\nclass nnUNetTrainerAdan1en3(nnUNetTrainerAdan):\n    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 device: torch.device = torch.device('cuda')):\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        self.initial_lr = 1e-3\n\n\nclass nnUNetTrainerAdan3en4(nnUNetTrainerAdan):\n    # https://twitter.com/karpathy/status/801621764144971776?lang=en\n    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 device: torch.device = torch.device('cuda')):\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        self.initial_lr = 3e-4\n\n\nclass nnUNetTrainerAdan1en1(nnUNetTrainerAdan):\n    # this trainer makes no sense -> nan!\n    def __init__(self, plans: dict, 
configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 device: torch.device = torch.device('cuda')):\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        self.initial_lr = 1e-1\n\n\nclass nnUNetTrainerAdanCosAnneal(nnUNetTrainerAdan):\n    # def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n    #              device: torch.device = torch.device('cuda')):\n    #     super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n    #     self.num_epochs = 15\n\n    def configure_optimizers(self):\n        if Adan is None:\n            raise RuntimeError('This trainer requires adan_pytorch to be installed, install with \"pip install adan-pytorch\"')\n        optimizer = Adan(self.network.parameters(),\n                         lr=self.initial_lr,\n                         # betas=(0.02, 0.08, 0.01), defaults\n                         weight_decay=self.weight_decay)\n        # optimizer = torch.optim.SGD(self.network.parameters(), self.initial_lr, weight_decay=self.weight_decay,\n        #                             momentum=0.99, nesterov=True)\n        lr_scheduler = CosineAnnealingLR(optimizer, T_max=self.num_epochs)\n        return optimizer, lr_scheduler\n\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/sampling/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/sampling/nnUNetTrainer_probabilisticOversampling.py",
    "content": "from typing import Tuple\n\nimport torch\n\nfrom nnunetv2.training.dataloading.data_loader_2d import nnUNetDataLoader2D\nfrom nnunetv2.training.dataloading.data_loader_3d import nnUNetDataLoader3D\nfrom nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer\nimport numpy as np\n\n\nclass nnUNetTrainer_probabilisticOversampling(nnUNetTrainer):\n    \"\"\"\n    sampling of foreground happens randomly and not for the last 33% of samples in a batch\n    since most trainings happen with batch size 2 and nnunet guarantees at least one fg sample, effectively this can\n    be 50%\n    Here we compute the actual oversampling percentage used by nnUNetTrainer in order to be as consistent as possible.\n    If we switch to this oversampling then we can keep it at a constant 0.33 or whatever.\n    \"\"\"\n    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 device: torch.device = torch.device('cuda')):\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        self.oversample_foreground_percent = float(np.mean(\n            [not sample_idx < round(self.configuration_manager.batch_size * (1 - self.oversample_foreground_percent))\n             for sample_idx in range(self.configuration_manager.batch_size)]))\n        self.print_to_log_file(f\"self.oversample_foreground_percent {self.oversample_foreground_percent}\")\n\n    def get_plain_dataloaders(self, initial_patch_size: Tuple[int, ...], dim: int):\n        dataset_tr, dataset_val = self.get_tr_and_val_datasets()\n\n        if dim == 2:\n            dl_tr = nnUNetDataLoader2D(dataset_tr,\n                                       self.batch_size,\n                                       initial_patch_size,\n                                       self.configuration_manager.patch_size,\n                                       self.label_manager,\n                                       
oversample_foreground_percent=self.oversample_foreground_percent,\n                                       sampling_probabilities=None, pad_sides=None, probabilistic_oversampling=True)\n            dl_val = nnUNetDataLoader2D(dataset_val,\n                                        self.batch_size,\n                                        self.configuration_manager.patch_size,\n                                        self.configuration_manager.patch_size,\n                                        self.label_manager,\n                                        oversample_foreground_percent=self.oversample_foreground_percent,\n                                        sampling_probabilities=None, pad_sides=None, probabilistic_oversampling=True)\n        else:\n            dl_tr = nnUNetDataLoader3D(dataset_tr,\n                                       self.batch_size,\n                                       initial_patch_size,\n                                       self.configuration_manager.patch_size,\n                                       self.label_manager,\n                                       oversample_foreground_percent=self.oversample_foreground_percent,\n                                       sampling_probabilities=None, pad_sides=None, probabilistic_oversampling=True)\n            dl_val = nnUNetDataLoader3D(dataset_val,\n                                        self.batch_size,\n                                        self.configuration_manager.patch_size,\n                                        self.configuration_manager.patch_size,\n                                        self.label_manager,\n                                        oversample_foreground_percent=self.oversample_foreground_percent,\n                                        sampling_probabilities=None, pad_sides=None, probabilistic_oversampling=True)\n        return dl_tr, dl_val\n\n\nclass nnUNetTrainer_probabilisticOversampling_033(nnUNetTrainer_probabilisticOversampling):\n    def __init__(self, 
plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 device: torch.device = torch.device('cuda')):\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        self.oversample_foreground_percent = 0.33\n\n\nclass nnUNetTrainer_probabilisticOversampling_010(nnUNetTrainer_probabilisticOversampling):\n    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 device: torch.device = torch.device('cuda')):\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        self.oversample_foreground_percent = 0.1\n\n\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/training_length/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/training_length/nnUNetTrainer_Xepochs.py",
    "content": "import torch\n\nfrom nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer\n\n\nclass nnUNetTrainer_5epochs(nnUNetTrainer):\n    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 device: torch.device = torch.device('cuda')):\n        \"\"\"used for debugging plans etc\"\"\"\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        self.num_epochs = 5\n\n\nclass nnUNetTrainer_1epoch(nnUNetTrainer):\n    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 device: torch.device = torch.device('cuda')):\n        \"\"\"used for debugging plans etc\"\"\"\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        self.num_epochs = 1\n\n\nclass nnUNetTrainer_10epochs(nnUNetTrainer):\n    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 device: torch.device = torch.device('cuda')):\n        \"\"\"used for debugging plans etc\"\"\"\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        self.num_epochs = 10\n\n\nclass nnUNetTrainer_20epochs(nnUNetTrainer):\n    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 device: torch.device = torch.device('cuda')):\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        self.num_epochs = 20\n\n\nclass nnUNetTrainer_50epochs(nnUNetTrainer):\n    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 device: torch.device = torch.device('cuda')):\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        
self.num_epochs = 50\n\n\nclass nnUNetTrainer_100epochs(nnUNetTrainer):\n    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 device: torch.device = torch.device('cuda')):\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        self.num_epochs = 100\n\n\nclass nnUNetTrainer_250epochs(nnUNetTrainer):\n    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 device: torch.device = torch.device('cuda')):\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        self.num_epochs = 250\n\n\nclass nnUNetTrainer_2000epochs(nnUNetTrainer):\n    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 device: torch.device = torch.device('cuda')):\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        self.num_epochs = 2000\n\n    \nclass nnUNetTrainer_4000epochs(nnUNetTrainer):\n    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 device: torch.device = torch.device('cuda')):\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        self.num_epochs = 4000\n\n\nclass nnUNetTrainer_8000epochs(nnUNetTrainer):\n    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 device: torch.device = torch.device('cuda')):\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        self.num_epochs = 8000\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/training_length/nnUNetTrainer_Xepochs_NoMirroring.py",
    "content": "import torch\n\nfrom nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer\n\n\nclass nnUNetTrainer_250epochs_NoMirroring(nnUNetTrainer):\n    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 device: torch.device = torch.device('cuda')):\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        self.num_epochs = 250\n\n    def configure_rotation_dummyDA_mirroring_and_inital_patch_size(self):\n        rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, mirror_axes = \\\n            super().configure_rotation_dummyDA_mirroring_and_inital_patch_size()\n        mirror_axes = None\n        self.inference_allowed_mirroring_axes = None\n        return rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, mirror_axes\n\n\nclass nnUNetTrainer_2000epochs_NoMirroring(nnUNetTrainer):\n    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 device: torch.device = torch.device('cuda')):\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        self.num_epochs = 2000\n\n    def configure_rotation_dummyDA_mirroring_and_inital_patch_size(self):\n        rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, mirror_axes = \\\n            super().configure_rotation_dummyDA_mirroring_and_inital_patch_size()\n        mirror_axes = None\n        self.inference_allowed_mirroring_axes = None\n        return rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, mirror_axes\n\n    \nclass nnUNetTrainer_4000epochs_NoMirroring(nnUNetTrainer):\n    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 device: torch.device = torch.device('cuda')):\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, 
device)\n        self.num_epochs = 4000\n\n    def configure_rotation_dummyDA_mirroring_and_inital_patch_size(self):\n        rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, mirror_axes = \\\n            super().configure_rotation_dummyDA_mirroring_and_inital_patch_size()\n        mirror_axes = None\n        self.inference_allowed_mirroring_axes = None\n        return rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, mirror_axes\n\n\nclass nnUNetTrainer_8000epochs_NoMirroring(nnUNetTrainer):\n    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,\n                 device: torch.device = torch.device('cuda')):\n        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)\n        self.num_epochs = 8000\n\n    def configure_rotation_dummyDA_mirroring_and_inital_patch_size(self):\n        rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, mirror_axes = \\\n            super().configure_rotation_dummyDA_mirroring_and_inital_patch_size()\n        mirror_axes = None\n        self.inference_allowed_mirroring_axes = None\n        return rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, mirror_axes\n\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/vit.py",
    "content": "from typing import Union, Type, List, Tuple\r\n\r\nimport torch\r\nfrom dynamic_network_architectures.building_blocks.residual_encoders import ResidualEncoder\r\nfrom dynamic_network_architectures.building_blocks.residual import BasicBlockD, BottleneckD\r\nfrom torch import nn\r\nfrom torch.nn.modules.conv import _ConvNd\r\nfrom torch.nn.modules.dropout import _DropoutNd\r\n\r\nfrom dynamic_network_architectures.building_blocks.plain_conv_encoder import PlainConvEncoder\r\nfrom dynamic_network_architectures.building_blocks.unet_decoder import UNetDecoder\r\nfrom dynamic_network_architectures.building_blocks.helper import convert_conv_op_to_dim\r\n\r\nimport numpy as np\r\nfrom monai.networks.nets.swin_unetr import *\r\nfrom monai.networks.blocks import PatchEmbed, UnetOutBlock, UnetrBasicBlock, UnetrUpBlock\r\nfrom monai.networks.nets.swin_unetr import SwinTransformer as SwinViT\r\nfrom monai.utils import ensure_tuple_rep\r\nimport argparse\r\nimport torch.nn.functional as F\r\n\r\n\r\nclass Swin(nn.Module):\r\n    def __init__(self, input_channels: int,\r\n                 num_classes: int):\r\n\r\n        super(Swin, self).__init__()\r\n\r\n        spatial_dims = 3\r\n        feature_size = 48\r\n\r\n        patch_size = ensure_tuple_rep(2, spatial_dims)\r\n        window_size = ensure_tuple_rep(7, spatial_dims)\r\n\r\n        self.swinViT = SwinViT(\r\n            in_chans=input_channels,\r\n            embed_dim=feature_size,\r\n            window_size=window_size,\r\n            patch_size=patch_size,\r\n            depths=[2, 2, 2, 2],\r\n            num_heads=[3, 6, 12, 24],\r\n            mlp_ratio=4.0,\r\n            qkv_bias=True,\r\n            drop_rate=0.0,\r\n            attn_drop_rate=0.0,\r\n            drop_path_rate=0.0,\r\n            norm_layer=torch.nn.LayerNorm,\r\n            use_checkpoint=True,\r\n            spatial_dims=spatial_dims,\r\n            use_v2=True,\r\n        )\r\n        norm_name = 'instance'\r\n        
self.encoder1 = UnetrBasicBlock(\r\n            spatial_dims=spatial_dims,\r\n            in_channels=input_channels,\r\n            out_channels=feature_size,\r\n            kernel_size=3,\r\n            stride=1,\r\n            norm_name=norm_name,\r\n            res_block=True,\r\n        )\r\n\r\n        self.encoder2 = UnetrBasicBlock(\r\n            spatial_dims=spatial_dims,\r\n            in_channels=feature_size,\r\n            out_channels=feature_size,\r\n            kernel_size=3,\r\n            stride=1,\r\n            norm_name=norm_name,\r\n            res_block=True,\r\n        )\r\n\r\n        self.encoder3 = UnetrBasicBlock(\r\n            spatial_dims=spatial_dims,\r\n            in_channels=2 * feature_size,\r\n            out_channels=2 * feature_size,\r\n            kernel_size=3,\r\n            stride=1,\r\n            norm_name=norm_name,\r\n            res_block=True,\r\n        )\r\n\r\n        self.encoder4 = UnetrBasicBlock(\r\n            spatial_dims=spatial_dims,\r\n            in_channels=4 * feature_size,\r\n            out_channels=4 * feature_size,\r\n            kernel_size=3,\r\n            stride=1,\r\n            norm_name=norm_name,\r\n            res_block=True,\r\n        )\r\n\r\n        self.encoder10 = UnetrBasicBlock(\r\n            spatial_dims=spatial_dims,\r\n            in_channels=16 * feature_size,\r\n            out_channels=16 * feature_size,\r\n            kernel_size=3,\r\n            stride=1,\r\n            norm_name=norm_name,\r\n            res_block=True,\r\n        )\r\n\r\n        self.decoder5 = UnetrUpBlock(\r\n            spatial_dims=spatial_dims,\r\n            in_channels=16 * feature_size,\r\n            out_channels=8 * feature_size,\r\n            kernel_size=3,\r\n            upsample_kernel_size=2,\r\n            norm_name=norm_name,\r\n            res_block=True,\r\n        )\r\n\r\n        self.decoder4 = UnetrUpBlock(\r\n            spatial_dims=spatial_dims,\r\n            
in_channels=feature_size * 8,\r\n            out_channels=feature_size * 4,\r\n            kernel_size=3,\r\n            upsample_kernel_size=2,\r\n            norm_name=norm_name,\r\n            res_block=True,\r\n        )\r\n\r\n        self.decoder3 = UnetrUpBlock(\r\n            spatial_dims=spatial_dims,\r\n            in_channels=feature_size * 4,\r\n            out_channels=feature_size * 2,\r\n            kernel_size=3,\r\n            upsample_kernel_size=2,\r\n            norm_name=norm_name,\r\n            res_block=True,\r\n        )\r\n        self.decoder2 = UnetrUpBlock(\r\n            spatial_dims=spatial_dims,\r\n            in_channels=feature_size * 2,\r\n            out_channels=feature_size,\r\n            kernel_size=3,\r\n            upsample_kernel_size=2,\r\n            norm_name=norm_name,\r\n            res_block=True,\r\n        )\r\n\r\n        self.decoder1 = UnetrUpBlock(\r\n            spatial_dims=spatial_dims,\r\n            in_channels=feature_size,\r\n            out_channels=feature_size,\r\n            kernel_size=3,\r\n            upsample_kernel_size=2,\r\n            norm_name=norm_name,\r\n            res_block=True,\r\n        )\r\n        self.out = UnetOutBlock(spatial_dims=spatial_dims, in_channels=feature_size, out_channels=num_classes)\r\n\r\n    def forward(self, x_in):\r\n        hidden_states_out = self.swinViT(x_in)\r\n\r\n        enc0 = self.encoder1(x_in)\r\n        enc1 = self.encoder2(hidden_states_out[0])\r\n        enc2 = self.encoder3(hidden_states_out[1])\r\n        enc3 = self.encoder4(hidden_states_out[2])\r\n        dec4 = self.encoder10(hidden_states_out[4])\r\n\r\n        dec3 = self.decoder5(dec4, hidden_states_out[3])\r\n        dec2 = self.decoder4(dec3, enc3)\r\n        dec1 = self.decoder3(dec2, enc2)\r\n        dec0 = self.decoder2(dec1, enc1)\r\n        out = self.decoder1(dec0, enc0)\r\n\r\n        return self.out(out)\r\n\r\n    def compute_conv_feature_map_size(self, input_size):\r\n        
\"\"\"\r\n        IMPORTANT: input_size is the input_size of the encoder!\r\n        :param input_size:\r\n        :return:\r\n        \"\"\"\r\n        # first we need to compute the skip sizes. Skip bottleneck because all output feature maps of our ops will at\r\n        # least have the size of the skip above that (therefore -1)\r\n        skip_sizes = []\r\n        for s in range(len(self.encoder.strides) - 1):\r\n            skip_sizes.append([i // j for i, j in zip(input_size, self.encoder.strides[s])])\r\n            input_size = skip_sizes[-1]\r\n        # print(skip_sizes)\r\n\r\n        assert len(skip_sizes) == len(self.stages)\r\n\r\n        # our ops are the other way around, so let's match things up\r\n        output = np.int64(0)\r\n        for s in range(len(self.stages)):\r\n            # print(skip_sizes[-(s+1)], self.encoder.output_channels[-(s+2)])\r\n            # conv blocks\r\n            output += self.stages[s].compute_conv_feature_map_size(skip_sizes[-(s+1)])\r\n            # trans conv\r\n            output += np.prod([self.encoder.output_channels[-(s+2)], *skip_sizes[-(s+1)]], dtype=np.int64)\r\n            # segmentation\r\n            if self.deep_supervision or (s == (len(self.stages) - 1)):\r\n                output += np.prod([self.num_classes, *skip_sizes[-(s+1)]], dtype=np.int64)\r\n        return output\r\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/utilities/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/utilities/collate_outputs.py",
    "content": "from typing import List\n\nimport numpy as np\n\n\ndef collate_outputs(outputs: List[dict]):\n    \"\"\"\n    used to collate default train_step and validation_step outputs. If you want something different then you gotta\n    extend this\n\n    we expect outputs to be a list of dictionaries where each of the dict has the same set of keys\n    \"\"\"\n    collated = {}\n    for k in outputs[0].keys():\n        if np.isscalar(outputs[0][k]):\n            collated[k] = [o[k] for o in outputs]\n        elif isinstance(outputs[0][k], np.ndarray):\n            collated[k] = np.vstack([o[k][None] for o in outputs])\n        elif isinstance(outputs[0][k], list):\n            collated[k] = [item for o in outputs for item in o[k]]\n        else:\n            raise ValueError(f'Cannot collate input of type {type(outputs[0][k])}. '\n                             f'Modify collate_outputs to add this functionality')\n    return collated"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/utilities/dataset_name_id_conversion.py",
    "content": "#    Copyright 2020 Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany\n#\n#    Licensed under the Apache License, Version 2.0 (the \"License\");\n#    you may not use this file except in compliance with the License.\n#    You may obtain a copy of the License at\n#\n#        http://www.apache.org/licenses/LICENSE-2.0\n#\n#    Unless required by applicable law or agreed to in writing, software\n#    distributed under the License is distributed on an \"AS IS\" BASIS,\n#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n#    See the License for the specific language governing permissions and\n#    limitations under the License.\nfrom typing import Union\n\nfrom nnunetv2.paths import nnUNet_preprocessed, nnUNet_raw, nnUNet_results\nfrom batchgenerators.utilities.file_and_folder_operations import *\nimport numpy as np\n\n\ndef find_candidate_datasets(dataset_id: int):\n    startswith = \"Dataset%03.0d\" % dataset_id\n    if nnUNet_preprocessed is not None and isdir(nnUNet_preprocessed):\n        candidates_preprocessed = subdirs(nnUNet_preprocessed, prefix=startswith, join=False)\n    else:\n        candidates_preprocessed = []\n\n    if nnUNet_raw is not None and isdir(nnUNet_raw):\n        candidates_raw = subdirs(nnUNet_raw, prefix=startswith, join=False)\n    else:\n        candidates_raw = []\n\n    candidates_trained_models = []\n    if nnUNet_results is not None and isdir(nnUNet_results):\n        candidates_trained_models += subdirs(nnUNet_results, prefix=startswith, join=False)\n\n    all_candidates = candidates_preprocessed + candidates_raw + candidates_trained_models\n    unique_candidates = np.unique(all_candidates)\n    return unique_candidates\n\n\ndef convert_id_to_dataset_name(dataset_id: int):\n    unique_candidates = find_candidate_datasets(dataset_id)\n    if len(unique_candidates) > 1:\n        raise RuntimeError(\"More than one dataset name found for dataset id 
%d. Please correct that. (I looked in the \"\n                           \"following folders:\\n%s\\n%s\\n%s\" % (dataset_id, nnUNet_raw, nnUNet_preprocessed, nnUNet_results))\n    if len(unique_candidates) == 0:\n        raise RuntimeError(f\"Could not find a dataset with the ID {dataset_id}. Make sure the requested dataset ID \"\n                           f\"exists and that nnU-Net knows where raw and preprocessed data are located \"\n                           f\"(see Documentation - Installation). Here are your currently defined folders:\\n\"\n                           f\"nnUNet_preprocessed={os.environ.get('nnUNet_preprocessed') if os.environ.get('nnUNet_preprocessed') is not None else 'None'}\\n\"\n                           f\"nnUNet_results={os.environ.get('nnUNet_results') if os.environ.get('nnUNet_results') is not None else 'None'}\\n\"\n                           f\"nnUNet_raw={os.environ.get('nnUNet_raw') if os.environ.get('nnUNet_raw') is not None else 'None'}\\n\"\n                           f\"If something is not right, adapt your environment variables.\")\n    return unique_candidates[0]\n\n\ndef convert_dataset_name_to_id(dataset_name: str):\n    assert dataset_name.startswith(\"Dataset\")\n    dataset_id = int(dataset_name[7:10])\n    return dataset_id\n\n\ndef maybe_convert_to_dataset_name(dataset_name_or_id: Union[int, str]) -> str:\n    if isinstance(dataset_name_or_id, str) and dataset_name_or_id.startswith(\"Dataset\"):\n        return dataset_name_or_id\n    if isinstance(dataset_name_or_id, str):\n        try:\n            dataset_name_or_id = int(dataset_name_or_id)\n        except ValueError:\n            raise ValueError(\"dataset_name_or_id was a string and did not start with 'Dataset' so we tried to \"\n                             \"convert it to a dataset ID (int). That failed, however. Please give an integer number \"\n                             \"('1', '2', etc) or a correct dataset name. 
Your input: %s\" % dataset_name_or_id)\n    return convert_id_to_dataset_name(dataset_name_or_id)\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/utilities/ddp_allgather.py",
    "content": "#    Copyright 2020 Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany\n#\n#    Licensed under the Apache License, Version 2.0 (the \"License\");\n#    you may not use this file except in compliance with the License.\n#    You may obtain a copy of the License at\n#\n#        http://www.apache.org/licenses/LICENSE-2.0\n#\n#    Unless required by applicable law or agreed to in writing, software\n#    distributed under the License is distributed on an \"AS IS\" BASIS,\n#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n#    See the License for the specific language governing permissions and\n#    limitations under the License.\nfrom typing import Any, Optional, Tuple\n\nimport torch\nfrom torch import distributed\n\n\ndef print_if_rank0(*args):\n    if distributed.get_rank() == 0:\n        print(*args)\n\n\nclass AllGatherGrad(torch.autograd.Function):\n    # stolen from pytorch lightning\n    @staticmethod\n    def forward(\n        ctx: Any,\n        tensor: torch.Tensor,\n        group: Optional[\"torch.distributed.ProcessGroup\"] = None,\n    ) -> torch.Tensor:\n        ctx.group = group\n\n        gathered_tensor = [torch.zeros_like(tensor) for _ in range(torch.distributed.get_world_size())]\n\n        torch.distributed.all_gather(gathered_tensor, tensor, group=group)\n        gathered_tensor = torch.stack(gathered_tensor, dim=0)\n\n        return gathered_tensor\n\n    @staticmethod\n    def backward(ctx: Any, *grad_output: torch.Tensor) -> Tuple[torch.Tensor, None]:\n        grad_output = torch.cat(grad_output)\n\n        torch.distributed.all_reduce(grad_output, op=torch.distributed.ReduceOp.SUM, async_op=False, group=ctx.group)\n\n        return grad_output[torch.distributed.get_rank()], None\n\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/utilities/default_n_proc_DA.py",
    "content": "import subprocess\nimport os\n\n\ndef get_allowed_n_proc_DA():\n    \"\"\"\n    This function is used to set the number of processes used on different Systems. It is specific to our cluster\n    infrastructure at DKFZ. You can modify it to suit your needs. Everything is allowed.\n\n    IMPORTANT: if the environment variable nnUNet_n_proc_DA is set it will overwrite anything in this script\n    (see first line).\n\n    Interpret the output as the number of processes used for data augmentation PER GPU.\n\n    The way it is implemented here is simply a look up table. We know the hostnames, CPU and GPU configurations of our\n    systems and set the numbers accordingly. For example, a system with 4 GPUs and 48 threads can use 12 threads per\n    GPU without overloading the CPU (technically 11 because we have a main process as well), so that's what we use.\n    \"\"\"\n\n    if 'nnUNet_n_proc_DA' in os.environ.keys():\n        use_this = int(os.environ['nnUNet_n_proc_DA'])\n    else:\n        hostname = subprocess.getoutput(['hostname'])\n        if hostname in ['Fabian', ]:\n            use_this = 12\n        elif hostname in ['hdf19-gpu16', 'hdf19-gpu17', 'hdf19-gpu18', 'hdf19-gpu19', 'e230-AMDworkstation']:\n            use_this = 16\n        elif hostname.startswith('e230-dgx1'):\n            use_this = 10\n        elif hostname.startswith('hdf18-gpu') or hostname.startswith('e132-comp'):\n            use_this = 16\n        elif hostname.startswith('e230-dgx2'):\n            use_this = 6\n        elif hostname.startswith('e230-dgxa100-'):\n            use_this = 28\n        elif hostname.startswith('lsf22-gpu'):\n            use_this = 28\n        elif hostname.startswith('hdf19-gpu') or hostname.startswith('e071-gpu'):\n            use_this = 12\n        else:\n            use_this = 12  # default value\n\n    use_this = min(use_this, os.cpu_count())\n    return use_this\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/utilities/file_path_utilities.py",
    "content": "from multiprocessing import Pool\nfrom typing import Union, Tuple\nimport numpy as np\nfrom batchgenerators.utilities.file_and_folder_operations import *\n\nfrom nnunetv2.configuration import default_num_processes\nfrom nnunetv2.paths import nnUNet_results\nfrom nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name\n\n\ndef convert_trainer_plans_config_to_identifier(trainer_name, plans_identifier, configuration):\n    return f'{trainer_name}__{plans_identifier}__{configuration}'\n\n\ndef convert_identifier_to_trainer_plans_config(identifier: str):\n    return os.path.basename(identifier).split('__')\n\n\ndef get_output_folder(dataset_name_or_id: Union[str, int], trainer_name: str = 'nnUNetTrainer',\n                      plans_identifier: str = 'nnUNetPlans', configuration: str = '3d_fullres',\n                      fold: Union[str, int] = None) -> str:\n    tmp = join(nnUNet_results, maybe_convert_to_dataset_name(dataset_name_or_id),\n               convert_trainer_plans_config_to_identifier(trainer_name, plans_identifier, configuration))\n    if fold is not None:\n        tmp = join(tmp, f'fold_{fold}')\n    return tmp\n\n\ndef parse_dataset_trainer_plans_configuration_from_path(path: str):\n    folders = split_path(path)\n    # this here can be a little tricky because we are making assumptions. Let's hope this never fails lol\n\n    # safer to make this depend on two conditions, the fold_x and the DatasetXXX\n    # first let's see if some fold_X is present\n    fold_x_present = [i.startswith('fold_') for i in folders]\n    if any(fold_x_present):\n        idx = fold_x_present.index(True)\n        # OK now two entries before that there should be DatasetXXX\n        assert len(folders[:idx]) >= 2, 'Bad path, cannot extract what I need. 
Your path needs to be at least ' \\\n                                        'DatasetXXX/MODULE__PLANS__CONFIGURATION for this to work'\n        if folders[idx - 2].startswith('Dataset'):\n            split = folders[idx - 1].split('__')\n            assert len(split) == 3, 'Bad path, cannot extract what I need. Your path needs to be at least ' \\\n                                        'DatasetXXX/MODULE__PLANS__CONFIGURATION for this to work'\n            return folders[idx - 2], *split\n    else:\n        # we can only check for dataset followed by a string that is separable into three strings by splitting with '__'\n        # look for DatasetXXX\n        dataset_folder = [i.startswith('Dataset') for i in folders]\n        if any(dataset_folder):\n            idx = dataset_folder.index(True)\n            assert len(folders) >= (idx + 1), 'Bad path, cannot extract what I need. Your path needs to be at least ' \\\n                                        'DatasetXXX/MODULE__PLANS__CONFIGURATION for this to work'\n            split = folders[idx + 1].split('__')\n            assert len(split) == 3, 'Bad path, cannot extract what I need. 
Your path needs to be at least ' \\\n                                       'DatasetXXX/MODULE__PLANS__CONFIGURATION for this to work'\n            return folders[idx], *split\n\n\ndef get_ensemble_name(model1_folder, model2_folder, folds: Tuple[int, ...]):\n    identifier = 'ensemble___' + os.path.basename(model1_folder) + '___' + \\\n                 os.path.basename(model2_folder) + '___' + folds_tuple_to_string(folds)\n    return identifier\n\n\ndef get_ensemble_name_from_d_tr_c(dataset, tr1, p1, c1, tr2, p2, c2, folds: Tuple[int, ...]):\n    model1_folder = get_output_folder(dataset, tr1, p1, c1)\n    model2_folder = get_output_folder(dataset, tr2, p2, c2)\n\n    get_ensemble_name(model1_folder, model2_folder, folds)\n\n\ndef convert_ensemble_folder_to_model_identifiers_and_folds(ensemble_folder: str):\n    prefix, *models, folds = os.path.basename(ensemble_folder).split('___')\n    return models, folds\n\n\ndef folds_tuple_to_string(folds: Union[List[int], Tuple[int, ...]]):\n    s = str(folds[0])\n    for f in folds[1:]:\n        s += f\"_{f}\"\n    return s\n\n\ndef folds_string_to_tuple(folds_string: str):\n    folds = folds_string.split('_')\n    res = []\n    for f in folds:\n        try:\n            res.append(int(f))\n        except ValueError:\n            res.append(f)\n    return res\n\n\ndef check_workers_alive_and_busy(export_pool: Pool, worker_list: List, results_list: List, allowed_num_queued: int = 0):\n    \"\"\"\n\n    returns True if the number of results that are not ready is greater than the number of available workers + allowed_num_queued\n    \"\"\"\n    alive = [i.is_alive() for i in worker_list]\n    if not all(alive):\n        raise RuntimeError('Some background workers are no longer alive')\n\n    not_ready = [not i.ready() for i in results_list]\n    if sum(not_ready) >= (len(export_pool._pool) + allowed_num_queued):\n        return True\n    return False\n\n\nif __name__ == '__main__':\n    ### well at this point I could just 
write tests...\n    path = '/home/fabian/results/nnUNet_remake/Dataset002_Heart/nnUNetModule__nnUNetPlans__3d_fullres'\n    print(parse_dataset_trainer_plans_configuration_from_path(path))\n    path = 'Dataset002_Heart/nnUNetModule__nnUNetPlans__3d_fullres'\n    print(parse_dataset_trainer_plans_configuration_from_path(path))\n    path = '/home/fabian/results/nnUNet_remake/Dataset002_Heart/nnUNetModule__nnUNetPlans__3d_fullres/fold_all'\n    print(parse_dataset_trainer_plans_configuration_from_path(path))\n    try:\n        path = '/home/fabian/results/nnUNet_remake/Dataset002_Heart/'\n        print(parse_dataset_trainer_plans_configuration_from_path(path))\n    except AssertionError:\n        print('yayy, assertion works')\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/utilities/find_class_by_name.py",
    "content": "import importlib\nimport pkgutil\n\nfrom batchgenerators.utilities.file_and_folder_operations import *\n\n\ndef recursive_find_python_class(folder: str, class_name: str, current_module: str):\n    tr = None\n    for importer, modname, ispkg in pkgutil.iter_modules([folder]):\n        # print(modname, ispkg)\n        if not ispkg:\n            m = importlib.import_module(current_module + \".\" + modname)\n            if hasattr(m, class_name):\n                tr = getattr(m, class_name)\n                break\n\n    if tr is None:\n        for importer, modname, ispkg in pkgutil.iter_modules([folder]):\n            if ispkg:\n                next_current_module = current_module + \".\" + modname\n                tr = recursive_find_python_class(join(folder, modname), class_name, current_module=next_current_module)\n            if tr is not None:\n                break\n    return tr"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/utilities/get_network_from_plans.py",
    "content": "from dynamic_network_architectures.architectures.unet import PlainConvUNet, ResidualEncoderUNet\nfrom dynamic_network_architectures.building_blocks.helper import get_matching_instancenorm, convert_dim_to_conv_op\nfrom dynamic_network_architectures.initialization.weight_init import init_last_bn_before_add_to_0\nfrom nnunetv2.utilities.network_initialization import InitWeights_He\nfrom nnunetv2.utilities.plans_handling.plans_handler import ConfigurationManager, PlansManager\nfrom torch import nn\n\n\ndef get_network_from_plans(plans_manager: PlansManager,\n                           dataset_json: dict,\n                           configuration_manager: ConfigurationManager,\n                           num_input_channels: int,\n                           deep_supervision: bool = True):\n    \"\"\"\n    we may have to change this in the future to accommodate other plans -> network mappings\n\n    num_input_channels can differ depending on whether we do cascade. Its best to make this info available in the\n    trainer rather than inferring it again from the plans here.\n    \"\"\"\n    num_stages = len(configuration_manager.conv_kernel_sizes)\n\n    dim = len(configuration_manager.conv_kernel_sizes[0])\n    conv_op = convert_dim_to_conv_op(dim)\n\n    label_manager = plans_manager.get_label_manager(dataset_json)\n\n    segmentation_network_class_name = configuration_manager.UNet_class_name\n    mapping = {\n        'PlainConvUNet': PlainConvUNet,\n        'ResidualEncoderUNet': ResidualEncoderUNet\n    }\n    kwargs = {\n        'PlainConvUNet': {\n            'conv_bias': True,\n            'norm_op': get_matching_instancenorm(conv_op),\n            'norm_op_kwargs': {'eps': 1e-5, 'affine': True},\n            'dropout_op': None, 'dropout_op_kwargs': None,\n            'nonlin': nn.LeakyReLU, 'nonlin_kwargs': {'inplace': True},\n        },\n        'ResidualEncoderUNet': {\n            'conv_bias': True,\n            'norm_op': 
get_matching_instancenorm(conv_op),\n            'norm_op_kwargs': {'eps': 1e-5, 'affine': True},\n            'dropout_op': None, 'dropout_op_kwargs': None,\n            'nonlin': nn.LeakyReLU, 'nonlin_kwargs': {'inplace': True},\n        }\n    }\n    assert segmentation_network_class_name in mapping.keys(), 'The network architecture specified by the plans file ' \\\n                                                              'is non-standard (maybe your own?). Yo\\'ll have to dive ' \\\n                                                              'into either this ' \\\n                                                              'function (get_network_from_plans) or ' \\\n                                                              'the init of your nnUNetModule to accommodate that.'\n    network_class = mapping[segmentation_network_class_name]\n\n    conv_or_blocks_per_stage = {\n        'n_conv_per_stage'\n        if network_class != ResidualEncoderUNet else 'n_blocks_per_stage': configuration_manager.n_conv_per_stage_encoder,\n        'n_conv_per_stage_decoder': configuration_manager.n_conv_per_stage_decoder\n    }\n    # network class name!!\n    model = network_class(\n        input_channels=num_input_channels,\n        n_stages=num_stages,\n        features_per_stage=[min(configuration_manager.UNet_base_num_features * 2 ** i,\n                                configuration_manager.unet_max_num_features) for i in range(num_stages)],\n        conv_op=conv_op,\n        kernel_sizes=configuration_manager.conv_kernel_sizes,\n        strides=configuration_manager.pool_op_kernel_sizes,\n        num_classes=label_manager.num_segmentation_heads,\n        deep_supervision=deep_supervision,\n        **conv_or_blocks_per_stage,\n        **kwargs[segmentation_network_class_name]\n    )\n    model.apply(InitWeights_He(1e-2))\n    if network_class == ResidualEncoderUNet:\n        model.apply(init_last_bn_before_add_to_0)\n    return model\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/utilities/helpers.py",
    "content": "import torch\n\n\ndef softmax_helper_dim0(x: torch.Tensor) -> torch.Tensor:\n    return torch.softmax(x, 0)\n\n\ndef softmax_helper_dim1(x: torch.Tensor) -> torch.Tensor:\n    return torch.softmax(x, 1)\n\n\ndef empty_cache(device: torch.device):\n    if device.type == 'cuda':\n        torch.cuda.empty_cache()\n    elif device.type == 'mps':\n        from torch import mps\n        mps.empty_cache()\n    else:\n        pass\n\n\nclass dummy_context(object):\n    def __enter__(self):\n        pass\n\n    def __exit__(self, exc_type, exc_val, exc_tb):\n        pass\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/utilities/json_export.py",
    "content": "from collections.abc import Iterable\n\nimport numpy as np\nimport torch\n\n\ndef recursive_fix_for_json_export(my_dict: dict):\n    # json is stupid. 'cannot serialize object of type bool_/int64/float64'. Come on bro.\n    keys = list(my_dict.keys())  # cannot iterate over keys() if we change keys....\n    for k in keys:\n        if isinstance(k, (np.int64, np.int32, np.int8, np.uint8)):\n            tmp = my_dict[k]\n            del my_dict[k]\n            my_dict[int(k)] = tmp\n            del tmp\n            k = int(k)\n\n        if isinstance(my_dict[k], dict):\n            recursive_fix_for_json_export(my_dict[k])\n        elif isinstance(my_dict[k], np.ndarray):\n            assert my_dict[k].ndim == 1, 'only 1d arrays are supported'\n            my_dict[k] = fix_types_iterable(my_dict[k], output_type=list)\n        elif isinstance(my_dict[k], (np.bool_,)):\n            my_dict[k] = bool(my_dict[k])\n        elif isinstance(my_dict[k], (np.int64, np.int32, np.int8, np.uint8)):\n            my_dict[k] = int(my_dict[k])\n        elif isinstance(my_dict[k], (np.float32, np.float64, np.float16)):\n            my_dict[k] = float(my_dict[k])\n        elif isinstance(my_dict[k], list):\n            my_dict[k] = fix_types_iterable(my_dict[k], output_type=type(my_dict[k]))\n        elif isinstance(my_dict[k], tuple):\n            my_dict[k] = fix_types_iterable(my_dict[k], output_type=tuple)\n        elif isinstance(my_dict[k], torch.device):\n            my_dict[k] = str(my_dict[k])\n        else:\n            pass  # pray it can be serialized\n\n\ndef fix_types_iterable(iterable, output_type):\n    # this sh!t is hacky as hell and will break if you use it for anything outside nnunet. 
Keep you hands off of this.\n    out = []\n    for i in iterable:\n        if type(i) in (np.int64, np.int32, np.int8, np.uint8):\n            out.append(int(i))\n        elif isinstance(i, dict):\n            recursive_fix_for_json_export(i)\n            out.append(i)\n        elif type(i) in (np.float32, np.float64, np.float16):\n            out.append(float(i))\n        elif type(i) in (np.bool_,):\n            out.append(bool(i))\n        elif isinstance(i, str):\n            out.append(i)\n        elif isinstance(i, Iterable):\n            # print('recursive call on', i, type(i))\n            out.append(fix_types_iterable(i, type(i)))\n        else:\n            out.append(i)\n    return output_type(out)\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/utilities/label_handling/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/utilities/label_handling/label_handling.py",
    "content": "from __future__ import annotations\nfrom time import time\nfrom typing import Union, List, Tuple, Type\n\nimport numpy as np\nimport torch\nfrom acvl_utils.cropping_and_padding.bounding_boxes import bounding_box_to_slice\nfrom batchgenerators.utilities.file_and_folder_operations import join\n\nimport nnunetv2\nfrom nnunetv2.utilities.find_class_by_name import recursive_find_python_class\nfrom nnunetv2.utilities.helpers import softmax_helper_dim0\n\nfrom typing import TYPE_CHECKING\n\n# see https://adamj.eu/tech/2021/05/13/python-type-hints-how-to-fix-circular-imports/\nif TYPE_CHECKING:\n    from nnunetv2.utilities.plans_handling.plans_handler import PlansManager, ConfigurationManager\n\n\nclass LabelManager(object):\n    def __init__(self, label_dict: dict, regions_class_order: Union[List[int], None], force_use_labels: bool = False,\n                 inference_nonlin=None):\n        self._sanity_check(label_dict)\n        self.label_dict = label_dict\n        self.regions_class_order = regions_class_order\n        self._force_use_labels = force_use_labels\n\n        if force_use_labels:\n            self._has_regions = False\n        else:\n            self._has_regions: bool = any(\n                [isinstance(i, (tuple, list)) and len(i) > 1 for i in self.label_dict.values()])\n\n        self._ignore_label: Union[None, int] = self._determine_ignore_label()\n        self._all_labels: List[int] = self._get_all_labels()\n\n        self._regions: Union[None, List[Union[int, Tuple[int, ...]]]] = self._get_regions()\n\n        if self.has_ignore_label:\n            assert self.ignore_label == max(\n                self.all_labels) + 1, 'If you use the ignore label it must have the highest ' \\\n                                      'label value! It cannot be 0 or in between other labels. 
' \\\n                                      'Sorry bro.'\n\n        if inference_nonlin is None:\n            self.inference_nonlin = torch.sigmoid if self.has_regions else softmax_helper_dim0\n        else:\n            self.inference_nonlin = inference_nonlin\n\n    def _sanity_check(self, label_dict: dict):\n        if not 'background' in label_dict.keys():\n            raise RuntimeError('Background label not declared (remember that this should be label 0!)')\n        bg_label = label_dict['background']\n        if isinstance(bg_label, (tuple, list)):\n            raise RuntimeError(f\"Background label must be 0. Not a list. Not a tuple. Your background label: {bg_label}\")\n        assert int(bg_label) == 0, f\"Background label must be 0. Your background label: {bg_label}\"\n        # not sure if we want to allow regions that contain background. I don't immediately see how this could cause\n        # problems so we allow it for now. That doesn't mean that this is explicitly supported. It could be that this\n        # just crashes.\n\n    def _get_all_labels(self) -> List[int]:\n        all_labels = []\n        for k, r in self.label_dict.items():\n            # ignore label is not going to be used, hence the name. 
Duh.\n            if k == 'ignore':\n                continue\n            if isinstance(r, (tuple, list)):\n                for ri in r:\n                    all_labels.append(int(ri))\n            else:\n                all_labels.append(int(r))\n        all_labels = list(np.unique(all_labels))\n        all_labels.sort()\n        return all_labels\n\n    def _get_regions(self) -> Union[None, List[Union[int, Tuple[int, ...]]]]:\n        if not self._has_regions or self._force_use_labels:\n            return None\n        else:\n            assert self.regions_class_order is not None, 'if region-based training is requested then you need to ' \\\n                                                         'define regions_class_order!'\n            regions = []\n            for k, r in self.label_dict.items():\n                # ignore ignore label\n                if k == 'ignore':\n                    continue\n                # ignore regions that are background\n                if (np.isscalar(r) and r == 0) \\\n                        or \\\n                        (isinstance(r, (tuple, list)) and len(np.unique(r)) == 1 and np.unique(r)[0] == 0):\n                    continue\n                if isinstance(r, list):\n                    r = tuple(r)\n                regions.append(r)\n            assert len(self.regions_class_order) == len(regions), 'regions_class_order must have as ' \\\n                                                                  'many entries as there are ' \\\n                                                                  'regions'\n            return regions\n\n    def _determine_ignore_label(self) -> Union[None, int]:\n        ignore_label = self.label_dict.get('ignore')\n        if ignore_label is not None:\n            assert isinstance(ignore_label, int), f'Ignore label has to be an integer. It cannot be a region ' \\\n                                                  f'(list/tuple). 
Got {type(ignore_label)}.'\n        return ignore_label\n\n    @property\n    def has_regions(self) -> bool:\n        return self._has_regions\n\n    @property\n    def has_ignore_label(self) -> bool:\n        return self.ignore_label is not None\n\n    @property\n    def all_regions(self) -> Union[None, List[Union[int, Tuple[int, ...]]]]:\n        return self._regions\n\n    @property\n    def all_labels(self) -> List[int]:\n        return self._all_labels\n\n    @property\n    def ignore_label(self) -> Union[None, int]:\n        return self._ignore_label\n\n    def apply_inference_nonlin(self, logits: Union[np.ndarray, torch.Tensor]) -> \\\n            Union[np.ndarray, torch.Tensor]:\n        \"\"\"\n        logits has to have shape (c, x, y(, z)) where c is the number of classes/regions\n        \"\"\"\n        if isinstance(logits, np.ndarray):\n            logits = torch.from_numpy(logits)\n\n        with torch.no_grad():\n            # softmax etc is not implemented for half\n            logits = logits.float()\n            probabilities = self.inference_nonlin(logits)\n\n        return probabilities\n\n    def convert_probabilities_to_segmentation(self, predicted_probabilities: Union[np.ndarray, torch.Tensor]) -> \\\n            Union[np.ndarray, torch.Tensor]:\n        \"\"\"\n        assumes that inference_nonlinearity was already applied!\n\n        predicted_probabilities has to have shape (c, x, y(, z)) where c is the number of classes/regions\n        \"\"\"\n        if not isinstance(predicted_probabilities, (np.ndarray, torch.Tensor)):\n            raise RuntimeError(f\"Unexpected input type. 
Expected np.ndarray or torch.Tensor,\"\n                               f\" got {type(predicted_probabilities)}\")\n\n        if self.has_regions:\n            assert self.regions_class_order is not None, 'if region-based training is requested then you need to ' \\\n                                                         'define regions_class_order!'\n            # check correct number of outputs\n        assert predicted_probabilities.shape[0] == self.num_segmentation_heads, \\\n            f'unexpected number of channels in predicted_probabilities. Expected {self.num_segmentation_heads}, ' \\\n            f'got {predicted_probabilities.shape[0]}. Remember that predicted_probabilities should have shape ' \\\n            f'(c, x, y(, z)).'\n\n        if self.has_regions:\n            if isinstance(predicted_probabilities, np.ndarray):\n                segmentation = np.zeros(predicted_probabilities.shape[1:], dtype=np.uint16)\n            else:\n                # no uint16 in torch\n                segmentation = torch.zeros(predicted_probabilities.shape[1:], dtype=torch.int16,\n                                           device=predicted_probabilities.device)\n            for i, c in enumerate(self.regions_class_order):\n                segmentation[predicted_probabilities[i] > 0.5] = c\n        else:\n            segmentation = predicted_probabilities.argmax(0)\n\n        return segmentation\n\n    def convert_logits_to_segmentation(self, predicted_logits: Union[np.ndarray, torch.Tensor]) -> \\\n            Union[np.ndarray, torch.Tensor]:\n        input_is_numpy = isinstance(predicted_logits, np.ndarray)\n        probabilities = self.apply_inference_nonlin(predicted_logits)\n        if input_is_numpy and isinstance(probabilities, torch.Tensor):\n            probabilities = probabilities.cpu().numpy()\n        return self.convert_probabilities_to_segmentation(probabilities)\n\n    def revert_cropping_on_probabilities(self, predicted_probabilities: 
Union[torch.Tensor, np.ndarray],\n                                         bbox: List[List[int]],\n                                         original_shape: Union[List[int], Tuple[int, ...]]):\n        \"\"\"\n        ONLY USE THIS WITH PROBABILITIES, DO NOT USE LOGITS AND DO NOT USE FOR SEGMENTATION MAPS!!!\n\n        predicted_probabilities must be (c, x, y(, z))\n\n        Why do we do this here? Well if we pad probabilities we need to make sure that convert_logits_to_segmentation\n        correctly returns background in the padded areas. Also we want to ba able to look at the padded probabilities\n        and not have strange artifacts.\n        Only LabelManager knows how this needs to be done. So let's let him/her do it, ok?\n        \"\"\"\n        # revert cropping\n        probs_reverted_cropping = np.zeros((predicted_probabilities.shape[0], *original_shape),\n                                           dtype=predicted_probabilities.dtype) \\\n            if isinstance(predicted_probabilities, np.ndarray) else \\\n            torch.zeros((predicted_probabilities.shape[0], *original_shape), dtype=predicted_probabilities.dtype)\n\n        if not self.has_regions:\n            probs_reverted_cropping[0] = 1\n\n        slicer = bounding_box_to_slice(bbox)\n        probs_reverted_cropping[tuple([slice(None)] + list(slicer))] = predicted_probabilities\n        return probs_reverted_cropping\n\n    @staticmethod\n    def filter_background(classes_or_regions: Union[List[int], List[Union[int, Tuple[int, ...]]]]):\n        # heck yeah\n        # This is definitely taking list comprehension too far. 
Enjoy.\n        return [i for i in classes_or_regions if\n                ((not isinstance(i, (tuple, list))) and i != 0)\n                or\n                (isinstance(i, (tuple, list)) and not (\n                        len(np.unique(i)) == 1 and np.unique(i)[0] == 0))]\n\n    @property\n    def foreground_regions(self):\n        return self.filter_background(self.all_regions)\n\n    @property\n    def foreground_labels(self):\n        return self.filter_background(self.all_labels)\n\n    @property\n    def num_segmentation_heads(self):\n        if self.has_regions:\n            return len(self.foreground_regions)\n        else:\n            return len(self.all_labels)\n\n\ndef get_labelmanager_class_from_plans(plans: dict) -> Type[LabelManager]:\n    if 'label_manager' not in plans.keys():\n        print('No label manager specified in plans. Using default: LabelManager')\n        return LabelManager\n    else:\n        labelmanager_class = recursive_find_python_class(join(nnunetv2.__path__[0], \"utilities\", \"label_handling\"),\n                                                         plans['label_manager'],\n                                                         current_module=\"nnunetv2.utilities.label_handling\")\n        return labelmanager_class\n\n\ndef convert_labelmap_to_one_hot(segmentation: Union[np.ndarray, torch.Tensor],\n                                all_labels: Union[List, torch.Tensor, np.ndarray, tuple],\n                                output_dtype=None) -> Union[np.ndarray, torch.Tensor]:\n    \"\"\"\n    if output_dtype is None then we use np.uint8/torch.uint8\n    if input is torch.Tensor then output will be on the same device\n\n    np.ndarray is faster than torch.Tensor\n\n    if segmentation is torch.Tensor, this function will be faster if it is LongTensor. 
If it is somethine else we have\n    to cast which takes time.\n\n    IMPORTANT: This function only works properly if your labels are consecutive integers, so something like 0, 1, 2, 3, ...\n    DO NOT use it with 0, 32, 123, 255, ... or whatever (fix your labels, yo)\n    \"\"\"\n    if isinstance(segmentation, torch.Tensor):\n        result = torch.zeros((len(all_labels), *segmentation.shape),\n                             dtype=output_dtype if output_dtype is not None else torch.uint8,\n                             device=segmentation.device)\n        # variant 1, 2x faster than 2\n        result.scatter_(0, segmentation[None].long(), 1)  # why does this have to be long!?\n        # variant 2, slower than 1\n        # for i, l in enumerate(all_labels):\n        #     result[i] = segmentation == l\n    else:\n        result = np.zeros((len(all_labels), *segmentation.shape),\n                          dtype=output_dtype if output_dtype is not None else np.uint8)\n        # variant 1, fastest in my testing\n        for i, l in enumerate(all_labels):\n            result[i] = segmentation == l\n        # variant 2. 
Takes about twice as long so nah\n        # result = np.eye(len(all_labels))[segmentation].transpose((3, 0, 1, 2))\n    return result\n\n\ndef determine_num_input_channels(plans_manager: PlansManager,\n                                 configuration_or_config_manager: Union[str, ConfigurationManager],\n                                 dataset_json: dict) -> int:\n    if isinstance(configuration_or_config_manager, str):\n        config_manager = plans_manager.get_configuration(configuration_or_config_manager)\n    else:\n        config_manager = configuration_or_config_manager\n\n    label_manager = plans_manager.get_label_manager(dataset_json)\n    num_modalities = len(dataset_json['modality']) if 'modality' in dataset_json.keys() else len(dataset_json['channel_names'])\n\n    # cascade has different number of input channels\n    if config_manager.previous_stage_name is not None:\n        num_label_inputs = len(label_manager.foreground_labels)\n        num_input_channels = num_modalities + num_label_inputs\n    else:\n        num_input_channels = num_modalities\n    return num_input_channels\n\n\nif __name__ == '__main__':\n    # this code used to be able to differentiate variant 1 and 2 to measure time.\n    num_labels = 7\n    seg = np.random.randint(0, num_labels, size=(256, 256, 256), dtype=np.uint8)\n    seg_torch = torch.from_numpy(seg)\n    st = time()\n    onehot_npy = convert_labelmap_to_one_hot(seg, np.arange(num_labels))\n    time_1 = time()\n    onehot_npy2 = convert_labelmap_to_one_hot(seg, np.arange(num_labels))\n    time_2 = time()\n    onehot_torch = convert_labelmap_to_one_hot(seg_torch, np.arange(num_labels))\n    time_torch = time()\n    onehot_torch2 = convert_labelmap_to_one_hot(seg_torch, np.arange(num_labels))\n    time_torch2 = time()\n    print(\n        f'np: {time_1 - st}, np2: {time_2 - time_1}, torch: {time_torch - time_2}, torch2: {time_torch2 - time_torch}')\n    onehot_torch = onehot_torch.numpy()\n    onehot_torch2 = 
onehot_torch2.numpy()\n    print(np.all(onehot_torch == onehot_npy))\n    print(np.all(onehot_torch2 == onehot_npy))\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/utilities/network_initialization.py",
    "content": "from torch import nn\n\n\nclass InitWeights_He(object):\n    def __init__(self, neg_slope=1e-2):\n        self.neg_slope = neg_slope\n\n    def __call__(self, module):\n        if isinstance(module, nn.Conv3d) or isinstance(module, nn.Conv2d) or isinstance(module, nn.ConvTranspose2d) or isinstance(module, nn.ConvTranspose3d):\n            module.weight = nn.init.kaiming_normal_(module.weight, a=self.neg_slope)\n            if module.bias is not None:\n                module.bias = nn.init.constant_(module.bias, 0)\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/utilities/overlay_plots.py",
    "content": "#    Copyright 2020 Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany\n#\n#    Licensed under the Apache License, Version 2.0 (the \"License\");\n#    you may not use this file except in compliance with the License.\n#    You may obtain a copy of the License at\n#\n#        http://www.apache.org/licenses/LICENSE-2.0\n#\n#    Unless required by applicable law or agreed to in writing, software\n#    distributed under the License is distributed on an \"AS IS\" BASIS,\n#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n#    See the License for the specific language governing permissions and\n#    limitations under the License.\nimport multiprocessing\nfrom multiprocessing.pool import Pool\nfrom typing import Tuple, Union\n\nimport numpy as np\nimport pandas as pd\nfrom batchgenerators.utilities.file_and_folder_operations import *\nfrom nnunetv2.configuration import default_num_processes\nfrom nnunetv2.imageio.base_reader_writer import BaseReaderWriter\nfrom nnunetv2.imageio.reader_writer_registry import determine_reader_writer_from_dataset_json\nfrom nnunetv2.paths import nnUNet_raw, nnUNet_preprocessed\nfrom nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name\nfrom nnunetv2.utilities.utils import get_identifiers_from_splitted_dataset_folder, \\\n    get_filenames_of_train_images_and_targets\n\ncolor_cycle = (\n    \"000000\",\n    \"4363d8\",\n    \"f58231\",\n    \"3cb44b\",\n    \"e6194B\",\n    \"911eb4\",\n    \"ffe119\",\n    \"bfef45\",\n    \"42d4f4\",\n    \"f032e6\",\n    \"000075\",\n    \"9A6324\",\n    \"808000\",\n    \"800000\",\n    \"469990\",\n)\n\n\ndef hex_to_rgb(hex: str):\n    assert len(hex) == 6\n    return tuple(int(hex[i:i + 2], 16) for i in (0, 2, 4))\n\n\ndef generate_overlay(input_image: np.ndarray, segmentation: np.ndarray, mapping: dict = None,\n                     color_cycle: Tuple[str, ...] 
= color_cycle,\n                     overlay_intensity: float = 0.6):\n    \"\"\"\n    image can be 2d greyscale or 2d RGB (color channel in last dimension!)\n\n    Segmentation must be label map of same shape as image (w/o color channels)\n\n    mapping can be label_id -> idx_in_cycle or None\n\n    returned image is scaled to [0, 255] (uint8)!!!\n    \"\"\"\n    # create a copy of image\n    image = np.copy(input_image)\n\n    if image.ndim == 2:\n        image = np.tile(image[:, :, None], (1, 1, 3))\n    elif image.ndim == 3:\n        if image.shape[2] == 1:\n            image = np.tile(image, (1, 1, 3))\n        else:\n            raise RuntimeError(f'if 3d image is given the last dimension must be the color channels (3 channels). '\n                               f'Only 2D images are supported. Your image shape: {image.shape}')\n    else:\n        raise RuntimeError(\"unexpected image shape. only 2D images and 2D images with color channels (color in \"\n                           \"last dimension) are supported\")\n\n    # rescale image to [0, 255]\n    image = image - image.min()\n    image = image / image.max() * 255\n\n    # create output\n    if mapping is None:\n        uniques = np.sort(pd.unique(segmentation.ravel()))  # np.unique(segmentation)\n        mapping = {i: c for c, i in enumerate(uniques)}\n\n    for l in mapping.keys():\n        image[segmentation == l] += overlay_intensity * np.array(hex_to_rgb(color_cycle[mapping[l]]))\n\n    # rescale result to [0, 255]\n    image = image / image.max() * 255\n    return image.astype(np.uint8)\n\n\ndef select_slice_to_plot(image: np.ndarray, segmentation: np.ndarray) -> int:\n    \"\"\"\n    image and segmentation are expected to be 3D\n\n    selects the slice with the largest amount of fg (regardless of label)\n\n    we give image so that we can easily replace this function if needed\n    \"\"\"\n    fg_mask = segmentation != 0\n    fg_per_slice = fg_mask.sum((1, 2))\n    selected_slice = 
int(np.argmax(fg_per_slice))\n    return selected_slice\n\n\ndef select_slice_to_plot2(image: np.ndarray, segmentation: np.ndarray) -> int:\n    \"\"\"\n    image and segmentation are expected to be 3D (or 1, x, y)\n\n    selects the slice with the largest amount of fg (how much percent of each class are in each slice? pick slice\n    with highest avg percent)\n\n    we give image so that we can easily replace this function if needed\n    \"\"\"\n    classes = [i for i in np.sort(pd.unique(segmentation.ravel())) if i != 0]\n    fg_per_slice = np.zeros((image.shape[0], len(classes)))\n    for i, c in enumerate(classes):\n        fg_mask = segmentation == c\n        fg_per_slice[:, i] = fg_mask.sum((1, 2))\n        fg_per_slice[:, i] /= fg_per_slice.sum()\n    fg_per_slice = fg_per_slice.mean(1)\n    return int(np.argmax(fg_per_slice))\n\n\ndef plot_overlay(image_file: str, segmentation_file: str, image_reader_writer: BaseReaderWriter, output_file: str,\n                 overlay_intensity: float = 0.6):\n    import matplotlib.pyplot as plt\n\n    image, props = image_reader_writer.read_images((image_file, ))\n    image = image[0]\n    seg, props_seg = image_reader_writer.read_seg(segmentation_file)\n    seg = seg[0]\n\n    assert image.shape == seg.shape, \"image and seg do not have the same shape: %s, %s\" % (\n        image_file, segmentation_file)\n\n    assert image.ndim == 3, 'only 3D images/segs are supported'\n\n    selected_slice = select_slice_to_plot2(image, seg)\n    # print(image.shape, selected_slice)\n\n    overlay = generate_overlay(image[selected_slice], seg[selected_slice], overlay_intensity=overlay_intensity)\n\n    plt.imsave(output_file, overlay)\n\n\ndef plot_overlay_preprocessed(case_file: str, output_file: str, overlay_intensity: float = 0.6, channel_idx=0):\n    import matplotlib.pyplot as plt\n    data = np.load(case_file)['data']\n    seg = np.load(case_file)['seg'][0]\n\n    assert channel_idx < (data.shape[0]), 'This dataset only supports 
channel index up to %d' % (data.shape[0] - 1)\n\n    image = data[channel_idx]\n    seg[seg < 0] = 0\n\n    selected_slice = select_slice_to_plot2(image, seg)\n\n    overlay = generate_overlay(image[selected_slice], seg[selected_slice], overlay_intensity=overlay_intensity)\n\n    plt.imsave(output_file, overlay)\n\n\ndef multiprocessing_plot_overlay(list_of_image_files, list_of_seg_files, image_reader_writer,\n                                 list_of_output_files, overlay_intensity,\n                                 num_processes=8):\n    with multiprocessing.get_context(\"spawn\").Pool(num_processes) as p:\n        r = p.starmap_async(plot_overlay, zip(\n            list_of_image_files, list_of_seg_files, [image_reader_writer] * len(list_of_output_files),\n            list_of_output_files, [overlay_intensity] * len(list_of_output_files)\n        ))\n        r.get()\n\n\ndef multiprocessing_plot_overlay_preprocessed(list_of_case_files, list_of_output_files, overlay_intensity,\n                                              num_processes=8, channel_idx=0):\n    with multiprocessing.get_context(\"spawn\").Pool(num_processes) as p:\n        r = p.starmap_async(plot_overlay_preprocessed, zip(\n            list_of_case_files, list_of_output_files, [overlay_intensity] * len(list_of_output_files),\n                                                      [channel_idx] * len(list_of_output_files)\n        ))\n        r.get()\n\n\ndef generate_overlays_from_raw(dataset_name_or_id: Union[int, str], output_folder: str,\n                               num_processes: int = 8, channel_idx: int = 0, overlay_intensity: float = 0.6):\n    dataset_name = maybe_convert_to_dataset_name(dataset_name_or_id)\n    folder = join(nnUNet_raw, dataset_name)\n    dataset_json = load_json(join(folder, 'dataset.json'))\n    dataset = get_filenames_of_train_images_and_targets(folder, dataset_json)\n\n    image_files = [v['images'][channel_idx] for v in dataset.values()]\n    seg_files = [v['label'] 
for v in dataset.values()]\n\n    assert all([isfile(i) for i in image_files])\n    assert all([isfile(i) for i in seg_files])\n\n    maybe_mkdir_p(output_folder)\n    output_files = [join(output_folder, i + '.png') for i in dataset.keys()]\n\n    image_reader_writer = determine_reader_writer_from_dataset_json(dataset_json, image_files[0])()\n    multiprocessing_plot_overlay(image_files, seg_files, image_reader_writer, output_files, overlay_intensity, num_processes)\n\n\ndef generate_overlays_from_preprocessed(dataset_name_or_id: Union[int, str], output_folder: str,\n                                        num_processes: int = 8, channel_idx: int = 0,\n                                        configuration: str = None,\n                                        plans_identifier: str = 'nnUNetPlans',\n                                        overlay_intensity: float = 0.6):\n    dataset_name = maybe_convert_to_dataset_name(dataset_name_or_id)\n    folder = join(nnUNet_preprocessed, dataset_name)\n    if not isdir(folder): raise RuntimeError(\"run preprocessing for that task first\")\n\n    plans = load_json(join(folder, plans_identifier + '.json'))\n    if configuration is None:\n        if '3d_fullres' in plans['configurations'].keys():\n            configuration = '3d_fullres'\n        else:\n            configuration = '2d'\n    data_identifier = plans['configurations'][configuration][\"data_identifier\"]\n    preprocessed_folder = join(folder, data_identifier)\n\n    if not isdir(preprocessed_folder):\n        raise RuntimeError(f\"Preprocessed data folder for configuration {configuration} of plans identifier \"\n                           f\"{plans_identifier} ({dataset_name}) does not exist. 
Run preprocessing for this \"\n                           f\"configuration first!\")\n\n    identifiers = [i[:-4] for i in subfiles(preprocessed_folder, suffix='.npz', join=False)]\n\n    output_files = [join(output_folder, i + '.png') for i in identifiers]\n    image_files = [join(preprocessed_folder, i + \".npz\") for i in identifiers]\n\n    maybe_mkdir_p(output_folder)\n    multiprocessing_plot_overlay_preprocessed(image_files, output_files, overlay_intensity=overlay_intensity,\n                                              num_processes=num_processes, channel_idx=channel_idx)\n\n\ndef entry_point_generate_overlay():\n    import argparse\n    parser = argparse.ArgumentParser(\"Plots png overlays of the slice with the most foreground. Note that this \"\n                                     \"disregards spacing information!\")\n    parser.add_argument('-d', type=str, help=\"Dataset name or id\", required=True)\n    parser.add_argument('-o', type=str, help=\"output folder\", required=True)\n    parser.add_argument('-np', type=int, default=default_num_processes, required=False,\n                        help=f\"number of processes used. Default: {default_num_processes}\")\n    parser.add_argument('-channel_idx', type=int, default=0, required=False,\n                        help=\"channel index used (0 = _0000). Default: 0\")\n    parser.add_argument('--use_raw', action='store_true', required=False, help=\"if set then we use raw data. else \"\n                                                                               \"we use preprocessed\")\n    parser.add_argument('-p', type=str, required=False, default='nnUNetPlans',\n                        help='plans identifier. Only used if --use_raw is not set! Default: nnUNetPlans')\n    parser.add_argument('-c', type=str, required=False, default=None,\n                        help='configuration name. Only used if --use_raw is not set! 
Default: None = '\n                             '3d_fullres if available, else 2d')\n    parser.add_argument('-overlay_intensity', type=float, required=False, default=0.6,\n                        help='overlay intensity. Higher = brighter/less transparent')\n\n\n    args = parser.parse_args()\n\n    if args.use_raw:\n        generate_overlays_from_raw(args.d, args.o, args.np, args.channel_idx,\n                                   overlay_intensity=args.overlay_intensity)\n    else:\n        generate_overlays_from_preprocessed(args.d, args.o, args.np, args.channel_idx, args.c, args.p,\n                                            overlay_intensity=args.overlay_intensity)\n\n\nif __name__ == '__main__':\n    entry_point_generate_overlay()"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/utilities/plans_handling/__init__.py",
    "content": ""
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/utilities/plans_handling/plans_handler.py",
    "content": "from __future__ import annotations\n\nimport dynamic_network_architectures\nfrom copy import deepcopy\nfrom functools import lru_cache, partial\nfrom typing import Union, Tuple, List, Type, Callable\n\nimport numpy as np\nimport torch\n\nfrom nnunetv2.preprocessing.resampling.utils import recursive_find_resampling_fn_by_name\nfrom torch import nn\n\nimport nnunetv2\nfrom batchgenerators.utilities.file_and_folder_operations import load_json, join\n\nfrom nnunetv2.imageio.reader_writer_registry import recursive_find_reader_writer_by_name\nfrom nnunetv2.utilities.find_class_by_name import recursive_find_python_class\nfrom nnunetv2.utilities.label_handling.label_handling import get_labelmanager_class_from_plans\n\n\n# see https://adamj.eu/tech/2021/05/13/python-type-hints-how-to-fix-circular-imports/\nfrom typing import TYPE_CHECKING\n\nif TYPE_CHECKING:\n    from nnunetv2.utilities.label_handling.label_handling import LabelManager\n    from nnunetv2.imageio.base_reader_writer import BaseReaderWriter\n    from nnunetv2.preprocessing.preprocessors.default_preprocessor import DefaultPreprocessor\n    from nnunetv2.experiment_planning.experiment_planners.default_experiment_planner import ExperimentPlanner\n\n\nclass ConfigurationManager(object):\n    def __init__(self, configuration_dict: dict):\n        self.configuration = configuration_dict\n\n    def __repr__(self):\n        return self.configuration.__repr__()\n\n    @property\n    def data_identifier(self) -> str:\n        return self.configuration['data_identifier']\n\n    @property\n    def preprocessor_name(self) -> str:\n        return self.configuration['preprocessor_name']\n\n    @property\n    @lru_cache(maxsize=1)\n    def preprocessor_class(self) -> Type[DefaultPreprocessor]:\n        preprocessor_class = recursive_find_python_class(join(nnunetv2.__path__[0], \"preprocessing\"),\n                                                         self.preprocessor_name,\n                                
                         current_module=\"nnunetv2.preprocessing\")\n        return preprocessor_class\n\n    @property\n    def batch_size(self) -> int:\n        return self.configuration['batch_size']\n\n    @property\n    def patch_size(self) -> List[int]:\n        return self.configuration['patch_size']\n\n    @property\n    def median_image_size_in_voxels(self) -> List[int]:\n        return self.configuration['median_image_size_in_voxels']\n\n    @property\n    def spacing(self) -> List[float]:\n        return self.configuration['spacing']\n\n    @property\n    def normalization_schemes(self) -> List[str]:\n        return self.configuration['normalization_schemes']\n\n    @property\n    def use_mask_for_norm(self) -> List[bool]:\n        return self.configuration['use_mask_for_norm']\n\n    @property\n    def UNet_class_name(self) -> str:\n        return self.configuration['UNet_class_name']\n\n    @property\n    @lru_cache(maxsize=1)\n    def UNet_class(self) -> Type[nn.Module]:\n        unet_class = recursive_find_python_class(join(dynamic_network_architectures.__path__[0], \"architectures\"),\n                                                 self.UNet_class_name,\n                                                 current_module=\"dynamic_network_architectures.architectures\")\n        if unet_class is None:\n            raise RuntimeError('The network architecture specified by the plans file '\n                               'is non-standard (maybe your own?). 
Fix this by not using '\n                               'ConfigurationManager.UNet_class to instantiate '\n                               'it (probably just overwrite build_network_architecture of your trainer.')\n        return unet_class\n\n    @property\n    def UNet_base_num_features(self) -> int:\n        return self.configuration['UNet_base_num_features']\n\n    @property\n    def n_conv_per_stage_encoder(self) -> List[int]:\n        return self.configuration['n_conv_per_stage_encoder']\n\n    @property\n    def n_conv_per_stage_decoder(self) -> List[int]:\n        return self.configuration['n_conv_per_stage_decoder']\n\n    @property\n    def num_pool_per_axis(self) -> List[int]:\n        return self.configuration['num_pool_per_axis']\n\n    @property\n    def pool_op_kernel_sizes(self) -> List[List[int]]:\n        return self.configuration['pool_op_kernel_sizes']\n\n    @property\n    def conv_kernel_sizes(self) -> List[List[int]]:\n        return self.configuration['conv_kernel_sizes']\n\n    @property\n    def unet_max_num_features(self) -> int:\n        return self.configuration['unet_max_num_features']\n\n    @property\n    @lru_cache(maxsize=1)\n    def resampling_fn_data(self) -> Callable[\n        [Union[torch.Tensor, np.ndarray],\n         Union[Tuple[int, ...], List[int], np.ndarray],\n         Union[Tuple[float, ...], List[float], np.ndarray],\n         Union[Tuple[float, ...], List[float], np.ndarray]\n         ],\n        Union[torch.Tensor, np.ndarray]]:\n        fn = recursive_find_resampling_fn_by_name(self.configuration['resampling_fn_data'])\n        fn = partial(fn, **self.configuration['resampling_fn_data_kwargs'])\n        return fn\n\n    @property\n    @lru_cache(maxsize=1)\n    def resampling_fn_probabilities(self) -> Callable[\n        [Union[torch.Tensor, np.ndarray],\n         Union[Tuple[int, ...], List[int], np.ndarray],\n         Union[Tuple[float, ...], List[float], np.ndarray],\n         Union[Tuple[float, ...], List[float], 
np.ndarray]\n         ],\n        Union[torch.Tensor, np.ndarray]]:\n        fn = recursive_find_resampling_fn_by_name(self.configuration['resampling_fn_probabilities'])\n        fn = partial(fn, **self.configuration['resampling_fn_probabilities_kwargs'])\n        return fn\n\n    @property\n    @lru_cache(maxsize=1)\n    def resampling_fn_seg(self) -> Callable[\n        [Union[torch.Tensor, np.ndarray],\n         Union[Tuple[int, ...], List[int], np.ndarray],\n         Union[Tuple[float, ...], List[float], np.ndarray],\n         Union[Tuple[float, ...], List[float], np.ndarray]\n         ],\n        Union[torch.Tensor, np.ndarray]]:\n        fn = recursive_find_resampling_fn_by_name(self.configuration['resampling_fn_seg'])\n        fn = partial(fn, **self.configuration['resampling_fn_seg_kwargs'])\n        return fn\n\n    @property\n    def batch_dice(self) -> bool:\n        return self.configuration['batch_dice']\n\n    @property\n    def next_stage_names(self) -> Union[List[str], None]:\n        ret = self.configuration.get('next_stage')\n        if ret is not None:\n            if isinstance(ret, str):\n                ret = [ret]\n        return ret\n\n    @property\n    def previous_stage_name(self) -> Union[str, None]:\n        return self.configuration.get('previous_stage')\n\n\nclass PlansManager(object):\n    def __init__(self, plans_file_or_dict: Union[str, dict]):\n        \"\"\"\n        Why do we need this?\n        1) resolve inheritance in configurations\n        2) expose otherwise annoying stuff like getting the label manager or IO class from a string\n        3) clearly expose the things that are in the plans instead of hiding them in a dict\n        4) cache shit\n\n        This class does not prevent you from going wild. 
You can still use the plans directly if you prefer\n        (PlansHandler.plans['key'])\n        \"\"\"\n        self.plans = plans_file_or_dict if isinstance(plans_file_or_dict, dict) else load_json(plans_file_or_dict)\n\n    def __repr__(self):\n        return self.plans.__repr__()\n\n    def _internal_resolve_configuration_inheritance(self, configuration_name: str,\n                                                    visited: Tuple[str, ...] = None) -> dict:\n        if configuration_name not in self.plans['configurations'].keys():\n            raise ValueError(f'The configuration {configuration_name} does not exist in the plans I have. Valid '\n                             f'configuration names are {list(self.plans[\"configurations\"].keys())}.')\n        configuration = deepcopy(self.plans['configurations'][configuration_name])\n        if 'inherits_from' in configuration:\n            parent_config_name = configuration['inherits_from']\n\n            if visited is None:\n                visited = (configuration_name,)\n            else:\n                if parent_config_name in visited:\n                    raise RuntimeError(f\"Circular dependency detected. The following configurations were visited \"\n                                       f\"while solving inheritance (in that order!): {visited}. \"\n                                       f\"Current configuration: {configuration_name}. 
Its parent configuration \"\n                                       f\"is {parent_config_name}.\")\n                visited = (*visited, configuration_name)\n\n            base_config = self._internal_resolve_configuration_inheritance(parent_config_name, visited)\n            base_config.update(configuration)\n            configuration = base_config\n        return configuration\n\n    @lru_cache(maxsize=10)\n    def get_configuration(self, configuration_name: str):\n        if configuration_name not in self.plans['configurations'].keys():\n            raise RuntimeError(f\"Requested configuration {configuration_name} not found in plans. \"\n                               f\"Available configurations: {list(self.plans['configurations'].keys())}\")\n\n        configuration_dict = self._internal_resolve_configuration_inheritance(configuration_name)\n        return ConfigurationManager(configuration_dict)\n\n    @property\n    def dataset_name(self) -> str:\n        return self.plans['dataset_name']\n\n    @property\n    def plans_name(self) -> str:\n        return self.plans['plans_name']\n\n    @property\n    def original_median_spacing_after_transp(self) -> List[float]:\n        return self.plans['original_median_spacing_after_transp']\n\n    @property\n    def original_median_shape_after_transp(self) -> List[float]:\n        return self.plans['original_median_shape_after_transp']\n\n    @property\n    @lru_cache(maxsize=1)\n    def image_reader_writer_class(self) -> Type[BaseReaderWriter]:\n        return recursive_find_reader_writer_by_name(self.plans['image_reader_writer'])\n\n    @property\n    def transpose_forward(self) -> List[int]:\n        return self.plans['transpose_forward']\n\n    @property\n    def transpose_backward(self) -> List[int]:\n        return self.plans['transpose_backward']\n\n    @property\n    def available_configurations(self) -> List[str]:\n        return list(self.plans['configurations'].keys())\n\n    @property\n    
@lru_cache(maxsize=1)\n    def experiment_planner_class(self) -> Type[ExperimentPlanner]:\n        planner_name = self.experiment_planner_name\n        experiment_planner = recursive_find_python_class(join(nnunetv2.__path__[0], \"experiment_planning\"),\n                                                         planner_name,\n                                                         current_module=\"nnunetv2.experiment_planning\")\n        return experiment_planner\n\n    @property\n    def experiment_planner_name(self) -> str:\n        return self.plans['experiment_planner_used']\n\n    @property\n    @lru_cache(maxsize=1)\n    def label_manager_class(self) -> Type[LabelManager]:\n        return get_labelmanager_class_from_plans(self.plans)\n\n    def get_label_manager(self, dataset_json: dict, **kwargs) -> LabelManager:\n        return self.label_manager_class(label_dict=dataset_json['labels'],\n                                        regions_class_order=dataset_json.get('regions_class_order'),\n                                        **kwargs)\n\n    @property\n    def foreground_intensity_properties_per_channel(self) -> dict:\n        if 'foreground_intensity_properties_per_channel' not in self.plans.keys():\n            if 'foreground_intensity_properties_by_modality' in self.plans.keys():\n                return self.plans['foreground_intensity_properties_by_modality']\n        return self.plans['foreground_intensity_properties_per_channel']\n\n\nif __name__ == '__main__':\n    from nnunetv2.paths import nnUNet_preprocessed\n    from nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name\n\n    plans = load_json(join(nnUNet_preprocessed, maybe_convert_to_dataset_name(3), 'nnUNetPlans.json'))\n    # build new configuration that inherits from 3d_fullres\n    plans['configurations']['3d_fullres_bs4'] = {\n        'batch_size': 4,\n        'inherits_from': '3d_fullres'\n    }\n    # now get plans and configuration managers\n    
plans_manager = PlansManager(plans)\n    configuration_manager = plans_manager.get_configuration('3d_fullres_bs4')\n    print(configuration_manager)  # look for batch size 4\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2/utilities/utils.py",
    "content": "#    Copyright 2021 HIP Applied Computer Vision Lab, Division of Medical Image Computing, German Cancer Research Center\n#    (DKFZ), Heidelberg, Germany\n#\n#    Licensed under the Apache License, Version 2.0 (the \"License\");\n#    you may not use this file except in compliance with the License.\n#    You may obtain a copy of the License at\n#\n#        http://www.apache.org/licenses/LICENSE-2.0\n#\n#    Unless required by applicable law or agreed to in writing, software\n#    distributed under the License is distributed on an \"AS IS\" BASIS,\n#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n#    See the License for the specific language governing permissions and\n#    limitations under the License.\nimport os.path\nfrom functools import lru_cache\nfrom typing import Union\n\nfrom batchgenerators.utilities.file_and_folder_operations import *\nimport numpy as np\nimport re\n\nfrom nnunetv2.paths import nnUNet_raw\n\n\ndef get_identifiers_from_splitted_dataset_folder(folder: str, file_ending: str):\n    files = subfiles(folder, suffix=file_ending, join=False)\n    # all files have a 4 digit channel index (_XXXX)\n    crop = len(file_ending) + 5\n    files = [i[:-crop] for i in files]\n    # only unique image ids\n    files = np.unique(files)\n    return files\n\n\ndef create_lists_from_splitted_dataset_folder(folder: str, file_ending: str, identifiers: List[str] = None) -> List[\n    List[str]]:\n    \"\"\"\n    does not rely on dataset.json\n    \"\"\"\n    if identifiers is None:\n        identifiers = get_identifiers_from_splitted_dataset_folder(folder, file_ending)\n    files = subfiles(folder, suffix=file_ending, join=False, sort=True)\n    list_of_lists = []\n    for f in identifiers:\n        p = re.compile(re.escape(f) + r\"_\\d\\d\\d\\d\" + re.escape(file_ending))\n        list_of_lists.append([join(folder, i) for i in files if p.fullmatch(i)])\n    return list_of_lists\n\n\ndef 
get_filenames_of_train_images_and_targets(raw_dataset_folder: str, dataset_json: dict = None):\n    if dataset_json is None:\n        dataset_json = load_json(join(raw_dataset_folder, 'dataset.json'))\n\n    if 'dataset' in dataset_json.keys():\n        dataset = dataset_json['dataset']\n        for k in dataset.keys():\n            dataset[k]['label'] = os.path.abspath(join(raw_dataset_folder, dataset[k]['label'])) if not os.path.isabs(dataset[k]['label']) else dataset[k]['label']\n            dataset[k]['images'] = [os.path.abspath(join(raw_dataset_folder, i)) if not os.path.isabs(i) else i for i in dataset[k]['images']]\n    else:\n        identifiers = get_identifiers_from_splitted_dataset_folder(join(raw_dataset_folder, 'imagesTr'), dataset_json['file_ending'])\n        images = create_lists_from_splitted_dataset_folder(join(raw_dataset_folder, 'imagesTr'), dataset_json['file_ending'], identifiers)\n        segs = [join(raw_dataset_folder, 'labelsTr', i + dataset_json['file_ending']) for i in identifiers]\n        dataset = {i: {'images': im, 'label': se} for i, im, se in zip(identifiers, images, segs)}\n    return dataset\n\n\nif __name__ == '__main__':\n    print(get_filenames_of_train_images_and_targets(join(nnUNet_raw, 'Dataset002_Heart')))\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2.egg-info/PKG-INFO",
    "content": "Metadata-Version: 2.1\nName: nnunetv2\nVersion: 2.2.1\nSummary: nnU-Net is a framework for out-of-the box image segmentation.\nAuthor: Helmholtz Imaging Applied Computer Vision Lab\nAuthor-email: Fabian Isensee <f.isensee@dkfz-heidelberg.de>\nLicense:                                  Apache License\n                                   Version 2.0, January 2004\n                                http://www.apache.org/licenses/\n        \n           TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n        \n           1. Definitions.\n        \n              \"License\" shall mean the terms and conditions for use, reproduction,\n              and distribution as defined by Sections 1 through 9 of this document.\n        \n              \"Licensor\" shall mean the copyright owner or entity authorized by\n              the copyright owner that is granting the License.\n        \n              \"Legal Entity\" shall mean the union of the acting entity and all\n              other entities that control, are controlled by, or are under common\n              control with that entity. 
For the purposes of this definition,\n              \"control\" means (i) the power, direct or indirect, to cause the\n              direction or management of such entity, whether by contract or\n              otherwise, or (ii) ownership of fifty percent (50%) or more of the\n              outstanding shares, or (iii) beneficial ownership of such entity.\n        \n              \"You\" (or \"Your\") shall mean an individual or Legal Entity\n              exercising permissions granted by this License.\n        \n              \"Source\" form shall mean the preferred form for making modifications,\n              including but not limited to software source code, documentation\n              source, and configuration files.\n        \n              \"Object\" form shall mean any form resulting from mechanical\n              transformation or translation of a Source form, including but\n              not limited to compiled object code, generated documentation,\n              and conversions to other media types.\n        \n              \"Work\" shall mean the work of authorship, whether in Source or\n              Object form, made available under the License, as indicated by a\n              copyright notice that is included in or attached to the work\n              (an example is provided in the Appendix below).\n        \n              \"Derivative Works\" shall mean any work, whether in Source or Object\n              form, that is based on (or derived from) the Work and for which the\n              editorial revisions, annotations, elaborations, or other modifications\n              represent, as a whole, an original work of authorship. 
For the purposes\n              of this License, Derivative Works shall not include works that remain\n              separable from, or merely link (or bind by name) to the interfaces of,\n              the Work and Derivative Works thereof.\n        \n              \"Contribution\" shall mean any work of authorship, including\n              the original version of the Work and any modifications or additions\n              to that Work or Derivative Works thereof, that is intentionally\n              submitted to Licensor for inclusion in the Work by the copyright owner\n              or by an individual or Legal Entity authorized to submit on behalf of\n              the copyright owner. For the purposes of this definition, \"submitted\"\n              means any form of electronic, verbal, or written communication sent\n              to the Licensor or its representatives, including but not limited to\n              communication on electronic mailing lists, source code control systems,\n              and issue tracking systems that are managed by, or on behalf of, the\n              Licensor for the purpose of discussing and improving the Work, but\n              excluding communication that is conspicuously marked or otherwise\n              designated in writing by the copyright owner as \"Not a Contribution.\"\n        \n              \"Contributor\" shall mean Licensor and any individual or Legal Entity\n              on behalf of whom a Contribution has been received by Licensor and\n              subsequently incorporated within the Work.\n        \n           2. Grant of Copyright License. 
Subject to the terms and conditions of\n              this License, each Contributor hereby grants to You a perpetual,\n              worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n              copyright license to reproduce, prepare Derivative Works of,\n              publicly display, publicly perform, sublicense, and distribute the\n              Work and such Derivative Works in Source or Object form.\n        \n           3. Grant of Patent License. Subject to the terms and conditions of\n              this License, each Contributor hereby grants to You a perpetual,\n              worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n              (except as stated in this section) patent license to make, have made,\n              use, offer to sell, sell, import, and otherwise transfer the Work,\n              where such license applies only to those patent claims licensable\n              by such Contributor that are necessarily infringed by their\n              Contribution(s) alone or by combination of their Contribution(s)\n              with the Work to which such Contribution(s) was submitted. If You\n              institute patent litigation against any entity (including a\n              cross-claim or counterclaim in a lawsuit) alleging that the Work\n              or a Contribution incorporated within the Work constitutes direct\n              or contributory patent infringement, then any patent licenses\n              granted to You under this License for that Work shall terminate\n              as of the date such litigation is filed.\n        \n           4. Redistribution. 
You may reproduce and distribute copies of the\n              Work or Derivative Works thereof in any medium, with or without\n              modifications, and in Source or Object form, provided that You\n              meet the following conditions:\n        \n              (a) You must give any other recipients of the Work or\n                  Derivative Works a copy of this License; and\n        \n              (b) You must cause any modified files to carry prominent notices\n                  stating that You changed the files; and\n        \n              (c) You must retain, in the Source form of any Derivative Works\n                  that You distribute, all copyright, patent, trademark, and\n                  attribution notices from the Source form of the Work,\n                  excluding those notices that do not pertain to any part of\n                  the Derivative Works; and\n        \n              (d) If the Work includes a \"NOTICE\" text file as part of its\n                  distribution, then any Derivative Works that You distribute must\n                  include a readable copy of the attribution notices contained\n                  within such NOTICE file, excluding those notices that do not\n                  pertain to any part of the Derivative Works, in at least one\n                  of the following places: within a NOTICE text file distributed\n                  as part of the Derivative Works; within the Source form or\n                  documentation, if provided along with the Derivative Works; or,\n                  within a display generated by the Derivative Works, if and\n                  wherever such third-party notices normally appear. The contents\n                  of the NOTICE file are for informational purposes only and\n                  do not modify the License. 
You may add Your own attribution\n                  notices within Derivative Works that You distribute, alongside\n                  or as an addendum to the NOTICE text from the Work, provided\n                  that such additional attribution notices cannot be construed\n                  as modifying the License.\n        \n              You may add Your own copyright statement to Your modifications and\n              may provide additional or different license terms and conditions\n              for use, reproduction, or distribution of Your modifications, or\n              for any such Derivative Works as a whole, provided Your use,\n              reproduction, and distribution of the Work otherwise complies with\n              the conditions stated in this License.\n        \n           5. Submission of Contributions. Unless You explicitly state otherwise,\n              any Contribution intentionally submitted for inclusion in the Work\n              by You to the Licensor shall be under the terms and conditions of\n              this License, without any additional terms or conditions.\n              Notwithstanding the above, nothing herein shall supersede or modify\n              the terms of any separate license agreement you may have executed\n              with Licensor regarding such Contributions.\n        \n           6. Trademarks. This License does not grant permission to use the trade\n              names, trademarks, service marks, or product names of the Licensor,\n              except as required for reasonable and customary use in describing the\n              origin of the Work and reproducing the content of the NOTICE file.\n        \n           7. Disclaimer of Warranty. 
Unless required by applicable law or\n              agreed to in writing, Licensor provides the Work (and each\n              Contributor provides its Contributions) on an \"AS IS\" BASIS,\n              WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n              implied, including, without limitation, any warranties or conditions\n              of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n              PARTICULAR PURPOSE. You are solely responsible for determining the\n              appropriateness of using or redistributing the Work and assume any\n              risks associated with Your exercise of permissions under this License.\n        \n           8. Limitation of Liability. In no event and under no legal theory,\n              whether in tort (including negligence), contract, or otherwise,\n              unless required by applicable law (such as deliberate and grossly\n              negligent acts) or agreed to in writing, shall any Contributor be\n              liable to You for damages, including any direct, indirect, special,\n              incidental, or consequential damages of any character arising as a\n              result of this License or out of the use or inability to use the\n              Work (including but not limited to damages for loss of goodwill,\n              work stoppage, computer failure or malfunction, or any and all\n              other commercial damages or losses), even if such Contributor\n              has been advised of the possibility of such damages.\n        \n           9. Accepting Warranty or Additional Liability. While redistributing\n              the Work or Derivative Works thereof, You may choose to offer,\n              and charge a fee for, acceptance of support, warranty, indemnity,\n              or other liability obligations and/or rights consistent with this\n              License. 
However, in accepting such obligations, You may act only\n              on Your own behalf and on Your sole responsibility, not on behalf\n              of any other Contributor, and only if You agree to indemnify,\n              defend, and hold each Contributor harmless for any liability\n              incurred by, or claims asserted against, such Contributor by reason\n              of your accepting any such warranty or additional liability.\n        \n           END OF TERMS AND CONDITIONS\n        \n           APPENDIX: How to apply the Apache License to your work.\n        \n              To apply the Apache License to your work, attach the following\n              boilerplate notice, with the fields enclosed by brackets \"[]\"\n              replaced with your own identifying information. (Don't include\n              the brackets!)  The text should be enclosed in the appropriate\n              comment syntax for the file format. We also recommend that a\n              file or class name and description of purpose be included on the\n              same \"printed page\" as the copyright notice for easier\n              identification within third-party archives.\n        \n           Copyright [2019] [Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany]\n        \n           Licensed under the Apache License, Version 2.0 (the \"License\");\n           you may not use this file except in compliance with the License.\n           You may obtain a copy of the License at\n        \n               http://www.apache.org/licenses/LICENSE-2.0\n        \n           Unless required by applicable law or agreed to in writing, software\n           distributed under the License is distributed on an \"AS IS\" BASIS,\n           WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n           See the License for the specific language governing permissions and\n           limitations under the 
License.\nProject-URL: homepage, https://github.com/MIC-DKFZ/nnUNet\nProject-URL: repository, https://github.com/MIC-DKFZ/nnUNet\nKeywords: deep learning,image segmentation,semantic segmentation,medical image analysis,medical image segmentation,nnU-Net,nnunet\nClassifier: Development Status :: 5 - Production/Stable\nClassifier: Intended Audience :: Developers\nClassifier: Intended Audience :: Science/Research\nClassifier: Intended Audience :: Healthcare Industry\nClassifier: Programming Language :: Python :: 3\nClassifier: License :: OSI Approved :: Apache Software License\nClassifier: Topic :: Scientific/Engineering :: Artificial Intelligence\nClassifier: Topic :: Scientific/Engineering :: Image Recognition\nClassifier: Topic :: Scientific/Engineering :: Medical Science Apps.\nRequires-Python: >=3.9\nDescription-Content-Type: text/markdown\nLicense-File: LICENSE\nRequires-Dist: torch>=2.0.0\nRequires-Dist: acvl-utils>=0.2\nRequires-Dist: dynamic-network-architectures>=0.2\nRequires-Dist: tqdm\nRequires-Dist: dicom2nifti\nRequires-Dist: scipy\nRequires-Dist: batchgenerators>=0.25\nRequires-Dist: numpy\nRequires-Dist: scikit-learn\nRequires-Dist: scikit-image>=0.19.3\nRequires-Dist: SimpleITK>=2.2.1\nRequires-Dist: pandas\nRequires-Dist: graphviz\nRequires-Dist: tifffile\nRequires-Dist: requests\nRequires-Dist: nibabel\nRequires-Dist: matplotlib\nRequires-Dist: seaborn\nRequires-Dist: imagecodecs\nRequires-Dist: yacs\nProvides-Extra: dev\nRequires-Dist: black; extra == \"dev\"\nRequires-Dist: ruff; extra == \"dev\"\nRequires-Dist: pre-commit; extra == \"dev\"\n\n# Welcome to the new nnU-Net!\n\nClick [here](https://github.com/MIC-DKFZ/nnUNet/tree/nnunetv1) if you were looking for the old one instead.\n\nComing from V1? Check out the [TLDR Migration Guide](documentation/tldr_migration_guide_from_v1.md). 
Reading the rest of the documentation is still strongly recommended ;-)\n\n# What is nnU-Net?\nImage datasets are enormously diverse: image dimensionality (2D, 3D), modalities/input channels (RGB image, CT, MRI, microscopy, ...), \nimage sizes, voxel sizes, class ratio, target structure properties and more change substantially between datasets. \nTraditionally, given a new problem, a tailored solution needs to be manually designed and optimized  - a process that \nis prone to errors, not scalable and where success is overwhelmingly determined by the skill of the experimenter. Even \nfor experts, this process is anything but simple: there are not only many design choices and data properties that need to \nbe considered, but they are also tightly interconnected, rendering reliable manual pipeline optimization all but impossible! \n\n![nnU-Net overview](documentation/assets/nnU-Net_overview.png)\n\n**nnU-Net is a semantic segmentation method that automatically adapts to a given dataset. It will analyze the provided \ntraining cases and automatically configure a matching U-Net-based segmentation pipeline. No expertise required on your \nend! You can simply train the models and use them for your application**.\n\nUpon release, nnU-Net was evaluated on 23 datasets belonging to competitions from the biomedical domain. Despite competing \nwith handcrafted solutions for each respective dataset, nnU-Net's fully automated pipeline scored several first places on \nopen leaderboards! 
Since then nnU-Net has stood the test of time: it continues to be used as a baseline and method \ndevelopment framework ([9 out of 10 challenge winners at MICCAI 2020](https://arxiv.org/abs/2101.00232) and 5 out of 7 \nin MICCAI 2021 built their methods on top of nnU-Net, \n [we won AMOS2022 with nnU-Net](https://amos22.grand-challenge.org/final-ranking/))!\n\nPlease cite the [following paper](https://www.google.com/url?q=https://www.nature.com/articles/s41592-020-01008-z&sa=D&source=docs&ust=1677235958581755&usg=AOvVaw3dWL0SrITLhCJUBiNIHCQO) when using nnU-Net:\n\n    Isensee, F., Jaeger, P. F., Kohl, S. A., Petersen, J., & Maier-Hein, K. H. (2021). nnU-Net: a self-configuring \n    method for deep learning-based biomedical image segmentation. Nature methods, 18(2), 203-211.\n\n\n## What can nnU-Net do for you?\nIf you are a **domain scientist** (biologist, radiologist, ...) looking to analyze your own images, nnU-Net provides \nan out-of-the-box solution that is all but guaranteed to provide excellent results on your individual dataset. Simply \nconvert your dataset into the nnU-Net format and enjoy the power of AI - no expertise required!\n\nIf you are an **AI researcher** developing segmentation methods, nnU-Net:\n- offers a fantastic out-of-the-box applicable baseline algorithm to compete against\n- can act as a method development framework to test your contribution on a large number of datasets without having to \ntune individual pipelines (for example evaluating a new loss function)\n- provides a strong starting point for further dataset-specific optimizations. This is particularly used when competing \nin segmentation challenges\n- provides a new perspective on the design of segmentation methods: maybe you can find better connections between \ndataset properties and best-fitting segmentation pipelines?\n\n## What is the scope of nnU-Net?\nnnU-Net is built for semantic segmentation. It can handle 2D and 3D images with arbitrary \ninput modalities/channels. 
It can understand voxel spacings, anisotropies and is robust even when classes are highly\nimbalanced.\n\nnnU-Net relies on supervised learning, which means that you need to provide training cases for your application. The number of \nrequired training cases varies heavily depending on the complexity of the segmentation problem. No \none-fits-all number can be provided here! nnU-Net does not require more training cases than other solutions - maybe \neven less due to our extensive use of data augmentation. \n\nnnU-Net expects to be able to process entire images at once during preprocessing and postprocessing, so it cannot \nhandle enormous images. As a reference: we tested images from 40x40x40 pixels all the way up to 1500x1500x1500 in 3D \nand 40x40 up to ~30000x30000 in 2D! If your RAM allows it, larger is always possible.\n\n## How does nnU-Net work?\nGiven a new dataset, nnU-Net will systematically analyze the provided training cases and create a 'dataset fingerprint'. \nnnU-Net then creates several U-Net configurations for each dataset: \n- `2d`: a 2D U-Net (for 2D and 3D datasets)\n- `3d_fullres`: a 3D U-Net that operates on a high image resolution (for 3D datasets only)\n- `3d_lowres` → `3d_cascade_fullres`: a 3D U-Net cascade where first a 3D U-Net operates on low resolution images and \nthen a second high-resolution 3D U-Net refined the predictions of the former (for 3D datasets with large image sizes only)\n\n**Note that not all U-Net configurations are created for all datasets. In datasets with small image sizes, the \nU-Net cascade (and with it the 3d_lowres configuration) is omitted because the patch size of the full \nresolution U-Net already covers a large part of the input images.**\n\nnnU-Net configures its segmentation pipelines based on a three-step recipe:\n- **Fixed parameters** are not adapted. 
During development of nnU-Net we identified a robust configuration (that is, certain architecture and training properties) that can \nsimply be used all the time. This includes, for example, nnU-Net's loss function, (most of the) data augmentation strategy and learning rate.\n- **Rule-based parameters** use the dataset fingerprint to adapt certain segmentation pipeline properties by following \nhard-coded heuristic rules. For example, the network topology (pooling behavior and depth of the network architecture) \nare adapted to the patch size; the patch size, network topology and batch size are optimized jointly given some GPU \nmemory constraint. \n- **Empirical parameters** are essentially trial-and-error. For example the selection of the best U-net configuration \nfor the given dataset (2D, 3D full resolution, 3D low resolution, 3D cascade) and the optimization of the postprocessing strategy.\n\n## How to get started?\nRead these:\n- [Installation instructions](documentation/installation_instructions.md)\n- [Dataset conversion](documentation/dataset_format.md)\n- [Usage instructions](documentation/how_to_use_nnunet.md)\n\nAdditional information:\n- [Region-based training](documentation/region_based_training.md)\n- [Manual data splits](documentation/manual_data_splits.md)\n- [Pretraining and finetuning](documentation/pretraining_and_finetuning.md)\n- [Intensity Normalization in nnU-Net](documentation/explanation_normalization.md)\n- [Manually editing nnU-Net configurations](documentation/explanation_plans_files.md)\n- [Extending nnU-Net](documentation/extending_nnunet.md)\n- [What is different in V2?](documentation/changelog.md)\n\nCompetitions:\n- [AutoPET II](documentation/competitions/AutoPETII.md)\n\n[//]: # (- [Ignore label]&#40;documentation/ignore_label.md&#41;)\n\n## Where does nnU-Net perform well and where does it not perform?\nnnU-Net excels in segmentation problems that need to be solved by training from scratch, \nfor example: research applications 
that feature non-standard image modalities and input channels,\nchallenge datasets from the biomedical domain, majority of 3D segmentation problems, etc . We have yet to find a \ndataset for which nnU-Net's working principle fails!\n\nNote: On standard segmentation \nproblems, such as 2D RGB images in ADE20k and Cityscapes, fine-tuning a foundation model (that was pretrained on a large corpus of \nsimilar images, e.g. Imagenet 22k, JFT-300M) will provide better performance than nnU-Net! That is simply because these \nmodels allow much better initialization. Foundation models are not supported by nnU-Net as \nthey 1) are not useful for segmentation problems that deviate from the standard setting (see above mentioned \ndatasets), 2) would typically only support 2D architectures and 3) conflict with our core design principle of carefully adapting \nthe network topology for each dataset (if the topology is changed one can no longer transfer pretrained weights!) \n\n## What happened to the old nnU-Net?\nThe core of the old nnU-Net was hacked together in a short time period while participating in the Medical Segmentation \nDecathlon challenge in 2018. Consequently, code structure and quality were not the best. Many features \nwere added later on and didn't quite fit into the nnU-Net design principles. Overall quite messy, really. And annoying to work with.\n\nnnU-Net V2 is a complete overhaul. The \"delete everything and start again\" kind. So everything is better \n(in the author's opinion haha). While the segmentation performance [remains the same](https://docs.google.com/spreadsheets/d/13gqjIKEMPFPyMMMwA1EML57IyoBjfC3-QCTn4zRN_Mg/edit?usp=sharing), a lot of cool stuff has been added. \nIt is now also much easier to use it as a development framework and to manually fine-tune its configuration to new \ndatasets. 
A big driver for the reimplementation was also the emergence of [Helmholtz Imaging](http://helmholtz-imaging.de), \nprompting us to extend nnU-Net to more image formats and domains. Take a look [here](documentation/changelog.md) for some highlights.\n\n# Acknowledgements\n<img src=\"documentation/assets/HI_Logo.png\" height=\"100px\" />\n\n<img src=\"documentation/assets/dkfz_logo.png\" height=\"100px\" />\n\nnnU-Net is developed and maintained by the Applied Computer Vision Lab (ACVL) of [Helmholtz Imaging](http://helmholtz-imaging.de) \nand the [Division of Medical Image Computing](https://www.dkfz.de/en/mic/index.php) at the \n[German Cancer Research Center (DKFZ)](https://www.dkfz.de/en/index.html).\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2.egg-info/SOURCES.txt",
    "content": "LICENSE\npyproject.toml\nreadme.md\nsetup.py\nnnunetv2/__init__.py\nnnunetv2/configuration.py\nnnunetv2/paths.py\nnnunetv2.egg-info/PKG-INFO\nnnunetv2.egg-info/SOURCES.txt\nnnunetv2.egg-info/dependency_links.txt\nnnunetv2.egg-info/entry_points.txt\nnnunetv2.egg-info/requires.txt\nnnunetv2.egg-info/top_level.txt\nnnunetv2/batch_running/__init__.py\nnnunetv2/batch_running/collect_results_custom_Decathlon.py\nnnunetv2/batch_running/collect_results_custom_Decathlon_2d.py\nnnunetv2/batch_running/generate_lsf_runs_customDecathlon.py\nnnunetv2/batch_running/benchmarking/__init__.py\nnnunetv2/batch_running/benchmarking/generate_benchmarking_commands.py\nnnunetv2/batch_running/benchmarking/summarize_benchmark_results.py\nnnunetv2/batch_running/release_trainings/__init__.py\nnnunetv2/batch_running/release_trainings/nnunetv2_v1/__init__.py\nnnunetv2/batch_running/release_trainings/nnunetv2_v1/collect_results.py\nnnunetv2/batch_running/release_trainings/nnunetv2_v1/generate_lsf_commands.py\nnnunetv2/dataset_conversion/Dataset027_ACDC.py\nnnunetv2/dataset_conversion/Dataset073_Fluo_C3DH_A549_SIM.py\nnnunetv2/dataset_conversion/Dataset114_MNMs.py\nnnunetv2/dataset_conversion/Dataset115_EMIDEC.py\nnnunetv2/dataset_conversion/Dataset120_RoadSegmentation.py\nnnunetv2/dataset_conversion/Dataset137_BraTS21.py\nnnunetv2/dataset_conversion/Dataset218_Amos2022_task1.py\nnnunetv2/dataset_conversion/Dataset219_Amos2022_task2.py\nnnunetv2/dataset_conversion/Dataset220_KiTS2023.py\nnnunetv2/dataset_conversion/Dataset221_AutoPETII_2023.py\nnnunetv2/dataset_conversion/Dataset988_dummyDataset4.py\nnnunetv2/dataset_conversion/__init__.py\nnnunetv2/dataset_conversion/convert_MSD_dataset.py\nnnunetv2/dataset_conversion/convert_raw_dataset_from_old_nnunet_format.py\nnnunetv2/dataset_conversion/generate_dataset_json.py\nnnunetv2/dataset_conversion/datasets_for_integration_tests/Dataset996_IntegrationTest_Hippocampus_regions_ignore.py\nnnunetv2/dataset_conversion/datasets_for_integrat
ion_tests/Dataset997_IntegrationTest_Hippocampus_regions.py\nnnunetv2/dataset_conversion/datasets_for_integration_tests/Dataset998_IntegrationTest_Hippocampus_ignore.py\nnnunetv2/dataset_conversion/datasets_for_integration_tests/Dataset999_IntegrationTest_Hippocampus.py\nnnunetv2/dataset_conversion/datasets_for_integration_tests/__init__.py\nnnunetv2/ensembling/__init__.py\nnnunetv2/ensembling/ensemble.py\nnnunetv2/evaluation/__init__.py\nnnunetv2/evaluation/accumulate_cv_results.py\nnnunetv2/evaluation/evaluate_predictions.py\nnnunetv2/evaluation/find_best_configuration.py\nnnunetv2/experiment_planning/__init__.py\nnnunetv2/experiment_planning/plan_and_preprocess_api.py\nnnunetv2/experiment_planning/plan_and_preprocess_entrypoints.py\nnnunetv2/experiment_planning/verify_dataset_integrity.py\nnnunetv2/experiment_planning/dataset_fingerprint/__init__.py\nnnunetv2/experiment_planning/dataset_fingerprint/fingerprint_extractor.py\nnnunetv2/experiment_planning/experiment_planners/__init__.py\nnnunetv2/experiment_planning/experiment_planners/default_experiment_planner.py\nnnunetv2/experiment_planning/experiment_planners/network_topology.py\nnnunetv2/experiment_planning/experiment_planners/resencUNet_planner.py\nnnunetv2/experiment_planning/plans_for_pretraining/__init__.py\nnnunetv2/experiment_planning/plans_for_pretraining/move_plans_between_datasets.py\nnnunetv2/imageio/__init__.py\nnnunetv2/imageio/base_reader_writer.py\nnnunetv2/imageio/natural_image_reader_writer.py\nnnunetv2/imageio/nibabel_reader_writer.py\nnnunetv2/imageio/reader_writer_registry.py\nnnunetv2/imageio/simpleitk_reader_writer.py\nnnunetv2/imageio/tif_reader_writer.py\nnnunetv2/inference/__init__.py\nnnunetv2/inference/data_iterators.py\nnnunetv2/inference/examples.py\nnnunetv2/inference/export_prediction.py\nnnunetv2/inference/predict_from_raw_data.py\nnnunetv2/inference/sliding_window_prediction.py\nnnunetv2/model_sharing/__init__.py\nnnunetv2/model_sharing/entry_points.py\nnnunetv2/model_sharing/mo
del_download.py\nnnunetv2/model_sharing/model_export.py\nnnunetv2/model_sharing/model_import.py\nnnunetv2/postprocessing/__init__.py\nnnunetv2/postprocessing/remove_connected_components.py\nnnunetv2/preprocessing/__init__.py\nnnunetv2/preprocessing/cropping/__init__.py\nnnunetv2/preprocessing/cropping/cropping.py\nnnunetv2/preprocessing/normalization/__init__.py\nnnunetv2/preprocessing/normalization/default_normalization_schemes.py\nnnunetv2/preprocessing/normalization/map_channel_name_to_normalization.py\nnnunetv2/preprocessing/preprocessors/__init__.py\nnnunetv2/preprocessing/preprocessors/default_preprocessor.py\nnnunetv2/preprocessing/resampling/__init__.py\nnnunetv2/preprocessing/resampling/default_resampling.py\nnnunetv2/preprocessing/resampling/utils.py\nnnunetv2/run/__init__.py\nnnunetv2/run/load_pretrained_weights.py\nnnunetv2/run/run_training.py\nnnunetv2/tests/__init__.py\nnnunetv2/tests/integration_tests/__init__.py\nnnunetv2/tests/integration_tests/add_lowres_and_cascade.py\nnnunetv2/tests/integration_tests/cleanup_integration_test.py\nnnunetv2/tests/integration_tests/run_integration_test_bestconfig_inference.py\nnnunetv2/training/__init__.py\nnnunetv2/training/data_augmentation/__init__.py\nnnunetv2/training/data_augmentation/compute_initial_patch_size.py\nnnunetv2/training/data_augmentation/custom_transforms/__init__.py\nnnunetv2/training/data_augmentation/custom_transforms/cascade_transforms.py\nnnunetv2/training/data_augmentation/custom_transforms/deep_supervision_donwsampling.py\nnnunetv2/training/data_augmentation/custom_transforms/limited_length_multithreaded_augmenter.py\nnnunetv2/training/data_augmentation/custom_transforms/manipulating_data_dict.py\nnnunetv2/training/data_augmentation/custom_transforms/masking.py\nnnunetv2/training/data_augmentation/custom_transforms/region_based_training.py\nnnunetv2/training/data_augmentation/custom_transforms/transforms_for_dummy_2d.py\nnnunetv2/training/dataloading/__init__.py\nnnunetv2/training/dataloadin
g/base_data_loader.py\nnnunetv2/training/dataloading/data_loader_2d.py\nnnunetv2/training/dataloading/data_loader_3d.py\nnnunetv2/training/dataloading/nnunet_dataset.py\nnnunetv2/training/dataloading/utils.py\nnnunetv2/training/logging/__init__.py\nnnunetv2/training/logging/nnunet_logger.py\nnnunetv2/training/loss/__init__.py\nnnunetv2/training/loss/compound_losses.py\nnnunetv2/training/loss/deep_supervision.py\nnnunetv2/training/loss/dice.py\nnnunetv2/training/loss/robust_ce_loss.py\nnnunetv2/training/lr_scheduler/__init__.py\nnnunetv2/training/lr_scheduler/polylr.py\nnnunetv2/training/nnUNetTrainer/__init__.py\nnnunetv2/training/nnUNetTrainer/nnUNetTrainer.py\nnnunetv2/training/nnUNetTrainer/variants/__init__.py\nnnunetv2/training/nnUNetTrainer/variants/benchmarking/__init__.py\nnnunetv2/training/nnUNetTrainer/variants/benchmarking/nnUNetTrainerBenchmark_5epochs.py\nnnunetv2/training/nnUNetTrainer/variants/benchmarking/nnUNetTrainerBenchmark_5epochs_noDataLoading.py\nnnunetv2/training/nnUNetTrainer/variants/data_augmentation/__init__.py\nnnunetv2/training/nnUNetTrainer/variants/data_augmentation/nnUNetTrainerDA5.py\nnnunetv2/training/nnUNetTrainer/variants/data_augmentation/nnUNetTrainerDAOrd0.py\nnnunetv2/training/nnUNetTrainer/variants/data_augmentation/nnUNetTrainerNoDA.py\nnnunetv2/training/nnUNetTrainer/variants/data_augmentation/nnUNetTrainerNoMirroring.py\nnnunetv2/training/nnUNetTrainer/variants/loss/__init__.py\nnnunetv2/training/nnUNetTrainer/variants/loss/nnUNetTrainerCELoss.py\nnnunetv2/training/nnUNetTrainer/variants/loss/nnUNetTrainerDiceLoss.py\nnnunetv2/training/nnUNetTrainer/variants/loss/nnUNetTrainerTopkLoss.py\nnnunetv2/training/nnUNetTrainer/variants/lr_schedule/__init__.py\nnnunetv2/training/nnUNetTrainer/variants/lr_schedule/nnUNetTrainerCosAnneal.py\nnnunetv2/training/nnUNetTrainer/variants/network_architecture/__init__.py\nnnunetv2/training/nnUNetTrainer/variants/network_architecture/nnUNetTrainerBN.py\nnnunetv2/training/nnUNetTrainer/vari
ants/network_architecture/nnUNetTrainerNoDeepSupervision.py\nnnunetv2/training/nnUNetTrainer/variants/optimizer/__init__.py\nnnunetv2/training/nnUNetTrainer/variants/optimizer/nnUNetTrainerAdam.py\nnnunetv2/training/nnUNetTrainer/variants/optimizer/nnUNetTrainerAdan.py\nnnunetv2/training/nnUNetTrainer/variants/sampling/__init__.py\nnnunetv2/training/nnUNetTrainer/variants/sampling/nnUNetTrainer_probabilisticOversampling.py\nnnunetv2/training/nnUNetTrainer/variants/training_length/__init__.py\nnnunetv2/training/nnUNetTrainer/variants/training_length/nnUNetTrainer_Xepochs.py\nnnunetv2/training/nnUNetTrainer/variants/training_length/nnUNetTrainer_Xepochs_NoMirroring.py\nnnunetv2/utilities/__init__.py\nnnunetv2/utilities/collate_outputs.py\nnnunetv2/utilities/dataset_name_id_conversion.py\nnnunetv2/utilities/ddp_allgather.py\nnnunetv2/utilities/default_n_proc_DA.py\nnnunetv2/utilities/file_path_utilities.py\nnnunetv2/utilities/find_class_by_name.py\nnnunetv2/utilities/get_network_from_plans.py\nnnunetv2/utilities/helpers.py\nnnunetv2/utilities/json_export.py\nnnunetv2/utilities/network_initialization.py\nnnunetv2/utilities/overlay_plots.py\nnnunetv2/utilities/utils.py\nnnunetv2/utilities/label_handling/__init__.py\nnnunetv2/utilities/label_handling/label_handling.py\nnnunetv2/utilities/plans_handling/__init__.py\nnnunetv2/utilities/plans_handling/plans_handler.py"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2.egg-info/dependency_links.txt",
    "content": "\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2.egg-info/entry_points.txt",
    "content": "[console_scripts]\nnnUNetv2_accumulate_crossval_results = nnunetv2.evaluation.find_best_configuration:accumulate_crossval_results_entry_point\nnnUNetv2_apply_postprocessing = nnunetv2.postprocessing.remove_connected_components:entry_point_apply_postprocessing\nnnUNetv2_convert_MSD_dataset = nnunetv2.dataset_conversion.convert_MSD_dataset:entry_point\nnnUNetv2_convert_old_nnUNet_dataset = nnunetv2.dataset_conversion.convert_raw_dataset_from_old_nnunet_format:convert_entry_point\nnnUNetv2_determine_postprocessing = nnunetv2.postprocessing.remove_connected_components:entry_point_determine_postprocessing_folder\nnnUNetv2_download_pretrained_model_by_url = nnunetv2.model_sharing.entry_points:download_by_url\nnnUNetv2_ensemble = nnunetv2.ensembling.ensemble:entry_point_ensemble_folders\nnnUNetv2_evaluate_folder = nnunetv2.evaluation.evaluate_predictions:evaluate_folder_entry_point\nnnUNetv2_evaluate_simple = nnunetv2.evaluation.evaluate_predictions:evaluate_simple_entry_point\nnnUNetv2_export_model_to_zip = nnunetv2.model_sharing.entry_points:export_pretrained_model_entry\nnnUNetv2_extract_fingerprint = nnunetv2.experiment_planning.plan_and_preprocess_entrypoints:extract_fingerprint_entry\nnnUNetv2_find_best_configuration = nnunetv2.evaluation.find_best_configuration:find_best_configuration_entry_point\nnnUNetv2_install_pretrained_model_from_zip = nnunetv2.model_sharing.entry_points:install_from_zip_entry_point\nnnUNetv2_move_plans_between_datasets = nnunetv2.experiment_planning.plans_for_pretraining.move_plans_between_datasets:entry_point_move_plans_between_datasets\nnnUNetv2_plan_and_preprocess = nnunetv2.experiment_planning.plan_and_preprocess_entrypoints:plan_and_preprocess_entry\nnnUNetv2_plan_experiment = nnunetv2.experiment_planning.plan_and_preprocess_entrypoints:plan_experiment_entry\nnnUNetv2_plot_overlay_pngs = nnunetv2.utilities.overlay_plots:entry_point_generate_overlay\nnnUNetv2_predict = 
nnunetv2.inference.predict_from_raw_data:predict_entry_point\nnnUNetv2_predict_from_modelfolder = nnunetv2.inference.predict_from_raw_data:predict_entry_point_modelfolder\nnnUNetv2_preprocess = nnunetv2.experiment_planning.plan_and_preprocess_entrypoints:preprocess_entry\nnnUNetv2_train = nnunetv2.run.run_training:run_training_entry\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2.egg-info/requires.txt",
    "content": "torch>=2.0.0\nacvl-utils>=0.2\ndynamic-network-architectures>=0.2\ntqdm\ndicom2nifti\nscipy\nbatchgenerators>=0.25\nnumpy\nscikit-learn\nscikit-image>=0.19.3\nSimpleITK>=2.2.1\npandas\ngraphviz\ntifffile\nrequests\nnibabel\nmatplotlib\nseaborn\nimagecodecs\nyacs\n\n[dev]\nblack\nruff\npre-commit\n"
  },
  {
    "path": "Finetune/nnUNet/nnunetv2.egg-info/top_level.txt",
    "content": "nnunetv2\n"
  },
  {
    "path": "Finetune/nnUNet/pyproject.toml",
    "content": "[project]\nname = \"nnunetv2\"\nversion = \"2.2.1\"\nrequires-python = \">=3.9\"\ndescription = \"nnU-Net is a framework for out-of-the box image segmentation.\"\nreadme = \"readme.md\"\nlicense = { file = \"LICENSE\" }\nauthors = [\n    { name = \"Fabian Isensee\", email = \"f.isensee@dkfz-heidelberg.de\"},\n    { name = \"Helmholtz Imaging Applied Computer Vision Lab\" }\n]\nclassifiers = [\n    \"Development Status :: 5 - Production/Stable\",\n    \"Intended Audience :: Developers\",\n    \"Intended Audience :: Science/Research\",\n    \"Intended Audience :: Healthcare Industry\",\n    \"Programming Language :: Python :: 3\",\n    \"License :: OSI Approved :: Apache Software License\",\n    \"Topic :: Scientific/Engineering :: Artificial Intelligence\",\n    \"Topic :: Scientific/Engineering :: Image Recognition\",\n    \"Topic :: Scientific/Engineering :: Medical Science Apps.\",\n]\nkeywords = [\n    'deep learning',\n    'image segmentation',\n    'semantic segmentation',\n    'medical image analysis',\n    'medical image segmentation',\n    'nnU-Net',\n    'nnunet'\n]\ndependencies = [\n    \"torch>=2.0.0\",\n    \"acvl-utils>=0.2\",\n    \"dynamic-network-architectures>=0.2\",\n    \"tqdm\",\n    \"dicom2nifti\",\n    \"scipy\",\n    \"batchgenerators>=0.25\",\n    \"numpy\",\n    \"scikit-learn\",\n    \"scikit-image>=0.19.3\",\n    \"SimpleITK>=2.2.1\",\n    \"pandas\",\n    \"graphviz\",\n    'tifffile',\n    'requests',\n    \"nibabel\",\n    \"matplotlib\",\n    \"seaborn\",\n    \"imagecodecs\",\n    \"yacs\"\n]\n\n[project.urls]\nhomepage = \"https://github.com/MIC-DKFZ/nnUNet\"\nrepository = \"https://github.com/MIC-DKFZ/nnUNet\"\n\n[project.scripts]\nnnUNetv2_plan_and_preprocess = \"nnunetv2.experiment_planning.plan_and_preprocess_entrypoints:plan_and_preprocess_entry\"\nnnUNetv2_extract_fingerprint = \"nnunetv2.experiment_planning.plan_and_preprocess_entrypoints:extract_fingerprint_entry\"\nnnUNetv2_plan_experiment = 
\"nnunetv2.experiment_planning.plan_and_preprocess_entrypoints:plan_experiment_entry\"\nnnUNetv2_preprocess = \"nnunetv2.experiment_planning.plan_and_preprocess_entrypoints:preprocess_entry\"\nnnUNetv2_train = \"nnunetv2.run.run_training:run_training_entry\"\nnnUNetv2_predict_from_modelfolder = \"nnunetv2.inference.predict_from_raw_data:predict_entry_point_modelfolder\"\nnnUNetv2_predict = \"nnunetv2.inference.predict_from_raw_data:predict_entry_point\"\nnnUNetv2_convert_old_nnUNet_dataset = \"nnunetv2.dataset_conversion.convert_raw_dataset_from_old_nnunet_format:convert_entry_point\"\nnnUNetv2_find_best_configuration = \"nnunetv2.evaluation.find_best_configuration:find_best_configuration_entry_point\"\nnnUNetv2_determine_postprocessing = \"nnunetv2.postprocessing.remove_connected_components:entry_point_determine_postprocessing_folder\"\nnnUNetv2_apply_postprocessing = \"nnunetv2.postprocessing.remove_connected_components:entry_point_apply_postprocessing\"\nnnUNetv2_ensemble = \"nnunetv2.ensembling.ensemble:entry_point_ensemble_folders\"\nnnUNetv2_accumulate_crossval_results = \"nnunetv2.evaluation.find_best_configuration:accumulate_crossval_results_entry_point\"\nnnUNetv2_plot_overlay_pngs = \"nnunetv2.utilities.overlay_plots:entry_point_generate_overlay\"\nnnUNetv2_download_pretrained_model_by_url = \"nnunetv2.model_sharing.entry_points:download_by_url\"\nnnUNetv2_install_pretrained_model_from_zip = \"nnunetv2.model_sharing.entry_points:install_from_zip_entry_point\"\nnnUNetv2_export_model_to_zip = \"nnunetv2.model_sharing.entry_points:export_pretrained_model_entry\"\nnnUNetv2_move_plans_between_datasets = \"nnunetv2.experiment_planning.plans_for_pretraining.move_plans_between_datasets:entry_point_move_plans_between_datasets\"\nnnUNetv2_evaluate_folder = \"nnunetv2.evaluation.evaluate_predictions:evaluate_folder_entry_point\"\nnnUNetv2_evaluate_simple = \"nnunetv2.evaluation.evaluate_predictions:evaluate_simple_entry_point\"\nnnUNetv2_convert_MSD_dataset = 
\"nnunetv2.dataset_conversion.convert_MSD_dataset:entry_point\"\n\n[project.optional-dependencies]\ndev = [\n    \"black\",\n    \"ruff\",\n    \"pre-commit\"\n]\n\n[tool.codespell]\nskip = '.git,*.pdf,*.svg'\n#\n# ignore-words-list = ''\n"
  },
  {
    "path": "Finetune/nnUNet/setup.py",
    "content": "import setuptools\n\nif __name__ == \"__main__\":\n    setuptools.setup()\n"
  },
  {
    "path": "LICENSE",
    "content": "                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      
form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. 
(Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright [yyyy] [name of copyright owner]\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "README.md",
    "content": "# VoCo\n\n<a href=\"https://arxiv.org/abs/2402.17300\"><img src='https://img.shields.io/badge/arXiv-VoCo-red' alt='Paper PDF'></a>\n<a href='https://huggingface.co/datasets/Luffy503/VoCo-10k/tree/main'><img src='https://img.shields.io/badge/%F0%9F%A4%97%20Hugging%20Face-Spaces-blue'></a>\n\nCode for CVPR 2024 paper, [**\"VoCo: A Simple-yet-Effective Volume Contrastive Learning Framework for 3D Medical Image Analysis\"**](https://arxiv.org/abs/2402.17300)\n\nAuthors: Linshan Wu, <a href=\"https://scholar.google.com/citations?user=PfM5gucAAAAJ&hl=en\">Jiaxin Zhuang</a>, and <a href=\"https://scholar.google.com/citations?hl=en&user=Z_t5DjwAAAAJ\">Hao Chen</a>\n\nThis work presents VoCo, a simple-yet-effective contrastive learning framework for pre-training large scale 3D medical images. Our **10k CT images pre-training** model are available. Our **160k CT images pre-training** models are available!\n\n**Our TPAMI version is available at [**Large-Scale-Medical**](https://github.com/Luffy03/Large-Scale-Medical), which provides stronger models, larger-scale datasets, various training recipes, and more downstream tasks!!!**\n\n## Abstract\nSelf-Supervised Learning (SSL) has demonstrated promising results in 3D medical image analysis. However, the lack of high-level semantics in pre-training still heavily hinders the performance of downstream tasks. We observe that 3D medical images contain relatively consistent contextual position information, i.e., consistent geometric relations between different organs, which leads to a potential way for us to learn consistent semantic representations in pre-training. In this paper, we propose a simple-yet-effective **Vo**lume **Co**ntrast (**VoCo**) framework to leverage the contextual position priors for pre-training. Specifically, we first generate a group of base crops from different regions while enforcing feature discrepancy among them, where we employ them as class assignments of different regions. 
Then, we randomly crop sub-volumes and predict them belonging to which class (located at which region) by contrasting their similarity to different base crops, which can be seen as predicting contextual positions of different sub-volumes. Through this pretext task, VoCo implicitly encodes the contextual position priors into model representations without the guidance of annotations, enabling us to effectively improve the performance of downstream tasks that require high-level semantics. Extensive experimental results on six downstream tasks demonstrate the superior effectiveness of VoCo.\n\n![teaser](assets/framework.png)\n\n## Usage\n### Load Pre-trained weight\n```\nimport torch\nimport argparse\nfrom monai.networks.nets import SwinUNETR\n\nparser = argparse.ArgumentParser(description=\"Swin UNETR\")\nparser.add_argument(\"--roi_x\", default=roi, type=int, help=\"roi size in x direction\")\nparser.add_argument(\"--roi_y\", default=roi, type=int, help=\"roi size in y direction\")\nparser.add_argument(\"--roi_z\", default=roi, type=int, help=\"roi size in z direction\")\nparser.add_argument(\"--feature_size\", default=48, type=int, help=\"feature size\")\nparser.add_argument(\"--in_channels\", default=1, type=int, help=\"number of input channels\")\nparser.add_argument(\"--out_channels\", default=14, type=int, help=\"number of output channels\")\nparser.add_argument(\"--use_checkpoint\", default=True, help=\"use gradient checkpointing to save memory\")\nargs = parser.parse_args()\nmodel = SwinUNETR(\n        img_size=(args.roi_x, args.roi_y, args.roi_z),\n        in_channels=args.in_channels,\n        out_channels=args.out_channels,\n        feature_size=args.feature_size,\n        use_checkpoint=args.use_checkpoint,\n        use_v2=True)\nmodel_dict = torch.load(args.pretrained_checkpoint, map_location=torch.device('cpu'))\nstate_dict = model_dict\nif \"module.\" in list(state_dict.keys())[0]:\n    print(\"Tag 'module.' 
found in state dict - fixing!\")\n    for key in list(state_dict.keys()):\n        state_dict[key.replace(\"module.\", \"\")] = state_dict.pop(key)\nif \"swin_vit\" in list(state_dict.keys())[0]:\n    print(\"Tag 'swin_vit' found in state dict - fixing!\")\n    for key in list(state_dict.keys()):\n        state_dict[key.replace(\"swin_vit\", \"swinViT\")] = state_dict.pop(key)\nmodel.load_state_dict(state_dict, strict=False)\nprint(\"Using pretrained voco ema self-supervised Swin UNETR backbone weights !\")\n```\n### Prepare Dataset\nFirst, you need to download the pre-training dataset. The 10k dataset are all open-source and you can download yourself. Or you can download it in our [hugging face repo](https://huggingface.co/datasets/Luffy503/VoCo-10k/tree/main).\n![teaser](assets/10k.png)\nNote: 10k dataset is collected by Dr. <a href=\"https://scholar.google.com/citations?user=PfM5gucAAAAJ&hl=en\">Jiaxin Zhuang</a>\n```\n├── data\n    ├── BTCV\n    ├── TCIAcovid19\n    ├── Luna16-jx\n    ├── stoic21\n    ├── Totalsegmentator_dataset\n    ├── Flare23\n    ├── LiDC\n    └── HNSCC_convert_v1\n```\n### Pre-Training\n(1) Note that in this repo, we present the version of our 10k pre-training, thus some details may be different to our paper.\n\n(2) To accelerate the training, we use \"Persistentdataset\" to pre-cache dataset, which requires extra storage. It is important in our codes. 
If you don't have enough storage, you can change it back in \"utils/data_utils.py\".\n\nTo pre-train: \n```bash \nsh train.sh\n```\n### Finetune\nOur finetune codes will soon be available, or you can directly use the codes in [MONAI](https://github.com/Project-MONAI/research-contributions).\n\nMore finetune implementations are in preparation!\n## Acknowledgement\nWe thank [MONAI](https://github.com/Project-MONAI/research-contributions) for part of their codes.\n## Citation ✏️ 📄\nIf you find this repo useful for your research, please consider citing the paper as follows:\n\n```\n@article{voco,\n  title={Large-Scale 3D Medical Image Pre-training with Geometric Context Priors},\n  author={Wu, Linshan and Zhuang, Jiaxin and Chen, Hao},\n  journal={IEEE Transactions on Pattern Analysis and Machine Intelligence},\n  year={2025},\n  publisher={IEEE}\n}\n@InProceedings{voco,\n    author    = {Wu, Linshan and Zhuang, Jiaxin and Chen, Hao},\n    title     = {VoCo: A Simple-yet-Effective Volume Contrastive Learning Framework for 3D Medical Image Analysis},\n    booktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)},\n    month     = {June},\n    year      = {2024},\n    pages     = {22873-22882}\n}\n```\n"
  },
  {
    "path": "jsons/HNSCC.json",
    "content": "{\"training\": [{\"image\": \"00001.nii.gz\"}, {\"image\": \"00002.nii.gz\"}, {\"image\": \"00003.nii.gz\"}, {\"image\": \"00005.nii.gz\"}, {\"image\": \"00006.nii.gz\"}, {\"image\": \"00007.nii.gz\"}, {\"image\": \"00008.nii.gz\"}, {\"image\": \"00009.nii.gz\"}, {\"image\": \"00010.nii.gz\"}, {\"image\": \"00011.nii.gz\"}, {\"image\": \"00013.nii.gz\"}, {\"image\": \"00014.nii.gz\"}, {\"image\": \"00015.nii.gz\"}, {\"image\": \"00016.nii.gz\"}, {\"image\": \"00017.nii.gz\"}, {\"image\": \"00018.nii.gz\"}, {\"image\": \"00019.nii.gz\"}, {\"image\": \"00020.nii.gz\"}, {\"image\": \"00021.nii.gz\"}, {\"image\": \"00022.nii.gz\"}, {\"image\": \"00023.nii.gz\"}, {\"image\": \"00025.nii.gz\"}, {\"image\": \"00026.nii.gz\"}, {\"image\": \"00028.nii.gz\"}, {\"image\": \"00030.nii.gz\"}, {\"image\": \"00031.nii.gz\"}, {\"image\": \"00032.nii.gz\"}, {\"image\": \"00033.nii.gz\"}, {\"image\": \"00034.nii.gz\"}, {\"image\": \"00035.nii.gz\"}, {\"image\": \"00036.nii.gz\"}, {\"image\": \"00037.nii.gz\"}, {\"image\": \"00038.nii.gz\"}, {\"image\": \"00040.nii.gz\"}, {\"image\": \"00041.nii.gz\"}, {\"image\": \"00042.nii.gz\"}, {\"image\": \"00043.nii.gz\"}, {\"image\": \"00044.nii.gz\"}, {\"image\": \"00045.nii.gz\"}, {\"image\": \"00046.nii.gz\"}, {\"image\": \"00047.nii.gz\"}, {\"image\": \"00048.nii.gz\"}, {\"image\": \"00049.nii.gz\"}, {\"image\": \"00050.nii.gz\"}, {\"image\": \"00051.nii.gz\"}, {\"image\": \"00052.nii.gz\"}, {\"image\": \"00053.nii.gz\"}, {\"image\": \"00054.nii.gz\"}, {\"image\": \"00055.nii.gz\"}, {\"image\": \"00056.nii.gz\"}, {\"image\": \"00058.nii.gz\"}, {\"image\": \"00059.nii.gz\"}, {\"image\": \"00061.nii.gz\"}, {\"image\": \"00062.nii.gz\"}, {\"image\": \"00063.nii.gz\"}, {\"image\": \"00066.nii.gz\"}, {\"image\": \"00067.nii.gz\"}, {\"image\": \"00068.nii.gz\"}, {\"image\": \"00069.nii.gz\"}, {\"image\": \"00070.nii.gz\"}, {\"image\": \"00071.nii.gz\"}, {\"image\": \"00073.nii.gz\"}, {\"image\": \"00074.nii.gz\"}, {\"image\": 
\"00075.nii.gz\"}, {\"image\": \"00077.nii.gz\"}, {\"image\": \"00078.nii.gz\"}, {\"image\": \"00079.nii.gz\"}, {\"image\": \"00080.nii.gz\"}, {\"image\": \"00081.nii.gz\"}, {\"image\": \"00082.nii.gz\"}, {\"image\": \"00083.nii.gz\"}, {\"image\": \"00084.nii.gz\"}, {\"image\": \"00085.nii.gz\"}, {\"image\": \"00086.nii.gz\"}, {\"image\": \"00087.nii.gz\"}, {\"image\": \"00089.nii.gz\"}, {\"image\": \"00090.nii.gz\"}, {\"image\": \"00091.nii.gz\"}, {\"image\": \"00092.nii.gz\"}, {\"image\": \"00093.nii.gz\"}, {\"image\": \"00094.nii.gz\"}, {\"image\": \"00096.nii.gz\"}, {\"image\": \"00097.nii.gz\"}, {\"image\": \"00099.nii.gz\"}, {\"image\": \"00100.nii.gz\"}, {\"image\": \"00101.nii.gz\"}, {\"image\": \"00102.nii.gz\"}, {\"image\": \"00103.nii.gz\"}, {\"image\": \"00104.nii.gz\"}, {\"image\": \"00105.nii.gz\"}, {\"image\": \"00106.nii.gz\"}, {\"image\": \"00107.nii.gz\"}, {\"image\": \"00108.nii.gz\"}, {\"image\": \"00109.nii.gz\"}, {\"image\": \"00110.nii.gz\"}, {\"image\": \"00111.nii.gz\"}, {\"image\": \"00112.nii.gz\"}, {\"image\": \"00117.nii.gz\"}, {\"image\": \"00118.nii.gz\"}, {\"image\": \"00119.nii.gz\"}, {\"image\": \"00120.nii.gz\"}, {\"image\": \"00121.nii.gz\"}, {\"image\": \"00122.nii.gz\"}, {\"image\": \"00126.nii.gz\"}, {\"image\": \"00127.nii.gz\"}, {\"image\": \"00128.nii.gz\"}, {\"image\": \"00129.nii.gz\"}, {\"image\": \"00130.nii.gz\"}, {\"image\": \"00131.nii.gz\"}, {\"image\": \"00132.nii.gz\"}, {\"image\": \"00133.nii.gz\"}, {\"image\": \"00134.nii.gz\"}, {\"image\": \"00135.nii.gz\"}, {\"image\": \"00136.nii.gz\"}, {\"image\": \"00137.nii.gz\"}, {\"image\": \"00138.nii.gz\"}, {\"image\": \"00139.nii.gz\"}, {\"image\": \"00140.nii.gz\"}, {\"image\": \"00141.nii.gz\"}, {\"image\": \"00142.nii.gz\"}, {\"image\": \"00143.nii.gz\"}, {\"image\": \"00144.nii.gz\"}, {\"image\": \"00145.nii.gz\"}, {\"image\": \"00146.nii.gz\"}, {\"image\": \"00147.nii.gz\"}, {\"image\": \"00148.nii.gz\"}, {\"image\": \"00149.nii.gz\"}, {\"image\": 
\"00150.nii.gz\"}, {\"image\": \"00151.nii.gz\"}, {\"image\": \"00152.nii.gz\"}, {\"image\": \"00154.nii.gz\"}, {\"image\": \"00155.nii.gz\"}, {\"image\": \"00156.nii.gz\"}, {\"image\": \"00157.nii.gz\"}, {\"image\": \"00158.nii.gz\"}, {\"image\": \"00161.nii.gz\"}, {\"image\": \"00162.nii.gz\"}, {\"image\": \"00163.nii.gz\"}, {\"image\": \"00165.nii.gz\"}, {\"image\": \"00166.nii.gz\"}, {\"image\": \"00167.nii.gz\"}, {\"image\": \"00168.nii.gz\"}, {\"image\": \"00169.nii.gz\"}, {\"image\": \"00170.nii.gz\"}, {\"image\": \"00171.nii.gz\"}, {\"image\": \"00172.nii.gz\"}, {\"image\": \"00173.nii.gz\"}, {\"image\": \"00174.nii.gz\"}, {\"image\": \"00175.nii.gz\"}, {\"image\": \"00176.nii.gz\"}, {\"image\": \"00177.nii.gz\"}, {\"image\": \"00178.nii.gz\"}, {\"image\": \"00179.nii.gz\"}, {\"image\": \"00180.nii.gz\"}, {\"image\": \"00181.nii.gz\"}, {\"image\": \"00182.nii.gz\"}, {\"image\": \"00183.nii.gz\"}, {\"image\": \"00184.nii.gz\"}, {\"image\": \"00185.nii.gz\"}, {\"image\": \"00186.nii.gz\"}, {\"image\": \"00187.nii.gz\"}, {\"image\": \"00188.nii.gz\"}, {\"image\": \"00189.nii.gz\"}, {\"image\": \"00190.nii.gz\"}, {\"image\": \"00191.nii.gz\"}, {\"image\": \"00193.nii.gz\"}, {\"image\": \"00196.nii.gz\"}, {\"image\": \"00197.nii.gz\"}, {\"image\": \"00198.nii.gz\"}, {\"image\": \"00199.nii.gz\"}, {\"image\": \"00200.nii.gz\"}, {\"image\": \"00201.nii.gz\"}, {\"image\": \"00202.nii.gz\"}, {\"image\": \"00203.nii.gz\"}, {\"image\": \"00205.nii.gz\"}, {\"image\": \"00206.nii.gz\"}, {\"image\": \"00207.nii.gz\"}, {\"image\": \"00208.nii.gz\"}, {\"image\": \"00209.nii.gz\"}, {\"image\": \"00210.nii.gz\"}, {\"image\": \"00211.nii.gz\"}, {\"image\": \"00213.nii.gz\"}, {\"image\": \"00216.nii.gz\"}, {\"image\": \"00217.nii.gz\"}, {\"image\": \"00218.nii.gz\"}, {\"image\": \"00219.nii.gz\"}, {\"image\": \"00220.nii.gz\"}, {\"image\": \"00221.nii.gz\"}, {\"image\": \"00222.nii.gz\"}, {\"image\": \"00223.nii.gz\"}, {\"image\": \"00225.nii.gz\"}, {\"image\": 
\"00226.nii.gz\"}, {\"image\": \"00227.nii.gz\"}, {\"image\": \"00228.nii.gz\"}, {\"image\": \"00229.nii.gz\"}, {\"image\": \"00230.nii.gz\"}, {\"image\": \"00231.nii.gz\"}, {\"image\": \"00232.nii.gz\"}, {\"image\": \"00233.nii.gz\"}, {\"image\": \"00234.nii.gz\"}, {\"image\": \"00235.nii.gz\"}, {\"image\": \"00236.nii.gz\"}, {\"image\": \"00237.nii.gz\"}, {\"image\": \"00238.nii.gz\"}, {\"image\": \"00239.nii.gz\"}, {\"image\": \"00240.nii.gz\"}, {\"image\": \"00241.nii.gz\"}, {\"image\": \"00242.nii.gz\"}, {\"image\": \"00243.nii.gz\"}, {\"image\": \"00244.nii.gz\"}, {\"image\": \"00245.nii.gz\"}, {\"image\": \"00246.nii.gz\"}, {\"image\": \"00251.nii.gz\"}, {\"image\": \"00252.nii.gz\"}, {\"image\": \"00253.nii.gz\"}, {\"image\": \"00254.nii.gz\"}, {\"image\": \"00255.nii.gz\"}, {\"image\": \"00256.nii.gz\"}, {\"image\": \"00257.nii.gz\"}, {\"image\": \"00258.nii.gz\"}, {\"image\": \"00259.nii.gz\"}, {\"image\": \"00260.nii.gz\"}, {\"image\": \"00261.nii.gz\"}, {\"image\": \"00262.nii.gz\"}, {\"image\": \"00263.nii.gz\"}, {\"image\": \"00264.nii.gz\"}, {\"image\": \"00265.nii.gz\"}, {\"image\": \"00266.nii.gz\"}, {\"image\": \"00267.nii.gz\"}, {\"image\": \"00268.nii.gz\"}, {\"image\": \"00269.nii.gz\"}, {\"image\": \"00271.nii.gz\"}, {\"image\": \"00272.nii.gz\"}, {\"image\": \"00273.nii.gz\"}, {\"image\": \"00275.nii.gz\"}, {\"image\": \"00277.nii.gz\"}, {\"image\": \"00278.nii.gz\"}, {\"image\": \"00279.nii.gz\"}, {\"image\": \"00280.nii.gz\"}, {\"image\": \"00281.nii.gz\"}, {\"image\": \"00282.nii.gz\"}, {\"image\": \"00283.nii.gz\"}, {\"image\": \"00284.nii.gz\"}, {\"image\": \"00285.nii.gz\"}, {\"image\": \"00286.nii.gz\"}, {\"image\": \"00287.nii.gz\"}, {\"image\": \"00288.nii.gz\"}, {\"image\": \"00289.nii.gz\"}, {\"image\": \"00290.nii.gz\"}, {\"image\": \"00292.nii.gz\"}, {\"image\": \"00293.nii.gz\"}, {\"image\": \"00294.nii.gz\"}, {\"image\": \"00295.nii.gz\"}, {\"image\": \"00296.nii.gz\"}, {\"image\": \"00297.nii.gz\"}, {\"image\": 
\"00298.nii.gz\"}, {\"image\": \"00299.nii.gz\"}, {\"image\": \"00300.nii.gz\"}, {\"image\": \"00302.nii.gz\"}, {\"image\": \"00303.nii.gz\"}, {\"image\": \"00304.nii.gz\"}, {\"image\": \"00305.nii.gz\"}, {\"image\": \"00306.nii.gz\"}, {\"image\": \"00307.nii.gz\"}, {\"image\": \"00308.nii.gz\"}, {\"image\": \"00309.nii.gz\"}, {\"image\": \"00310.nii.gz\"}, {\"image\": \"00311.nii.gz\"}, {\"image\": \"00312.nii.gz\"}, {\"image\": \"00313.nii.gz\"}, {\"image\": \"00314.nii.gz\"}, {\"image\": \"00315.nii.gz\"}, {\"image\": \"00316.nii.gz\"}, {\"image\": \"00317.nii.gz\"}, {\"image\": \"00318.nii.gz\"}, {\"image\": \"00319.nii.gz\"}, {\"image\": \"00320.nii.gz\"}, {\"image\": \"00321.nii.gz\"}, {\"image\": \"00322.nii.gz\"}, {\"image\": \"00323.nii.gz\"}, {\"image\": \"00324.nii.gz\"}, {\"image\": \"00325.nii.gz\"}, {\"image\": \"00326.nii.gz\"}, {\"image\": \"00327.nii.gz\"}, {\"image\": \"00328.nii.gz\"}, {\"image\": \"00329.nii.gz\"}, {\"image\": \"00330.nii.gz\"}, {\"image\": \"00331.nii.gz\"}, {\"image\": \"00332.nii.gz\"}, {\"image\": \"00333.nii.gz\"}, {\"image\": \"00334.nii.gz\"}, {\"image\": \"00335.nii.gz\"}, {\"image\": \"00336.nii.gz\"}, {\"image\": \"00337.nii.gz\"}, {\"image\": \"00338.nii.gz\"}, {\"image\": \"00339.nii.gz\"}, {\"image\": \"00340.nii.gz\"}, {\"image\": \"00341.nii.gz\"}, {\"image\": \"00342.nii.gz\"}, {\"image\": \"00343.nii.gz\"}, {\"image\": \"00345.nii.gz\"}, {\"image\": \"00348.nii.gz\"}, {\"image\": \"00349.nii.gz\"}, {\"image\": \"00350.nii.gz\"}, {\"image\": \"00351.nii.gz\"}, {\"image\": \"00352.nii.gz\"}, {\"image\": \"00353.nii.gz\"}, {\"image\": \"00354.nii.gz\"}, {\"image\": \"00355.nii.gz\"}, {\"image\": \"00356.nii.gz\"}, {\"image\": \"00357.nii.gz\"}, {\"image\": \"00358.nii.gz\"}, {\"image\": \"00359.nii.gz\"}, {\"image\": \"00362.nii.gz\"}, {\"image\": \"00363.nii.gz\"}, {\"image\": \"00364.nii.gz\"}, {\"image\": \"00365.nii.gz\"}, {\"image\": \"00366.nii.gz\"}, {\"image\": \"00367.nii.gz\"}, {\"image\": 
\"00368.nii.gz\"}, {\"image\": \"00369.nii.gz\"}, {\"image\": \"00370.nii.gz\"}, {\"image\": \"00371.nii.gz\"}, {\"image\": \"00372.nii.gz\"}, {\"image\": \"00373.nii.gz\"}, {\"image\": \"00374.nii.gz\"}, {\"image\": \"00375.nii.gz\"}, {\"image\": \"00376.nii.gz\"}, {\"image\": \"00377.nii.gz\"}, {\"image\": \"00378.nii.gz\"}, {\"image\": \"00379.nii.gz\"}, {\"image\": \"00380.nii.gz\"}, {\"image\": \"00381.nii.gz\"}, {\"image\": \"00382.nii.gz\"}, {\"image\": \"00383.nii.gz\"}, {\"image\": \"00384.nii.gz\"}, {\"image\": \"00385.nii.gz\"}, {\"image\": \"00386.nii.gz\"}, {\"image\": \"00387.nii.gz\"}, {\"image\": \"00389.nii.gz\"}, {\"image\": \"00390.nii.gz\"}, {\"image\": \"00391.nii.gz\"}, {\"image\": \"00392.nii.gz\"}, {\"image\": \"00393.nii.gz\"}, {\"image\": \"00394.nii.gz\"}, {\"image\": \"00395.nii.gz\"}, {\"image\": \"00396.nii.gz\"}, {\"image\": \"00397.nii.gz\"}, {\"image\": \"00398.nii.gz\"}, {\"image\": \"00399.nii.gz\"}, {\"image\": \"00400.nii.gz\"}, {\"image\": \"00401.nii.gz\"}, {\"image\": \"00402.nii.gz\"}, {\"image\": \"00403.nii.gz\"}, {\"image\": \"00404.nii.gz\"}, {\"image\": \"00405.nii.gz\"}, {\"image\": \"00406.nii.gz\"}, {\"image\": \"00407.nii.gz\"}, {\"image\": \"00408.nii.gz\"}, {\"image\": \"00409.nii.gz\"}, {\"image\": \"00410.nii.gz\"}, {\"image\": \"00411.nii.gz\"}, {\"image\": \"00412.nii.gz\"}, {\"image\": \"00413.nii.gz\"}, {\"image\": \"00414.nii.gz\"}, {\"image\": \"00415.nii.gz\"}, {\"image\": \"00416.nii.gz\"}, {\"image\": \"00419.nii.gz\"}, {\"image\": \"00421.nii.gz\"}, {\"image\": \"00422.nii.gz\"}, {\"image\": \"00423.nii.gz\"}, {\"image\": \"00424.nii.gz\"}, {\"image\": \"00425.nii.gz\"}, {\"image\": \"00426.nii.gz\"}, {\"image\": \"00427.nii.gz\"}, {\"image\": \"00428.nii.gz\"}, {\"image\": \"00429.nii.gz\"}, {\"image\": \"00430.nii.gz\"}, {\"image\": \"00431.nii.gz\"}, {\"image\": \"00432.nii.gz\"}, {\"image\": \"00433.nii.gz\"}, {\"image\": \"00435.nii.gz\"}, {\"image\": \"00436.nii.gz\"}, {\"image\": 
\"00437.nii.gz\"}, {\"image\": \"00438.nii.gz\"}, {\"image\": \"00439.nii.gz\"}, {\"image\": \"00440.nii.gz\"}, {\"image\": \"00441.nii.gz\"}, {\"image\": \"00442.nii.gz\"}, {\"image\": \"00443.nii.gz\"}, {\"image\": \"00444.nii.gz\"}, {\"image\": \"00445.nii.gz\"}, {\"image\": \"00446.nii.gz\"}, {\"image\": \"00447.nii.gz\"}, {\"image\": \"00448.nii.gz\"}, {\"image\": \"00449.nii.gz\"}, {\"image\": \"00450.nii.gz\"}, {\"image\": \"00451.nii.gz\"}, {\"image\": \"00452.nii.gz\"}, {\"image\": \"00454.nii.gz\"}, {\"image\": \"00455.nii.gz\"}, {\"image\": \"00456.nii.gz\"}, {\"image\": \"00457.nii.gz\"}, {\"image\": \"00458.nii.gz\"}, {\"image\": \"00459.nii.gz\"}, {\"image\": \"00460.nii.gz\"}, {\"image\": \"00461.nii.gz\"}, {\"image\": \"00462.nii.gz\"}, {\"image\": \"00463.nii.gz\"}, {\"image\": \"00464.nii.gz\"}, {\"image\": \"00465.nii.gz\"}, {\"image\": \"00466.nii.gz\"}, {\"image\": \"00467.nii.gz\"}, {\"image\": \"00468.nii.gz\"}, {\"image\": \"00469.nii.gz\"}, {\"image\": \"00473.nii.gz\"}, {\"image\": \"00474.nii.gz\"}, {\"image\": \"00476.nii.gz\"}, {\"image\": \"00477.nii.gz\"}, {\"image\": \"00478.nii.gz\"}, {\"image\": \"00479.nii.gz\"}, {\"image\": \"00480.nii.gz\"}, {\"image\": \"00481.nii.gz\"}, {\"image\": \"00483.nii.gz\"}, {\"image\": \"00484.nii.gz\"}, {\"image\": \"00485.nii.gz\"}, {\"image\": \"00486.nii.gz\"}, {\"image\": \"00487.nii.gz\"}, {\"image\": \"00488.nii.gz\"}, {\"image\": \"00489.nii.gz\"}, {\"image\": \"00490.nii.gz\"}, {\"image\": \"00491.nii.gz\"}, {\"image\": \"00492.nii.gz\"}, {\"image\": \"00494.nii.gz\"}, {\"image\": \"00497.nii.gz\"}, {\"image\": \"00498.nii.gz\"}, {\"image\": \"00499.nii.gz\"}, {\"image\": \"00500.nii.gz\"}, {\"image\": \"00501.nii.gz\"}, {\"image\": \"00502.nii.gz\"}, {\"image\": \"00504.nii.gz\"}, {\"image\": \"00505.nii.gz\"}, {\"image\": \"00506.nii.gz\"}, {\"image\": \"00507.nii.gz\"}, {\"image\": \"00508.nii.gz\"}, {\"image\": \"00509.nii.gz\"}, {\"image\": \"00510.nii.gz\"}, {\"image\": 
\"00511.nii.gz\"}, {\"image\": \"00512.nii.gz\"}, {\"image\": \"00513.nii.gz\"}, {\"image\": \"00514.nii.gz\"}, {\"image\": \"00515.nii.gz\"}, {\"image\": \"00516.nii.gz\"}, {\"image\": \"00518.nii.gz\"}, {\"image\": \"00519.nii.gz\"}, {\"image\": \"00520.nii.gz\"}, {\"image\": \"00521.nii.gz\"}, {\"image\": \"00522.nii.gz\"}, {\"image\": \"00523.nii.gz\"}, {\"image\": \"00524.nii.gz\"}, {\"image\": \"00525.nii.gz\"}, {\"image\": \"00526.nii.gz\"}, {\"image\": \"00527.nii.gz\"}, {\"image\": \"00528.nii.gz\"}, {\"image\": \"00529.nii.gz\"}, {\"image\": \"00530.nii.gz\"}, {\"image\": \"00531.nii.gz\"}, {\"image\": \"00532.nii.gz\"}, {\"image\": \"00533.nii.gz\"}, {\"image\": \"00534.nii.gz\"}, {\"image\": \"00535.nii.gz\"}, {\"image\": \"00536.nii.gz\"}, {\"image\": \"00537.nii.gz\"}, {\"image\": \"00538.nii.gz\"}, {\"image\": \"00539.nii.gz\"}, {\"image\": \"00540.nii.gz\"}, {\"image\": \"00541.nii.gz\"}, {\"image\": \"00542.nii.gz\"}, {\"image\": \"00543.nii.gz\"}, {\"image\": \"00544.nii.gz\"}, {\"image\": \"00545.nii.gz\"}, {\"image\": \"00546.nii.gz\"}, {\"image\": \"00547.nii.gz\"}, {\"image\": \"00548.nii.gz\"}, {\"image\": \"00549.nii.gz\"}, {\"image\": \"00550.nii.gz\"}, {\"image\": \"00551.nii.gz\"}, {\"image\": \"00552.nii.gz\"}, {\"image\": \"00554.nii.gz\"}, {\"image\": \"00555.nii.gz\"}, {\"image\": \"00556.nii.gz\"}, {\"image\": \"00557.nii.gz\"}, {\"image\": \"00558.nii.gz\"}, {\"image\": \"00559.nii.gz\"}, {\"image\": \"00560.nii.gz\"}, {\"image\": \"00561.nii.gz\"}, {\"image\": \"00562.nii.gz\"}, {\"image\": \"00564.nii.gz\"}, {\"image\": \"00566.nii.gz\"}, {\"image\": \"00567.nii.gz\"}, {\"image\": \"00568.nii.gz\"}, {\"image\": \"00569.nii.gz\"}, {\"image\": \"00570.nii.gz\"}, {\"image\": \"00571.nii.gz\"}, {\"image\": \"00572.nii.gz\"}, {\"image\": \"00573.nii.gz\"}, {\"image\": \"00574.nii.gz\"}, {\"image\": \"00576.nii.gz\"}, {\"image\": \"00577.nii.gz\"}, {\"image\": \"00578.nii.gz\"}, {\"image\": \"00579.nii.gz\"}, {\"image\": 
\"00580.nii.gz\"}, {\"image\": \"00581.nii.gz\"}, {\"image\": \"00582.nii.gz\"}, {\"image\": \"00583.nii.gz\"}, {\"image\": \"00584.nii.gz\"}, {\"image\": \"00585.nii.gz\"}, {\"image\": \"00586.nii.gz\"}, {\"image\": \"00587.nii.gz\"}, {\"image\": \"00588.nii.gz\"}, {\"image\": \"00589.nii.gz\"}, {\"image\": \"00590.nii.gz\"}, {\"image\": \"00591.nii.gz\"}, {\"image\": \"00592.nii.gz\"}, {\"image\": \"00593.nii.gz\"}, {\"image\": \"00594.nii.gz\"}, {\"image\": \"00595.nii.gz\"}, {\"image\": \"00596.nii.gz\"}, {\"image\": \"00597.nii.gz\"}, {\"image\": \"00598.nii.gz\"}, {\"image\": \"00599.nii.gz\"}, {\"image\": \"00600.nii.gz\"}, {\"image\": \"00601.nii.gz\"}, {\"image\": \"00602.nii.gz\"}, {\"image\": \"00603.nii.gz\"}, {\"image\": \"00604.nii.gz\"}, {\"image\": \"00606.nii.gz\"}, {\"image\": \"00607.nii.gz\"}, {\"image\": \"00608.nii.gz\"}, {\"image\": \"00610.nii.gz\"}, {\"image\": \"00611.nii.gz\"}, {\"image\": \"00612.nii.gz\"}, {\"image\": \"00613.nii.gz\"}, {\"image\": \"00614.nii.gz\"}, {\"image\": \"00615.nii.gz\"}, {\"image\": \"00616.nii.gz\"}, {\"image\": \"00617.nii.gz\"}, {\"image\": \"00618.nii.gz\"}, {\"image\": \"00619.nii.gz\"}, {\"image\": \"00620.nii.gz\"}, {\"image\": \"00621.nii.gz\"}, {\"image\": \"00622.nii.gz\"}, {\"image\": \"00626.nii.gz\"}, {\"image\": \"00628.nii.gz\"}, {\"image\": \"00629.nii.gz\"}, {\"image\": \"00630.nii.gz\"}, {\"image\": \"00631.nii.gz\"}, {\"image\": \"00632.nii.gz\"}, {\"image\": \"00633.nii.gz\"}, {\"image\": \"00634.nii.gz\"}, {\"image\": \"00635.nii.gz\"}, {\"image\": \"00636.nii.gz\"}, {\"image\": \"00637.nii.gz\"}, {\"image\": \"00638.nii.gz\"}, {\"image\": \"00639.nii.gz\"}, {\"image\": \"00640.nii.gz\"}, {\"image\": \"00641.nii.gz\"}, {\"image\": \"00643.nii.gz\"}, {\"image\": \"00644.nii.gz\"}, {\"image\": \"00645.nii.gz\"}, {\"image\": \"00646.nii.gz\"}, {\"image\": \"00647.nii.gz\"}, {\"image\": \"00648.nii.gz\"}, {\"image\": \"00649.nii.gz\"}, {\"image\": \"00650.nii.gz\"}, {\"image\": 
\"00651.nii.gz\"}, {\"image\": \"00653.nii.gz\"}, {\"image\": \"00654.nii.gz\"}, {\"image\": \"00655.nii.gz\"}, {\"image\": \"00656.nii.gz\"}, {\"image\": \"00657.nii.gz\"}, {\"image\": \"00659.nii.gz\"}, {\"image\": \"00660.nii.gz\"}, {\"image\": \"00661.nii.gz\"}, {\"image\": \"00662.nii.gz\"}, {\"image\": \"00663.nii.gz\"}, {\"image\": \"00664.nii.gz\"}, {\"image\": \"00665.nii.gz\"}, {\"image\": \"00666.nii.gz\"}, {\"image\": \"00667.nii.gz\"}, {\"image\": \"00668.nii.gz\"}, {\"image\": \"00669.nii.gz\"}, {\"image\": \"00670.nii.gz\"}, {\"image\": \"00671.nii.gz\"}, {\"image\": \"00673.nii.gz\"}, {\"image\": \"00674.nii.gz\"}, {\"image\": \"00675.nii.gz\"}, {\"image\": \"00676.nii.gz\"}, {\"image\": \"00677.nii.gz\"}, {\"image\": \"00678.nii.gz\"}, {\"image\": \"00679.nii.gz\"}, {\"image\": \"00681.nii.gz\"}, {\"image\": \"00683.nii.gz\"}, {\"image\": \"00684.nii.gz\"}, {\"image\": \"00685.nii.gz\"}, {\"image\": \"00686.nii.gz\"}, {\"image\": \"00687.nii.gz\"}, {\"image\": \"00688.nii.gz\"}, {\"image\": \"00689.nii.gz\"}, {\"image\": \"00690.nii.gz\"}, {\"image\": \"00692.nii.gz\"}, {\"image\": \"00693.nii.gz\"}, {\"image\": \"00694.nii.gz\"}, {\"image\": \"00695.nii.gz\"}, {\"image\": \"00696.nii.gz\"}, {\"image\": \"00697.nii.gz\"}, {\"image\": \"00698.nii.gz\"}, {\"image\": \"00699.nii.gz\"}, {\"image\": \"00700.nii.gz\"}, {\"image\": \"00701.nii.gz\"}, {\"image\": \"00702.nii.gz\"}, {\"image\": \"00703.nii.gz\"}, {\"image\": \"00704.nii.gz\"}, {\"image\": \"00705.nii.gz\"}, {\"image\": \"00706.nii.gz\"}, {\"image\": \"00707.nii.gz\"}, {\"image\": \"00708.nii.gz\"}, {\"image\": \"00709.nii.gz\"}, {\"image\": \"00710.nii.gz\"}, {\"image\": \"00711.nii.gz\"}, {\"image\": \"00712.nii.gz\"}, {\"image\": \"00713.nii.gz\"}, {\"image\": \"00714.nii.gz\"}, {\"image\": \"00716.nii.gz\"}, {\"image\": \"00717.nii.gz\"}, {\"image\": \"00718.nii.gz\"}, {\"image\": \"00719.nii.gz\"}, {\"image\": \"00720.nii.gz\"}, {\"image\": \"00722.nii.gz\"}, {\"image\": 
\"00723.nii.gz\"}, {\"image\": \"00724.nii.gz\"}, {\"image\": \"00725.nii.gz\"}, {\"image\": \"00726.nii.gz\"}, {\"image\": \"00727.nii.gz\"}, {\"image\": \"00728.nii.gz\"}, {\"image\": \"00731.nii.gz\"}, {\"image\": \"00732.nii.gz\"}, {\"image\": \"00733.nii.gz\"}, {\"image\": \"00734.nii.gz\"}, {\"image\": \"00735.nii.gz\"}, {\"image\": \"00736.nii.gz\"}, {\"image\": \"00737.nii.gz\"}, {\"image\": \"00738.nii.gz\"}, {\"image\": \"00739.nii.gz\"}, {\"image\": \"00741.nii.gz\"}, {\"image\": \"00743.nii.gz\"}, {\"image\": \"00744.nii.gz\"}, {\"image\": \"00745.nii.gz\"}, {\"image\": \"00746.nii.gz\"}, {\"image\": \"00747.nii.gz\"}, {\"image\": \"00748.nii.gz\"}, {\"image\": \"00749.nii.gz\"}, {\"image\": \"00750.nii.gz\"}, {\"image\": \"00751.nii.gz\"}, {\"image\": \"00752.nii.gz\"}, {\"image\": \"00755.nii.gz\"}, {\"image\": \"00757.nii.gz\"}, {\"image\": \"00758.nii.gz\"}, {\"image\": \"00760.nii.gz\"}, {\"image\": \"00762.nii.gz\"}, {\"image\": \"00763.nii.gz\"}, {\"image\": \"00764.nii.gz\"}, {\"image\": \"00767.nii.gz\"}, {\"image\": \"00768.nii.gz\"}, {\"image\": \"00769.nii.gz\"}, {\"image\": \"00770.nii.gz\"}, {\"image\": \"00771.nii.gz\"}, {\"image\": \"00772.nii.gz\"}, {\"image\": \"00774.nii.gz\"}, {\"image\": \"00775.nii.gz\"}, {\"image\": \"00777.nii.gz\"}, {\"image\": \"00779.nii.gz\"}, {\"image\": \"00780.nii.gz\"}, {\"image\": \"00781.nii.gz\"}, {\"image\": \"00782.nii.gz\"}, {\"image\": \"00783.nii.gz\"}, {\"image\": \"00784.nii.gz\"}, {\"image\": \"00786.nii.gz\"}, {\"image\": \"00787.nii.gz\"}, {\"image\": \"00788.nii.gz\"}, {\"image\": \"00789.nii.gz\"}, {\"image\": \"00790.nii.gz\"}, {\"image\": \"00791.nii.gz\"}, {\"image\": \"00792.nii.gz\"}, {\"image\": \"00793.nii.gz\"}, {\"image\": \"00794.nii.gz\"}, {\"image\": \"00795.nii.gz\"}, {\"image\": \"00796.nii.gz\"}, {\"image\": \"00797.nii.gz\"}, {\"image\": \"00798.nii.gz\"}, {\"image\": \"00799.nii.gz\"}, {\"image\": \"00800.nii.gz\"}, {\"image\": \"00801.nii.gz\"}, {\"image\": 
\"00804.nii.gz\"}, {\"image\": \"00805.nii.gz\"}, {\"image\": \"00807.nii.gz\"}, {\"image\": \"00808.nii.gz\"}, {\"image\": \"00809.nii.gz\"}, {\"image\": \"00811.nii.gz\"}, {\"image\": \"00813.nii.gz\"}, {\"image\": \"00814.nii.gz\"}, {\"image\": \"00815.nii.gz\"}, {\"image\": \"00816.nii.gz\"}, {\"image\": \"00817.nii.gz\"}, {\"image\": \"00818.nii.gz\"}, {\"image\": \"00819.nii.gz\"}, {\"image\": \"00820.nii.gz\"}, {\"image\": \"00821.nii.gz\"}, {\"image\": \"00822.nii.gz\"}, {\"image\": \"00828.nii.gz\"}, {\"image\": \"00829.nii.gz\"}, {\"image\": \"00830.nii.gz\"}, {\"image\": \"00831.nii.gz\"}, {\"image\": \"00832.nii.gz\"}, {\"image\": \"00833.nii.gz\"}, {\"image\": \"00835.nii.gz\"}, {\"image\": \"00838.nii.gz\"}, {\"image\": \"00839.nii.gz\"}, {\"image\": \"00840.nii.gz\"}, {\"image\": \"00841.nii.gz\"}, {\"image\": \"00842.nii.gz\"}, {\"image\": \"00844.nii.gz\"}, {\"image\": \"00845.nii.gz\"}, {\"image\": \"00846.nii.gz\"}, {\"image\": \"00848.nii.gz\"}, {\"image\": \"00849.nii.gz\"}, {\"image\": \"00850.nii.gz\"}, {\"image\": \"00851.nii.gz\"}, {\"image\": \"00852.nii.gz\"}, {\"image\": \"00853.nii.gz\"}, {\"image\": \"00854.nii.gz\"}, {\"image\": \"00855.nii.gz\"}, {\"image\": \"00857.nii.gz\"}, {\"image\": \"00858.nii.gz\"}, {\"image\": \"00859.nii.gz\"}, {\"image\": \"00860.nii.gz\"}, {\"image\": \"00861.nii.gz\"}, {\"image\": \"00862.nii.gz\"}, {\"image\": \"00863.nii.gz\"}, {\"image\": \"00864.nii.gz\"}, {\"image\": \"00865.nii.gz\"}, {\"image\": \"00867.nii.gz\"}, {\"image\": \"00868.nii.gz\"}, {\"image\": \"00869.nii.gz\"}, {\"image\": \"00870.nii.gz\"}, {\"image\": \"00871.nii.gz\"}, {\"image\": \"00872.nii.gz\"}, {\"image\": \"00875.nii.gz\"}, {\"image\": \"00876.nii.gz\"}, {\"image\": \"00877.nii.gz\"}, {\"image\": \"00879.nii.gz\"}, {\"image\": \"00880.nii.gz\"}, {\"image\": \"00881.nii.gz\"}, {\"image\": \"00882.nii.gz\"}, {\"image\": \"00883.nii.gz\"}, {\"image\": \"00884.nii.gz\"}, {\"image\": \"00885.nii.gz\"}, {\"image\": 
\"00886.nii.gz\"}, {\"image\": \"00887.nii.gz\"}, {\"image\": \"00888.nii.gz\"}, {\"image\": \"00889.nii.gz\"}, {\"image\": \"00890.nii.gz\"}, {\"image\": \"00891.nii.gz\"}, {\"image\": \"00892.nii.gz\"}, {\"image\": \"00893.nii.gz\"}, {\"image\": \"00894.nii.gz\"}, {\"image\": \"00895.nii.gz\"}, {\"image\": \"00896.nii.gz\"}, {\"image\": \"00897.nii.gz\"}, {\"image\": \"00898.nii.gz\"}, {\"image\": \"00899.nii.gz\"}, {\"image\": \"00900.nii.gz\"}, {\"image\": \"00901.nii.gz\"}, {\"image\": \"00902.nii.gz\"}, {\"image\": \"00903.nii.gz\"}, {\"image\": \"00905.nii.gz\"}, {\"image\": \"00906.nii.gz\"}, {\"image\": \"00907.nii.gz\"}, {\"image\": \"00908.nii.gz\"}, {\"image\": \"00909.nii.gz\"}, {\"image\": \"00910.nii.gz\"}, {\"image\": \"00911.nii.gz\"}, {\"image\": \"00913.nii.gz\"}, {\"image\": \"00914.nii.gz\"}, {\"image\": \"00916.nii.gz\"}, {\"image\": \"00917.nii.gz\"}, {\"image\": \"00918.nii.gz\"}, {\"image\": \"00919.nii.gz\"}, {\"image\": \"00920.nii.gz\"}, {\"image\": \"00921.nii.gz\"}, {\"image\": \"00922.nii.gz\"}, {\"image\": \"00923.nii.gz\"}, {\"image\": \"00924.nii.gz\"}, {\"image\": \"00925.nii.gz\"}, {\"image\": \"00926.nii.gz\"}, {\"image\": \"00927.nii.gz\"}, {\"image\": \"00928.nii.gz\"}, {\"image\": \"00929.nii.gz\"}, {\"image\": \"00930.nii.gz\"}, {\"image\": \"00931.nii.gz\"}, {\"image\": \"00932.nii.gz\"}, {\"image\": \"00933.nii.gz\"}, {\"image\": \"00934.nii.gz\"}, {\"image\": \"00935.nii.gz\"}, {\"image\": \"00936.nii.gz\"}, {\"image\": \"00937.nii.gz\"}, {\"image\": \"00938.nii.gz\"}, {\"image\": \"00939.nii.gz\"}, {\"image\": \"00940.nii.gz\"}, {\"image\": \"00941.nii.gz\"}, {\"image\": \"00942.nii.gz\"}, {\"image\": \"00943.nii.gz\"}, {\"image\": \"00944.nii.gz\"}, {\"image\": \"00945.nii.gz\"}, {\"image\": \"00946.nii.gz\"}, {\"image\": \"00947.nii.gz\"}, {\"image\": \"00948.nii.gz\"}, {\"image\": \"00949.nii.gz\"}, {\"image\": \"00950.nii.gz\"}, {\"image\": \"00951.nii.gz\"}, {\"image\": \"00952.nii.gz\"}, {\"image\": 
\"00954.nii.gz\"}, {\"image\": \"00955.nii.gz\"}, {\"image\": \"00956.nii.gz\"}, {\"image\": \"00957.nii.gz\"}, {\"image\": \"00958.nii.gz\"}, {\"image\": \"00959.nii.gz\"}, {\"image\": \"00960.nii.gz\"}, {\"image\": \"00961.nii.gz\"}, {\"image\": \"00962.nii.gz\"}, {\"image\": \"00963.nii.gz\"}, {\"image\": \"00964.nii.gz\"}, {\"image\": \"00965.nii.gz\"}, {\"image\": \"00966.nii.gz\"}, {\"image\": \"00967.nii.gz\"}, {\"image\": \"00968.nii.gz\"}, {\"image\": \"00969.nii.gz\"}, {\"image\": \"00970.nii.gz\"}, {\"image\": \"00971.nii.gz\"}, {\"image\": \"00972.nii.gz\"}, {\"image\": \"00973.nii.gz\"}, {\"image\": \"00974.nii.gz\"}, {\"image\": \"00975.nii.gz\"}, {\"image\": \"00976.nii.gz\"}, {\"image\": \"00977.nii.gz\"}, {\"image\": \"00978.nii.gz\"}, {\"image\": \"00979.nii.gz\"}, {\"image\": \"00981.nii.gz\"}, {\"image\": \"00982.nii.gz\"}, {\"image\": \"00983.nii.gz\"}, {\"image\": \"00984.nii.gz\"}, {\"image\": \"00985.nii.gz\"}, {\"image\": \"00986.nii.gz\"}, {\"image\": \"00987.nii.gz\"}, {\"image\": \"00988.nii.gz\"}, {\"image\": \"00989.nii.gz\"}, {\"image\": \"00990.nii.gz\"}, {\"image\": \"00991.nii.gz\"}, {\"image\": \"00992.nii.gz\"}, {\"image\": \"00993.nii.gz\"}, {\"image\": \"00994.nii.gz\"}, {\"image\": \"00995.nii.gz\"}, {\"image\": \"00997.nii.gz\"}, {\"image\": \"00998.nii.gz\"}, {\"image\": \"00999.nii.gz\"}, {\"image\": \"01000.nii.gz\"}, {\"image\": \"01001.nii.gz\"}, {\"image\": \"01002.nii.gz\"}, {\"image\": \"01003.nii.gz\"}, {\"image\": \"01004.nii.gz\"}, {\"image\": \"01005.nii.gz\"}, {\"image\": \"01006.nii.gz\"}, {\"image\": \"01007.nii.gz\"}, {\"image\": \"01008.nii.gz\"}, {\"image\": \"01009.nii.gz\"}, {\"image\": \"01010.nii.gz\"}, {\"image\": \"01011.nii.gz\"}, {\"image\": \"01012.nii.gz\"}, {\"image\": \"01013.nii.gz\"}, {\"image\": \"01014.nii.gz\"}, {\"image\": \"01015.nii.gz\"}, {\"image\": \"01016.nii.gz\"}, {\"image\": \"01017.nii.gz\"}, {\"image\": \"01019.nii.gz\"}, {\"image\": \"01020.nii.gz\"}, {\"image\": 
\"01021.nii.gz\"}, {\"image\": \"01022.nii.gz\"}, {\"image\": \"01023.nii.gz\"}, {\"image\": \"01024.nii.gz\"}, {\"image\": \"01025.nii.gz\"}, {\"image\": \"01026.nii.gz\"}, {\"image\": \"01027.nii.gz\"}, {\"image\": \"01028.nii.gz\"}, {\"image\": \"01030.nii.gz\"}, {\"image\": \"01031.nii.gz\"}, {\"image\": \"01032.nii.gz\"}, {\"image\": \"01033.nii.gz\"}, {\"image\": \"01034.nii.gz\"}, {\"image\": \"01035.nii.gz\"}, {\"image\": \"01036.nii.gz\"}, {\"image\": \"01037.nii.gz\"}, {\"image\": \"01038.nii.gz\"}, {\"image\": \"01040.nii.gz\"}, {\"image\": \"01041.nii.gz\"}, {\"image\": \"01042.nii.gz\"}, {\"image\": \"01043.nii.gz\"}, {\"image\": \"01044.nii.gz\"}, {\"image\": \"01045.nii.gz\"}, {\"image\": \"01046.nii.gz\"}, {\"image\": \"01047.nii.gz\"}, {\"image\": \"01048.nii.gz\"}, {\"image\": \"01049.nii.gz\"}, {\"image\": \"01051.nii.gz\"}, {\"image\": \"01052.nii.gz\"}, {\"image\": \"01054.nii.gz\"}, {\"image\": \"01055.nii.gz\"}, {\"image\": \"01056.nii.gz\"}, {\"image\": \"01057.nii.gz\"}, {\"image\": \"01058.nii.gz\"}, {\"image\": \"01059.nii.gz\"}, {\"image\": \"01060.nii.gz\"}, {\"image\": \"01061.nii.gz\"}, {\"image\": \"01062.nii.gz\"}, {\"image\": \"01063.nii.gz\"}, {\"image\": \"01064.nii.gz\"}, {\"image\": \"01065.nii.gz\"}, {\"image\": \"01066.nii.gz\"}, {\"image\": \"01067.nii.gz\"}, {\"image\": \"01068.nii.gz\"}, {\"image\": \"01069.nii.gz\"}, {\"image\": \"01070.nii.gz\"}, {\"image\": \"01071.nii.gz\"}, {\"image\": \"01072.nii.gz\"}, {\"image\": \"01073.nii.gz\"}, {\"image\": \"01075.nii.gz\"}, {\"image\": \"01077.nii.gz\"}, {\"image\": \"01078.nii.gz\"}, {\"image\": \"01080.nii.gz\"}, {\"image\": \"01081.nii.gz\"}, {\"image\": \"01082.nii.gz\"}, {\"image\": \"01083.nii.gz\"}, {\"image\": \"01084.nii.gz\"}, {\"image\": \"01086.nii.gz\"}, {\"image\": \"01087.nii.gz\"}, {\"image\": \"01088.nii.gz\"}, {\"image\": \"01090.nii.gz\"}, {\"image\": \"01091.nii.gz\"}, {\"image\": \"01092.nii.gz\"}, {\"image\": \"01093.nii.gz\"}, {\"image\": 
\"01094.nii.gz\"}, {\"image\": \"01095.nii.gz\"}, {\"image\": \"01096.nii.gz\"}, {\"image\": \"01097.nii.gz\"}, {\"image\": \"01098.nii.gz\"}, {\"image\": \"01099.nii.gz\"}, {\"image\": \"01100.nii.gz\"}, {\"image\": \"01101.nii.gz\"}, {\"image\": \"01102.nii.gz\"}, {\"image\": \"01103.nii.gz\"}, {\"image\": \"01105.nii.gz\"}, {\"image\": \"01106.nii.gz\"}, {\"image\": \"01107.nii.gz\"}, {\"image\": \"01108.nii.gz\"}, {\"image\": \"01109.nii.gz\"}, {\"image\": \"01110.nii.gz\"}, {\"image\": \"01111.nii.gz\"}, {\"image\": \"01112.nii.gz\"}, {\"image\": \"01113.nii.gz\"}, {\"image\": \"01114.nii.gz\"}, {\"image\": \"01115.nii.gz\"}, {\"image\": \"01116.nii.gz\"}, {\"image\": \"01117.nii.gz\"}, {\"image\": \"01118.nii.gz\"}, {\"image\": \"01119.nii.gz\"}, {\"image\": \"01120.nii.gz\"}, {\"image\": \"01121.nii.gz\"}, {\"image\": \"01122.nii.gz\"}, {\"image\": \"01123.nii.gz\"}, {\"image\": \"01124.nii.gz\"}, {\"image\": \"01125.nii.gz\"}, {\"image\": \"01127.nii.gz\"}, {\"image\": \"01128.nii.gz\"}, {\"image\": \"01129.nii.gz\"}, {\"image\": \"01130.nii.gz\"}, {\"image\": \"01131.nii.gz\"}, {\"image\": \"01132.nii.gz\"}, {\"image\": \"01133.nii.gz\"}, {\"image\": \"01134.nii.gz\"}, {\"image\": \"01135.nii.gz\"}, {\"image\": \"01136.nii.gz\"}, {\"image\": \"01137.nii.gz\"}, {\"image\": \"01138.nii.gz\"}, {\"image\": \"01139.nii.gz\"}, {\"image\": \"01140.nii.gz\"}, {\"image\": \"01141.nii.gz\"}, {\"image\": \"01142.nii.gz\"}, {\"image\": \"01143.nii.gz\"}, {\"image\": \"01144.nii.gz\"}, {\"image\": \"01146.nii.gz\"}, {\"image\": \"01147.nii.gz\"}, {\"image\": \"01148.nii.gz\"}, {\"image\": \"01149.nii.gz\"}, {\"image\": \"01150.nii.gz\"}, {\"image\": \"01151.nii.gz\"}, {\"image\": \"01152.nii.gz\"}, {\"image\": \"01153.nii.gz\"}, {\"image\": \"01155.nii.gz\"}, {\"image\": \"01156.nii.gz\"}, {\"image\": \"01157.nii.gz\"}, {\"image\": \"01158.nii.gz\"}, {\"image\": \"01159.nii.gz\"}, {\"image\": \"01161.nii.gz\"}, {\"image\": \"01162.nii.gz\"}, {\"image\": 
\"01163.nii.gz\"}, {\"image\": \"01164.nii.gz\"}, {\"image\": \"01166.nii.gz\"}, {\"image\": \"01167.nii.gz\"}, {\"image\": \"01170.nii.gz\"}, {\"image\": \"01171.nii.gz\"}, {\"image\": \"01173.nii.gz\"}, {\"image\": \"01175.nii.gz\"}, {\"image\": \"01176.nii.gz\"}, {\"image\": \"01177.nii.gz\"}, {\"image\": \"01178.nii.gz\"}, {\"image\": \"01179.nii.gz\"}, {\"image\": \"01180.nii.gz\"}, {\"image\": \"01181.nii.gz\"}, {\"image\": \"01182.nii.gz\"}, {\"image\": \"01183.nii.gz\"}, {\"image\": \"01184.nii.gz\"}, {\"image\": \"01186.nii.gz\"}, {\"image\": \"01187.nii.gz\"}, {\"image\": \"01188.nii.gz\"}, {\"image\": \"01189.nii.gz\"}, {\"image\": \"01190.nii.gz\"}, {\"image\": \"01192.nii.gz\"}, {\"image\": \"01193.nii.gz\"}, {\"image\": \"01194.nii.gz\"}, {\"image\": \"01195.nii.gz\"}, {\"image\": \"01196.nii.gz\"}, {\"image\": \"01197.nii.gz\"}, {\"image\": \"01198.nii.gz\"}, {\"image\": \"01201.nii.gz\"}, {\"image\": \"01202.nii.gz\"}, {\"image\": \"01203.nii.gz\"}, {\"image\": \"01204.nii.gz\"}, {\"image\": \"01206.nii.gz\"}, {\"image\": \"01207.nii.gz\"}, {\"image\": \"01208.nii.gz\"}, {\"image\": \"01209.nii.gz\"}, {\"image\": \"01210.nii.gz\"}, {\"image\": \"01212.nii.gz\"}, {\"image\": \"01213.nii.gz\"}, {\"image\": \"01214.nii.gz\"}, {\"image\": \"01216.nii.gz\"}, {\"image\": \"01217.nii.gz\"}, {\"image\": \"01218.nii.gz\"}, {\"image\": \"01219.nii.gz\"}, {\"image\": \"01220.nii.gz\"}, {\"image\": \"01222.nii.gz\"}, {\"image\": \"01223.nii.gz\"}], \"createDate\": \"2023-07-08_15-38-30\"}"
  },
  {
    "path": "jsons/Totalsegmentator_dataset.json",
    "content": "{\"training\": [{\"image\": \"s1405/ct.nii.gz\"}, {\"image\": \"s1404/ct.nii.gz\"}, {\"image\": \"s1403/ct.nii.gz\"}, {\"image\": \"s1401/ct.nii.gz\"}, {\"image\": \"s1400/ct.nii.gz\"}, {\"image\": \"s1399/ct.nii.gz\"}, {\"image\": \"s1397/ct.nii.gz\"}, {\"image\": \"s1395/ct.nii.gz\"}, {\"image\": \"s1394/ct.nii.gz\"}, {\"image\": \"s1390/ct.nii.gz\"}, {\"image\": \"s1385/ct.nii.gz\"}, {\"image\": \"s1374/ct.nii.gz\"}, {\"image\": \"s1368/ct.nii.gz\"}, {\"image\": \"s1364/ct.nii.gz\"}, {\"image\": \"s1355/ct.nii.gz\"}, {\"image\": \"s1354/ct.nii.gz\"}, {\"image\": \"s1353/ct.nii.gz\"}, {\"image\": \"s1352/ct.nii.gz\"}, {\"image\": \"s1350/ct.nii.gz\"}, {\"image\": \"s1349/ct.nii.gz\"}, {\"image\": \"s1348/ct.nii.gz\"}, {\"image\": \"s1347/ct.nii.gz\"}, {\"image\": \"s1346/ct.nii.gz\"}, {\"image\": \"s1345/ct.nii.gz\"}, {\"image\": \"s1344/ct.nii.gz\"}, {\"image\": \"s1343/ct.nii.gz\"}, {\"image\": \"s1342/ct.nii.gz\"}, {\"image\": \"s1341/ct.nii.gz\"}, {\"image\": \"s1340/ct.nii.gz\"}, {\"image\": \"s1339/ct.nii.gz\"}, {\"image\": \"s1338/ct.nii.gz\"}, {\"image\": \"s1337/ct.nii.gz\"}, {\"image\": \"s1336/ct.nii.gz\"}, {\"image\": \"s1335/ct.nii.gz\"}, {\"image\": \"s1334/ct.nii.gz\"}, {\"image\": \"s1333/ct.nii.gz\"}, {\"image\": \"s1332/ct.nii.gz\"}, {\"image\": \"s1331/ct.nii.gz\"}, {\"image\": \"s1330/ct.nii.gz\"}, {\"image\": \"s1329/ct.nii.gz\"}, {\"image\": \"s1328/ct.nii.gz\"}, {\"image\": \"s1327/ct.nii.gz\"}, {\"image\": \"s1326/ct.nii.gz\"}, {\"image\": \"s1325/ct.nii.gz\"}, {\"image\": \"s1323/ct.nii.gz\"}, {\"image\": \"s1322/ct.nii.gz\"}, {\"image\": \"s1321/ct.nii.gz\"}, {\"image\": \"s1319/ct.nii.gz\"}, {\"image\": \"s1318/ct.nii.gz\"}, {\"image\": \"s1317/ct.nii.gz\"}, {\"image\": \"s1316/ct.nii.gz\"}, {\"image\": \"s1315/ct.nii.gz\"}, {\"image\": \"s1314/ct.nii.gz\"}, {\"image\": \"s1312/ct.nii.gz\"}, {\"image\": \"s1311/ct.nii.gz\"}, {\"image\": \"s1310/ct.nii.gz\"}, {\"image\": \"s1309/ct.nii.gz\"}, {\"image\": 
\"s1308/ct.nii.gz\"}, {\"image\": \"s1307/ct.nii.gz\"}, {\"image\": \"s1305/ct.nii.gz\"}, {\"image\": \"s1304/ct.nii.gz\"}, {\"image\": \"s1303/ct.nii.gz\"}, {\"image\": \"s1301/ct.nii.gz\"}, {\"image\": \"s1300/ct.nii.gz\"}, {\"image\": \"s1299/ct.nii.gz\"}, {\"image\": \"s1298/ct.nii.gz\"}, {\"image\": \"s1297/ct.nii.gz\"}, {\"image\": \"s1296/ct.nii.gz\"}, {\"image\": \"s1295/ct.nii.gz\"}, {\"image\": \"s1294/ct.nii.gz\"}, {\"image\": \"s1293/ct.nii.gz\"}, {\"image\": \"s1292/ct.nii.gz\"}, {\"image\": \"s1291/ct.nii.gz\"}, {\"image\": \"s1290/ct.nii.gz\"}, {\"image\": \"s1289/ct.nii.gz\"}, {\"image\": \"s1288/ct.nii.gz\"}, {\"image\": \"s1287/ct.nii.gz\"}, {\"image\": \"s1286/ct.nii.gz\"}, {\"image\": \"s1285/ct.nii.gz\"}, {\"image\": \"s1283/ct.nii.gz\"}, {\"image\": \"s1281/ct.nii.gz\"}, {\"image\": \"s1280/ct.nii.gz\"}, {\"image\": \"s1279/ct.nii.gz\"}, {\"image\": \"s1278/ct.nii.gz\"}, {\"image\": \"s1277/ct.nii.gz\"}, {\"image\": \"s1276/ct.nii.gz\"}, {\"image\": \"s1275/ct.nii.gz\"}, {\"image\": \"s1274/ct.nii.gz\"}, {\"image\": \"s1273/ct.nii.gz\"}, {\"image\": \"s1272/ct.nii.gz\"}, {\"image\": \"s1271/ct.nii.gz\"}, {\"image\": \"s1270/ct.nii.gz\"}, {\"image\": \"s1269/ct.nii.gz\"}, {\"image\": \"s1268/ct.nii.gz\"}, {\"image\": \"s1267/ct.nii.gz\"}, {\"image\": \"s1264/ct.nii.gz\"}, {\"image\": \"s1262/ct.nii.gz\"}, {\"image\": \"s1261/ct.nii.gz\"}, {\"image\": \"s1260/ct.nii.gz\"}, {\"image\": \"s1259/ct.nii.gz\"}, {\"image\": \"s1258/ct.nii.gz\"}, {\"image\": \"s1257/ct.nii.gz\"}, {\"image\": \"s1256/ct.nii.gz\"}, {\"image\": \"s1255/ct.nii.gz\"}, {\"image\": \"s1254/ct.nii.gz\"}, {\"image\": \"s1252/ct.nii.gz\"}, {\"image\": \"s1251/ct.nii.gz\"}, {\"image\": \"s1250/ct.nii.gz\"}, {\"image\": \"s1249/ct.nii.gz\"}, {\"image\": \"s1248/ct.nii.gz\"}, {\"image\": \"s1247/ct.nii.gz\"}, {\"image\": \"s1246/ct.nii.gz\"}, {\"image\": \"s1245/ct.nii.gz\"}, {\"image\": \"s1244/ct.nii.gz\"}, {\"image\": \"s1243/ct.nii.gz\"}, {\"image\": \"s1242/ct.nii.gz\"}, 
{\"image\": \"s1241/ct.nii.gz\"}, {\"image\": \"s1240/ct.nii.gz\"}, {\"image\": \"s1239/ct.nii.gz\"}, {\"image\": \"s1238/ct.nii.gz\"}, {\"image\": \"s1237/ct.nii.gz\"}, {\"image\": \"s1236/ct.nii.gz\"}, {\"image\": \"s1235/ct.nii.gz\"}, {\"image\": \"s1234/ct.nii.gz\"}, {\"image\": \"s1228/ct.nii.gz\"}, {\"image\": \"s1215/ct.nii.gz\"}, {\"image\": \"s1210/ct.nii.gz\"}, {\"image\": \"s1206/ct.nii.gz\"}, {\"image\": \"s1197/ct.nii.gz\"}, {\"image\": \"s1191/ct.nii.gz\"}, {\"image\": \"s1190/ct.nii.gz\"}, {\"image\": \"s1189/ct.nii.gz\"}, {\"image\": \"s1187/ct.nii.gz\"}, {\"image\": \"s1185/ct.nii.gz\"}, {\"image\": \"s1184/ct.nii.gz\"}, {\"image\": \"s1183/ct.nii.gz\"}, {\"image\": \"s1182/ct.nii.gz\"}, {\"image\": \"s1179/ct.nii.gz\"}, {\"image\": \"s1178/ct.nii.gz\"}, {\"image\": \"s1177/ct.nii.gz\"}, {\"image\": \"s1176/ct.nii.gz\"}, {\"image\": \"s1175/ct.nii.gz\"}, {\"image\": \"s1174/ct.nii.gz\"}, {\"image\": \"s1173/ct.nii.gz\"}, {\"image\": \"s1172/ct.nii.gz\"}, {\"image\": \"s1171/ct.nii.gz\"}, {\"image\": \"s1170/ct.nii.gz\"}, {\"image\": \"s1169/ct.nii.gz\"}, {\"image\": \"s1168/ct.nii.gz\"}, {\"image\": \"s1167/ct.nii.gz\"}, {\"image\": \"s1165/ct.nii.gz\"}, {\"image\": \"s1164/ct.nii.gz\"}, {\"image\": \"s1163/ct.nii.gz\"}, {\"image\": \"s1162/ct.nii.gz\"}, {\"image\": \"s1161/ct.nii.gz\"}, {\"image\": \"s1159/ct.nii.gz\"}, {\"image\": \"s1158/ct.nii.gz\"}, {\"image\": \"s1157/ct.nii.gz\"}, {\"image\": \"s1156/ct.nii.gz\"}, {\"image\": \"s1155/ct.nii.gz\"}, {\"image\": \"s1154/ct.nii.gz\"}, {\"image\": \"s1153/ct.nii.gz\"}, {\"image\": \"s1152/ct.nii.gz\"}, {\"image\": \"s1151/ct.nii.gz\"}, {\"image\": \"s1150/ct.nii.gz\"}, {\"image\": \"s1149/ct.nii.gz\"}, {\"image\": \"s1148/ct.nii.gz\"}, {\"image\": \"s1147/ct.nii.gz\"}, {\"image\": \"s1146/ct.nii.gz\"}, {\"image\": \"s1145/ct.nii.gz\"}, {\"image\": \"s1144/ct.nii.gz\"}, {\"image\": \"s1143/ct.nii.gz\"}, {\"image\": \"s1142/ct.nii.gz\"}, {\"image\": \"s1141/ct.nii.gz\"}, {\"image\": 
\"s1140/ct.nii.gz\"}, {\"image\": \"s1138/ct.nii.gz\"}, {\"image\": \"s1137/ct.nii.gz\"}, {\"image\": \"s1136/ct.nii.gz\"}, {\"image\": \"s1135/ct.nii.gz\"}, {\"image\": \"s1134/ct.nii.gz\"}, {\"image\": \"s1133/ct.nii.gz\"}, {\"image\": \"s1132/ct.nii.gz\"}, {\"image\": \"s1131/ct.nii.gz\"}, {\"image\": \"s1130/ct.nii.gz\"}, {\"image\": \"s1129/ct.nii.gz\"}, {\"image\": \"s1128/ct.nii.gz\"}, {\"image\": \"s1127/ct.nii.gz\"}, {\"image\": \"s1125/ct.nii.gz\"}, {\"image\": \"s1124/ct.nii.gz\"}, {\"image\": \"s1123/ct.nii.gz\"}, {\"image\": \"s1122/ct.nii.gz\"}, {\"image\": \"s1120/ct.nii.gz\"}, {\"image\": \"s1112/ct.nii.gz\"}, {\"image\": \"s1110/ct.nii.gz\"}, {\"image\": \"s1099/ct.nii.gz\"}, {\"image\": \"s1097/ct.nii.gz\"}, {\"image\": \"s1093/ct.nii.gz\"}, {\"image\": \"s1086/ct.nii.gz\"}, {\"image\": \"s1070/ct.nii.gz\"}, {\"image\": \"s1069/ct.nii.gz\"}, {\"image\": \"s1067/ct.nii.gz\"}, {\"image\": \"s1065/ct.nii.gz\"}, {\"image\": \"s1063/ct.nii.gz\"}, {\"image\": \"s1060/ct.nii.gz\"}, {\"image\": \"s1056/ct.nii.gz\"}, {\"image\": \"s1049/ct.nii.gz\"}, {\"image\": \"s1048/ct.nii.gz\"}, {\"image\": \"s1040/ct.nii.gz\"}, {\"image\": \"s1022/ct.nii.gz\"}, {\"image\": \"s1017/ct.nii.gz\"}, {\"image\": \"s1015/ct.nii.gz\"}, {\"image\": \"s1010/ct.nii.gz\"}, {\"image\": \"s1005/ct.nii.gz\"}, {\"image\": \"s0994/ct.nii.gz\"}, {\"image\": \"s0980/ct.nii.gz\"}, {\"image\": \"s0972/ct.nii.gz\"}, {\"image\": \"s0962/ct.nii.gz\"}, {\"image\": \"s0956/ct.nii.gz\"}, {\"image\": \"s0955/ct.nii.gz\"}, {\"image\": \"s0949/ct.nii.gz\"}, {\"image\": \"s0945/ct.nii.gz\"}, {\"image\": \"s0923/ct.nii.gz\"}, {\"image\": \"s0913/ct.nii.gz\"}, {\"image\": \"s0908/ct.nii.gz\"}, {\"image\": \"s0907/ct.nii.gz\"}, {\"image\": \"s0891/ct.nii.gz\"}, {\"image\": \"s0880/ct.nii.gz\"}, {\"image\": \"s0870/ct.nii.gz\"}, {\"image\": \"s0868/ct.nii.gz\"}, {\"image\": \"s0866/ct.nii.gz\"}, {\"image\": \"s0865/ct.nii.gz\"}, {\"image\": \"s0862/ct.nii.gz\"}, {\"image\": \"s0860/ct.nii.gz\"}, 
{\"image\": \"s0852/ct.nii.gz\"}, {\"image\": \"s0851/ct.nii.gz\"}, {\"image\": \"s0818/ct.nii.gz\"}, {\"image\": \"s0813/ct.nii.gz\"}, {\"image\": \"s0809/ct.nii.gz\"}, {\"image\": \"s0789/ct.nii.gz\"}, {\"image\": \"s0788/ct.nii.gz\"}, {\"image\": \"s0784/ct.nii.gz\"}, {\"image\": \"s0782/ct.nii.gz\"}, {\"image\": \"s0777/ct.nii.gz\"}, {\"image\": \"s0773/ct.nii.gz\"}, {\"image\": \"s0768/ct.nii.gz\"}, {\"image\": \"s0766/ct.nii.gz\"}, {\"image\": \"s0746/ct.nii.gz\"}, {\"image\": \"s0733/ct.nii.gz\"}, {\"image\": \"s0718/ct.nii.gz\"}, {\"image\": \"s0714/ct.nii.gz\"}, {\"image\": \"s0710/ct.nii.gz\"}, {\"image\": \"s0708/ct.nii.gz\"}, {\"image\": \"s0682/ct.nii.gz\"}, {\"image\": \"s0681/ct.nii.gz\"}, {\"image\": \"s0674/ct.nii.gz\"}, {\"image\": \"s0672/ct.nii.gz\"}, {\"image\": \"s0666/ct.nii.gz\"}, {\"image\": \"s0660/ct.nii.gz\"}, {\"image\": \"s0655/ct.nii.gz\"}, {\"image\": \"s0650/ct.nii.gz\"}, {\"image\": \"s0622/ct.nii.gz\"}, {\"image\": \"s0618/ct.nii.gz\"}, {\"image\": \"s0612/ct.nii.gz\"}, {\"image\": \"s0611/ct.nii.gz\"}, {\"image\": \"s0600/ct.nii.gz\"}, {\"image\": \"s0593/ct.nii.gz\"}, {\"image\": \"s0586/ct.nii.gz\"}, {\"image\": \"s0581/ct.nii.gz\"}, {\"image\": \"s0579/ct.nii.gz\"}, {\"image\": \"s0576/ct.nii.gz\"}, {\"image\": \"s0568/ct.nii.gz\"}, {\"image\": \"s0560/ct.nii.gz\"}, {\"image\": \"s0535/ct.nii.gz\"}, {\"image\": \"s0511/ct.nii.gz\"}, {\"image\": \"s0502/ct.nii.gz\"}, {\"image\": \"s0499/ct.nii.gz\"}, {\"image\": \"s0497/ct.nii.gz\"}, {\"image\": \"s0493/ct.nii.gz\"}, {\"image\": \"s0492/ct.nii.gz\"}, {\"image\": \"s0486/ct.nii.gz\"}, {\"image\": \"s0475/ct.nii.gz\"}, {\"image\": \"s0472/ct.nii.gz\"}, {\"image\": \"s0461/ct.nii.gz\"}, {\"image\": \"s0454/ct.nii.gz\"}, {\"image\": \"s0449/ct.nii.gz\"}, {\"image\": \"s0445/ct.nii.gz\"}, {\"image\": \"s0438/ct.nii.gz\"}, {\"image\": \"s0429/ct.nii.gz\"}, {\"image\": \"s0428/ct.nii.gz\"}, {\"image\": \"s0420/ct.nii.gz\"}, {\"image\": \"s0419/ct.nii.gz\"}, {\"image\": 
\"s0414/ct.nii.gz\"}, {\"image\": \"s0406/ct.nii.gz\"}, {\"image\": \"s0393/ct.nii.gz\"}, {\"image\": \"s0390/ct.nii.gz\"}, {\"image\": \"s0385/ct.nii.gz\"}, {\"image\": \"s0377/ct.nii.gz\"}, {\"image\": \"s0375/ct.nii.gz\"}, {\"image\": \"s0371/ct.nii.gz\"}, {\"image\": \"s0369/ct.nii.gz\"}, {\"image\": \"s0342/ct.nii.gz\"}, {\"image\": \"s0338/ct.nii.gz\"}, {\"image\": \"s0334/ct.nii.gz\"}, {\"image\": \"s0331/ct.nii.gz\"}, {\"image\": \"s0319/ct.nii.gz\"}, {\"image\": \"s0315/ct.nii.gz\"}, {\"image\": \"s0306/ct.nii.gz\"}, {\"image\": \"s0294/ct.nii.gz\"}, {\"image\": \"s0283/ct.nii.gz\"}, {\"image\": \"s0279/ct.nii.gz\"}, {\"image\": \"s0275/ct.nii.gz\"}, {\"image\": \"s0266/ct.nii.gz\"}, {\"image\": \"s0261/ct.nii.gz\"}, {\"image\": \"s0260/ct.nii.gz\"}, {\"image\": \"s0257/ct.nii.gz\"}, {\"image\": \"s0247/ct.nii.gz\"}, {\"image\": \"s0230/ct.nii.gz\"}, {\"image\": \"s0229/ct.nii.gz\"}, {\"image\": \"s0224/ct.nii.gz\"}, {\"image\": \"s0222/ct.nii.gz\"}, {\"image\": \"s0208/ct.nii.gz\"}, {\"image\": \"s0193/ct.nii.gz\"}, {\"image\": \"s0188/ct.nii.gz\"}, {\"image\": \"s0172/ct.nii.gz\"}, {\"image\": \"s0170/ct.nii.gz\"}, {\"image\": \"s0162/ct.nii.gz\"}, {\"image\": \"s0150/ct.nii.gz\"}, {\"image\": \"s0147/ct.nii.gz\"}, {\"image\": \"s0141/ct.nii.gz\"}, {\"image\": \"s0131/ct.nii.gz\"}, {\"image\": \"s0129/ct.nii.gz\"}, {\"image\": \"s0124/ct.nii.gz\"}, {\"image\": \"s0122/ct.nii.gz\"}, {\"image\": \"s0112/ct.nii.gz\"}, {\"image\": \"s0111/ct.nii.gz\"}, {\"image\": \"s0105/ct.nii.gz\"}, {\"image\": \"s0100/ct.nii.gz\"}, {\"image\": \"s0088/ct.nii.gz\"}, {\"image\": \"s0084/ct.nii.gz\"}, {\"image\": \"s0083/ct.nii.gz\"}, {\"image\": \"s0065/ct.nii.gz\"}, {\"image\": \"s0059/ct.nii.gz\"}, {\"image\": \"s0056/ct.nii.gz\"}, {\"image\": \"s0045/ct.nii.gz\"}, {\"image\": \"s0043/ct.nii.gz\"}, {\"image\": \"s0042/ct.nii.gz\"}, {\"image\": \"s0028/ct.nii.gz\"}, {\"image\": \"s0024/ct.nii.gz\"}, {\"image\": \"s0022/ct.nii.gz\"}, {\"image\": \"s0000/ct.nii.gz\"}, 
{\"image\": \"s1391/ct.nii.gz\"}, {\"image\": \"s1389/ct.nii.gz\"}, {\"image\": \"s1388/ct.nii.gz\"}, {\"image\": \"s1387/ct.nii.gz\"}, {\"image\": \"s1386/ct.nii.gz\"}, {\"image\": \"s1384/ct.nii.gz\"}, {\"image\": \"s1383/ct.nii.gz\"}, {\"image\": \"s1382/ct.nii.gz\"}, {\"image\": \"s1380/ct.nii.gz\"}, {\"image\": \"s1379/ct.nii.gz\"}, {\"image\": \"s1378/ct.nii.gz\"}, {\"image\": \"s1377/ct.nii.gz\"}, {\"image\": \"s1375/ct.nii.gz\"}, {\"image\": \"s1373/ct.nii.gz\"}, {\"image\": \"s1372/ct.nii.gz\"}, {\"image\": \"s1371/ct.nii.gz\"}, {\"image\": \"s1370/ct.nii.gz\"}, {\"image\": \"s1369/ct.nii.gz\"}, {\"image\": \"s1367/ct.nii.gz\"}, {\"image\": \"s1366/ct.nii.gz\"}, {\"image\": \"s1365/ct.nii.gz\"}, {\"image\": \"s1363/ct.nii.gz\"}, {\"image\": \"s1362/ct.nii.gz\"}, {\"image\": \"s1361/ct.nii.gz\"}, {\"image\": \"s1359/ct.nii.gz\"}, {\"image\": \"s1358/ct.nii.gz\"}, {\"image\": \"s1357/ct.nii.gz\"}, {\"image\": \"s1233/ct.nii.gz\"}, {\"image\": \"s1231/ct.nii.gz\"}, {\"image\": \"s1230/ct.nii.gz\"}, {\"image\": \"s1227/ct.nii.gz\"}, {\"image\": \"s1226/ct.nii.gz\"}, {\"image\": \"s1225/ct.nii.gz\"}, {\"image\": \"s1224/ct.nii.gz\"}, {\"image\": \"s1223/ct.nii.gz\"}, {\"image\": \"s1222/ct.nii.gz\"}, {\"image\": \"s1221/ct.nii.gz\"}, {\"image\": \"s1220/ct.nii.gz\"}, {\"image\": \"s1218/ct.nii.gz\"}, {\"image\": \"s1216/ct.nii.gz\"}, {\"image\": \"s1212/ct.nii.gz\"}, {\"image\": \"s1209/ct.nii.gz\"}, {\"image\": \"s1208/ct.nii.gz\"}, {\"image\": \"s1207/ct.nii.gz\"}, {\"image\": \"s1205/ct.nii.gz\"}, {\"image\": \"s1203/ct.nii.gz\"}, {\"image\": \"s1201/ct.nii.gz\"}, {\"image\": \"s1199/ct.nii.gz\"}, {\"image\": \"s1196/ct.nii.gz\"}, {\"image\": \"s1195/ct.nii.gz\"}, {\"image\": \"s1194/ct.nii.gz\"}, {\"image\": \"s1192/ct.nii.gz\"}, {\"image\": \"s1121/ct.nii.gz\"}, {\"image\": \"s1119/ct.nii.gz\"}, {\"image\": \"s1115/ct.nii.gz\"}, {\"image\": \"s1114/ct.nii.gz\"}, {\"image\": \"s1113/ct.nii.gz\"}, {\"image\": \"s1111/ct.nii.gz\"}, {\"image\": 
\"s1109/ct.nii.gz\"}, {\"image\": \"s1107/ct.nii.gz\"}, {\"image\": \"s1106/ct.nii.gz\"}, {\"image\": \"s1105/ct.nii.gz\"}, {\"image\": \"s1104/ct.nii.gz\"}, {\"image\": \"s1103/ct.nii.gz\"}, {\"image\": \"s1102/ct.nii.gz\"}, {\"image\": \"s1101/ct.nii.gz\"}, {\"image\": \"s1100/ct.nii.gz\"}, {\"image\": \"s1098/ct.nii.gz\"}, {\"image\": \"s1096/ct.nii.gz\"}, {\"image\": \"s1094/ct.nii.gz\"}, {\"image\": \"s1090/ct.nii.gz\"}, {\"image\": \"s1089/ct.nii.gz\"}, {\"image\": \"s1088/ct.nii.gz\"}, {\"image\": \"s1085/ct.nii.gz\"}, {\"image\": \"s1084/ct.nii.gz\"}, {\"image\": \"s1083/ct.nii.gz\"}, {\"image\": \"s1082/ct.nii.gz\"}, {\"image\": \"s1079/ct.nii.gz\"}, {\"image\": \"s1077/ct.nii.gz\"}, {\"image\": \"s1075/ct.nii.gz\"}, {\"image\": \"s1073/ct.nii.gz\"}, {\"image\": \"s1072/ct.nii.gz\"}, {\"image\": \"s1071/ct.nii.gz\"}, {\"image\": \"s1068/ct.nii.gz\"}, {\"image\": \"s1066/ct.nii.gz\"}, {\"image\": \"s1062/ct.nii.gz\"}, {\"image\": \"s1061/ct.nii.gz\"}, {\"image\": \"s1059/ct.nii.gz\"}, {\"image\": \"s1058/ct.nii.gz\"}, {\"image\": \"s1057/ct.nii.gz\"}, {\"image\": \"s1055/ct.nii.gz\"}, {\"image\": \"s1053/ct.nii.gz\"}, {\"image\": \"s1052/ct.nii.gz\"}, {\"image\": \"s1051/ct.nii.gz\"}, {\"image\": \"s1050/ct.nii.gz\"}, {\"image\": \"s1047/ct.nii.gz\"}, {\"image\": \"s1046/ct.nii.gz\"}, {\"image\": \"s1045/ct.nii.gz\"}, {\"image\": \"s1044/ct.nii.gz\"}, {\"image\": \"s1043/ct.nii.gz\"}, {\"image\": \"s1042/ct.nii.gz\"}, {\"image\": \"s1041/ct.nii.gz\"}, {\"image\": \"s1039/ct.nii.gz\"}, {\"image\": \"s1038/ct.nii.gz\"}, {\"image\": \"s1037/ct.nii.gz\"}, {\"image\": \"s1036/ct.nii.gz\"}, {\"image\": \"s1035/ct.nii.gz\"}, {\"image\": \"s1034/ct.nii.gz\"}, {\"image\": \"s1033/ct.nii.gz\"}, {\"image\": \"s1032/ct.nii.gz\"}, {\"image\": \"s1031/ct.nii.gz\"}, {\"image\": \"s1029/ct.nii.gz\"}, {\"image\": \"s1028/ct.nii.gz\"}, {\"image\": \"s1026/ct.nii.gz\"}, {\"image\": \"s1025/ct.nii.gz\"}, {\"image\": \"s1024/ct.nii.gz\"}, {\"image\": \"s1023/ct.nii.gz\"}, 
{\"image\": \"s1021/ct.nii.gz\"}, {\"image\": \"s1020/ct.nii.gz\"}, {\"image\": \"s1018/ct.nii.gz\"}, {\"image\": \"s1016/ct.nii.gz\"}, {\"image\": \"s1014/ct.nii.gz\"}, {\"image\": \"s1013/ct.nii.gz\"}, {\"image\": \"s1012/ct.nii.gz\"}, {\"image\": \"s1011/ct.nii.gz\"}, {\"image\": \"s1009/ct.nii.gz\"}, {\"image\": \"s1008/ct.nii.gz\"}, {\"image\": \"s1007/ct.nii.gz\"}, {\"image\": \"s1006/ct.nii.gz\"}, {\"image\": \"s1004/ct.nii.gz\"}, {\"image\": \"s1003/ct.nii.gz\"}, {\"image\": \"s1002/ct.nii.gz\"}, {\"image\": \"s1001/ct.nii.gz\"}, {\"image\": \"s1000/ct.nii.gz\"}, {\"image\": \"s0999/ct.nii.gz\"}, {\"image\": \"s0997/ct.nii.gz\"}, {\"image\": \"s0996/ct.nii.gz\"}, {\"image\": \"s0995/ct.nii.gz\"}, {\"image\": \"s0993/ct.nii.gz\"}, {\"image\": \"s0992/ct.nii.gz\"}, {\"image\": \"s0991/ct.nii.gz\"}, {\"image\": \"s0989/ct.nii.gz\"}, {\"image\": \"s0988/ct.nii.gz\"}, {\"image\": \"s0987/ct.nii.gz\"}, {\"image\": \"s0986/ct.nii.gz\"}, {\"image\": \"s0985/ct.nii.gz\"}, {\"image\": \"s0984/ct.nii.gz\"}, {\"image\": \"s0983/ct.nii.gz\"}, {\"image\": \"s0982/ct.nii.gz\"}, {\"image\": \"s0981/ct.nii.gz\"}, {\"image\": \"s0979/ct.nii.gz\"}, {\"image\": \"s0978/ct.nii.gz\"}, {\"image\": \"s0977/ct.nii.gz\"}, {\"image\": \"s0976/ct.nii.gz\"}, {\"image\": \"s0975/ct.nii.gz\"}, {\"image\": \"s0974/ct.nii.gz\"}, {\"image\": \"s0973/ct.nii.gz\"}, {\"image\": \"s0971/ct.nii.gz\"}, {\"image\": \"s0970/ct.nii.gz\"}, {\"image\": \"s0968/ct.nii.gz\"}, {\"image\": \"s0965/ct.nii.gz\"}, {\"image\": \"s0963/ct.nii.gz\"}, {\"image\": \"s0961/ct.nii.gz\"}, {\"image\": \"s0960/ct.nii.gz\"}, {\"image\": \"s0959/ct.nii.gz\"}, {\"image\": \"s0958/ct.nii.gz\"}, {\"image\": \"s0957/ct.nii.gz\"}, {\"image\": \"s0954/ct.nii.gz\"}, {\"image\": \"s0953/ct.nii.gz\"}, {\"image\": \"s0952/ct.nii.gz\"}, {\"image\": \"s0951/ct.nii.gz\"}, {\"image\": \"s0950/ct.nii.gz\"}, {\"image\": \"s0947/ct.nii.gz\"}, {\"image\": \"s0946/ct.nii.gz\"}, {\"image\": \"s0944/ct.nii.gz\"}, {\"image\": 
\"s0943/ct.nii.gz\"}, {\"image\": \"s0941/ct.nii.gz\"}, {\"image\": \"s0940/ct.nii.gz\"}, {\"image\": \"s0939/ct.nii.gz\"}, {\"image\": \"s0938/ct.nii.gz\"}, {\"image\": \"s0937/ct.nii.gz\"}, {\"image\": \"s0936/ct.nii.gz\"}, {\"image\": \"s0935/ct.nii.gz\"}, {\"image\": \"s0934/ct.nii.gz\"}, {\"image\": \"s0933/ct.nii.gz\"}, {\"image\": \"s0931/ct.nii.gz\"}, {\"image\": \"s0930/ct.nii.gz\"}, {\"image\": \"s0928/ct.nii.gz\"}, {\"image\": \"s0927/ct.nii.gz\"}, {\"image\": \"s0925/ct.nii.gz\"}, {\"image\": \"s0924/ct.nii.gz\"}, {\"image\": \"s0922/ct.nii.gz\"}, {\"image\": \"s0921/ct.nii.gz\"}, {\"image\": \"s0919/ct.nii.gz\"}, {\"image\": \"s0918/ct.nii.gz\"}, {\"image\": \"s0916/ct.nii.gz\"}, {\"image\": \"s0915/ct.nii.gz\"}, {\"image\": \"s0914/ct.nii.gz\"}, {\"image\": \"s0912/ct.nii.gz\"}, {\"image\": \"s0911/ct.nii.gz\"}, {\"image\": \"s0910/ct.nii.gz\"}, {\"image\": \"s0909/ct.nii.gz\"}, {\"image\": \"s0904/ct.nii.gz\"}, {\"image\": \"s0903/ct.nii.gz\"}, {\"image\": \"s0901/ct.nii.gz\"}, {\"image\": \"s0899/ct.nii.gz\"}, {\"image\": \"s0898/ct.nii.gz\"}, {\"image\": \"s0897/ct.nii.gz\"}, {\"image\": \"s0896/ct.nii.gz\"}, {\"image\": \"s0895/ct.nii.gz\"}, {\"image\": \"s0894/ct.nii.gz\"}, {\"image\": \"s0892/ct.nii.gz\"}, {\"image\": \"s0890/ct.nii.gz\"}, {\"image\": \"s0889/ct.nii.gz\"}, {\"image\": \"s0885/ct.nii.gz\"}, {\"image\": \"s0884/ct.nii.gz\"}, {\"image\": \"s0883/ct.nii.gz\"}, {\"image\": \"s0881/ct.nii.gz\"}, {\"image\": \"s0879/ct.nii.gz\"}, {\"image\": \"s0878/ct.nii.gz\"}, {\"image\": \"s0877/ct.nii.gz\"}, {\"image\": \"s0876/ct.nii.gz\"}, {\"image\": \"s0875/ct.nii.gz\"}, {\"image\": \"s0874/ct.nii.gz\"}, {\"image\": \"s0873/ct.nii.gz\"}, {\"image\": \"s0871/ct.nii.gz\"}, {\"image\": \"s0869/ct.nii.gz\"}, {\"image\": \"s0867/ct.nii.gz\"}, {\"image\": \"s0863/ct.nii.gz\"}, {\"image\": \"s0861/ct.nii.gz\"}, {\"image\": \"s0859/ct.nii.gz\"}, {\"image\": \"s0858/ct.nii.gz\"}, {\"image\": \"s0857/ct.nii.gz\"}, {\"image\": \"s0855/ct.nii.gz\"}, 
{\"image\": \"s0853/ct.nii.gz\"}, {\"image\": \"s0850/ct.nii.gz\"}, {\"image\": \"s0849/ct.nii.gz\"}, {\"image\": \"s0848/ct.nii.gz\"}, {\"image\": \"s0847/ct.nii.gz\"}, {\"image\": \"s0846/ct.nii.gz\"}, {\"image\": \"s0845/ct.nii.gz\"}, {\"image\": \"s0844/ct.nii.gz\"}, {\"image\": \"s0843/ct.nii.gz\"}, {\"image\": \"s0842/ct.nii.gz\"}, {\"image\": \"s0840/ct.nii.gz\"}, {\"image\": \"s0839/ct.nii.gz\"}, {\"image\": \"s0838/ct.nii.gz\"}, {\"image\": \"s0837/ct.nii.gz\"}, {\"image\": \"s0836/ct.nii.gz\"}, {\"image\": \"s0835/ct.nii.gz\"}, {\"image\": \"s0834/ct.nii.gz\"}, {\"image\": \"s0833/ct.nii.gz\"}, {\"image\": \"s0832/ct.nii.gz\"}, {\"image\": \"s0831/ct.nii.gz\"}, {\"image\": \"s0830/ct.nii.gz\"}, {\"image\": \"s0829/ct.nii.gz\"}, {\"image\": \"s0826/ct.nii.gz\"}, {\"image\": \"s0825/ct.nii.gz\"}, {\"image\": \"s0824/ct.nii.gz\"}, {\"image\": \"s0822/ct.nii.gz\"}, {\"image\": \"s0821/ct.nii.gz\"}, {\"image\": \"s0820/ct.nii.gz\"}, {\"image\": \"s0819/ct.nii.gz\"}, {\"image\": \"s0816/ct.nii.gz\"}, {\"image\": \"s0815/ct.nii.gz\"}, {\"image\": \"s0814/ct.nii.gz\"}, {\"image\": \"s0812/ct.nii.gz\"}, {\"image\": \"s0811/ct.nii.gz\"}, {\"image\": \"s0810/ct.nii.gz\"}, {\"image\": \"s0808/ct.nii.gz\"}, {\"image\": \"s0807/ct.nii.gz\"}, {\"image\": \"s0806/ct.nii.gz\"}, {\"image\": \"s0805/ct.nii.gz\"}, {\"image\": \"s0804/ct.nii.gz\"}, {\"image\": \"s0802/ct.nii.gz\"}, {\"image\": \"s0801/ct.nii.gz\"}, {\"image\": \"s0800/ct.nii.gz\"}, {\"image\": \"s0798/ct.nii.gz\"}, {\"image\": \"s0797/ct.nii.gz\"}, {\"image\": \"s0796/ct.nii.gz\"}, {\"image\": \"s0795/ct.nii.gz\"}, {\"image\": \"s0794/ct.nii.gz\"}, {\"image\": \"s0793/ct.nii.gz\"}, {\"image\": \"s0792/ct.nii.gz\"}, {\"image\": \"s0791/ct.nii.gz\"}, {\"image\": \"s0790/ct.nii.gz\"}, {\"image\": \"s0787/ct.nii.gz\"}, {\"image\": \"s0786/ct.nii.gz\"}, {\"image\": \"s0785/ct.nii.gz\"}, {\"image\": \"s0783/ct.nii.gz\"}, {\"image\": \"s0781/ct.nii.gz\"}, {\"image\": \"s0780/ct.nii.gz\"}, {\"image\": 
\"s0778/ct.nii.gz\"}, {\"image\": \"s0776/ct.nii.gz\"}, {\"image\": \"s0775/ct.nii.gz\"}, {\"image\": \"s0774/ct.nii.gz\"}, {\"image\": \"s0772/ct.nii.gz\"}, {\"image\": \"s0771/ct.nii.gz\"}, {\"image\": \"s0770/ct.nii.gz\"}, {\"image\": \"s0769/ct.nii.gz\"}, {\"image\": \"s0767/ct.nii.gz\"}, {\"image\": \"s0765/ct.nii.gz\"}, {\"image\": \"s0764/ct.nii.gz\"}, {\"image\": \"s0763/ct.nii.gz\"}, {\"image\": \"s0762/ct.nii.gz\"}, {\"image\": \"s0760/ct.nii.gz\"}, {\"image\": \"s0759/ct.nii.gz\"}, {\"image\": \"s0756/ct.nii.gz\"}, {\"image\": \"s0754/ct.nii.gz\"}, {\"image\": \"s0753/ct.nii.gz\"}, {\"image\": \"s0752/ct.nii.gz\"}, {\"image\": \"s0751/ct.nii.gz\"}, {\"image\": \"s0750/ct.nii.gz\"}, {\"image\": \"s0749/ct.nii.gz\"}, {\"image\": \"s0748/ct.nii.gz\"}, {\"image\": \"s0747/ct.nii.gz\"}, {\"image\": \"s0744/ct.nii.gz\"}, {\"image\": \"s0743/ct.nii.gz\"}, {\"image\": \"s0741/ct.nii.gz\"}, {\"image\": \"s0740/ct.nii.gz\"}, {\"image\": \"s0739/ct.nii.gz\"}, {\"image\": \"s0738/ct.nii.gz\"}, {\"image\": \"s0737/ct.nii.gz\"}, {\"image\": \"s0736/ct.nii.gz\"}, {\"image\": \"s0735/ct.nii.gz\"}, {\"image\": \"s0734/ct.nii.gz\"}, {\"image\": \"s0732/ct.nii.gz\"}, {\"image\": \"s0731/ct.nii.gz\"}, {\"image\": \"s0730/ct.nii.gz\"}, {\"image\": \"s0729/ct.nii.gz\"}, {\"image\": \"s0728/ct.nii.gz\"}, {\"image\": \"s0727/ct.nii.gz\"}, {\"image\": \"s0726/ct.nii.gz\"}, {\"image\": \"s0724/ct.nii.gz\"}, {\"image\": \"s0723/ct.nii.gz\"}, {\"image\": \"s0721/ct.nii.gz\"}, {\"image\": \"s0720/ct.nii.gz\"}, {\"image\": \"s0719/ct.nii.gz\"}, {\"image\": \"s0717/ct.nii.gz\"}, {\"image\": \"s0716/ct.nii.gz\"}, {\"image\": \"s0715/ct.nii.gz\"}, {\"image\": \"s0713/ct.nii.gz\"}, {\"image\": \"s0712/ct.nii.gz\"}, {\"image\": \"s0711/ct.nii.gz\"}, {\"image\": \"s0709/ct.nii.gz\"}, {\"image\": \"s0707/ct.nii.gz\"}, {\"image\": \"s0706/ct.nii.gz\"}, {\"image\": \"s0705/ct.nii.gz\"}, {\"image\": \"s0704/ct.nii.gz\"}, {\"image\": \"s0703/ct.nii.gz\"}, {\"image\": \"s0702/ct.nii.gz\"}, 
{\"image\": \"s0700/ct.nii.gz\"}, {\"image\": \"s0699/ct.nii.gz\"}, {\"image\": \"s0698/ct.nii.gz\"}, {\"image\": \"s0697/ct.nii.gz\"}, {\"image\": \"s0696/ct.nii.gz\"}, {\"image\": \"s0695/ct.nii.gz\"}, {\"image\": \"s0694/ct.nii.gz\"}, {\"image\": \"s0693/ct.nii.gz\"}, {\"image\": \"s0692/ct.nii.gz\"}, {\"image\": \"s0691/ct.nii.gz\"}, {\"image\": \"s0690/ct.nii.gz\"}, {\"image\": \"s0688/ct.nii.gz\"}, {\"image\": \"s0687/ct.nii.gz\"}, {\"image\": \"s0686/ct.nii.gz\"}, {\"image\": \"s0685/ct.nii.gz\"}, {\"image\": \"s0684/ct.nii.gz\"}, {\"image\": \"s0683/ct.nii.gz\"}, {\"image\": \"s0680/ct.nii.gz\"}, {\"image\": \"s0679/ct.nii.gz\"}, {\"image\": \"s0677/ct.nii.gz\"}, {\"image\": \"s0676/ct.nii.gz\"}, {\"image\": \"s0675/ct.nii.gz\"}, {\"image\": \"s0673/ct.nii.gz\"}, {\"image\": \"s0671/ct.nii.gz\"}, {\"image\": \"s0670/ct.nii.gz\"}, {\"image\": \"s0669/ct.nii.gz\"}, {\"image\": \"s0668/ct.nii.gz\"}, {\"image\": \"s0667/ct.nii.gz\"}, {\"image\": \"s0665/ct.nii.gz\"}, {\"image\": \"s0664/ct.nii.gz\"}, {\"image\": \"s0663/ct.nii.gz\"}, {\"image\": \"s0662/ct.nii.gz\"}, {\"image\": \"s0661/ct.nii.gz\"}, {\"image\": \"s0659/ct.nii.gz\"}, {\"image\": \"s0658/ct.nii.gz\"}, {\"image\": \"s0657/ct.nii.gz\"}, {\"image\": \"s0656/ct.nii.gz\"}, {\"image\": \"s0654/ct.nii.gz\"}, {\"image\": \"s0653/ct.nii.gz\"}, {\"image\": \"s0652/ct.nii.gz\"}, {\"image\": \"s0651/ct.nii.gz\"}, {\"image\": \"s0649/ct.nii.gz\"}, {\"image\": \"s0648/ct.nii.gz\"}, {\"image\": \"s0647/ct.nii.gz\"}, {\"image\": \"s0646/ct.nii.gz\"}, {\"image\": \"s0645/ct.nii.gz\"}, {\"image\": \"s0644/ct.nii.gz\"}, {\"image\": \"s0643/ct.nii.gz\"}, {\"image\": \"s0642/ct.nii.gz\"}, {\"image\": \"s0641/ct.nii.gz\"}, {\"image\": \"s0640/ct.nii.gz\"}, {\"image\": \"s0639/ct.nii.gz\"}, {\"image\": \"s0638/ct.nii.gz\"}, {\"image\": \"s0637/ct.nii.gz\"}, {\"image\": \"s0636/ct.nii.gz\"}, {\"image\": \"s0635/ct.nii.gz\"}, {\"image\": \"s0633/ct.nii.gz\"}, {\"image\": \"s0632/ct.nii.gz\"}, {\"image\": 
\"s0629/ct.nii.gz\"}, {\"image\": \"s0628/ct.nii.gz\"}, {\"image\": \"s0627/ct.nii.gz\"}, {\"image\": \"s0626/ct.nii.gz\"}, {\"image\": \"s0625/ct.nii.gz\"}, {\"image\": \"s0624/ct.nii.gz\"}, {\"image\": \"s0623/ct.nii.gz\"}, {\"image\": \"s0621/ct.nii.gz\"}, {\"image\": \"s0620/ct.nii.gz\"}, {\"image\": \"s0619/ct.nii.gz\"}, {\"image\": \"s0617/ct.nii.gz\"}, {\"image\": \"s0616/ct.nii.gz\"}, {\"image\": \"s0615/ct.nii.gz\"}, {\"image\": \"s0614/ct.nii.gz\"}, {\"image\": \"s0613/ct.nii.gz\"}, {\"image\": \"s0610/ct.nii.gz\"}, {\"image\": \"s0608/ct.nii.gz\"}, {\"image\": \"s0607/ct.nii.gz\"}, {\"image\": \"s0606/ct.nii.gz\"}, {\"image\": \"s0605/ct.nii.gz\"}, {\"image\": \"s0604/ct.nii.gz\"}, {\"image\": \"s0603/ct.nii.gz\"}, {\"image\": \"s0602/ct.nii.gz\"}, {\"image\": \"s0601/ct.nii.gz\"}, {\"image\": \"s0598/ct.nii.gz\"}, {\"image\": \"s0597/ct.nii.gz\"}, {\"image\": \"s0595/ct.nii.gz\"}, {\"image\": \"s0594/ct.nii.gz\"}, {\"image\": \"s0592/ct.nii.gz\"}, {\"image\": \"s0591/ct.nii.gz\"}, {\"image\": \"s0590/ct.nii.gz\"}, {\"image\": \"s0589/ct.nii.gz\"}, {\"image\": \"s0588/ct.nii.gz\"}, {\"image\": \"s0587/ct.nii.gz\"}, {\"image\": \"s0585/ct.nii.gz\"}, {\"image\": \"s0584/ct.nii.gz\"}, {\"image\": \"s0583/ct.nii.gz\"}, {\"image\": \"s0582/ct.nii.gz\"}, {\"image\": \"s0580/ct.nii.gz\"}, {\"image\": \"s0578/ct.nii.gz\"}, {\"image\": \"s0577/ct.nii.gz\"}, {\"image\": \"s0575/ct.nii.gz\"}, {\"image\": \"s0574/ct.nii.gz\"}, {\"image\": \"s0573/ct.nii.gz\"}, {\"image\": \"s0572/ct.nii.gz\"}, {\"image\": \"s0571/ct.nii.gz\"}, {\"image\": \"s0567/ct.nii.gz\"}, {\"image\": \"s0566/ct.nii.gz\"}, {\"image\": \"s0565/ct.nii.gz\"}, {\"image\": \"s0564/ct.nii.gz\"}, {\"image\": \"s0563/ct.nii.gz\"}, {\"image\": \"s0561/ct.nii.gz\"}, {\"image\": \"s0559/ct.nii.gz\"}, {\"image\": \"s0557/ct.nii.gz\"}, {\"image\": \"s0556/ct.nii.gz\"}, {\"image\": \"s0555/ct.nii.gz\"}, {\"image\": \"s0553/ct.nii.gz\"}, {\"image\": \"s0552/ct.nii.gz\"}, {\"image\": \"s0551/ct.nii.gz\"}, 
{\"image\": \"s0550/ct.nii.gz\"}, {\"image\": \"s0549/ct.nii.gz\"}, {\"image\": \"s0548/ct.nii.gz\"}, {\"image\": \"s0546/ct.nii.gz\"}, {\"image\": \"s0545/ct.nii.gz\"}, {\"image\": \"s0544/ct.nii.gz\"}, {\"image\": \"s0543/ct.nii.gz\"}, {\"image\": \"s0542/ct.nii.gz\"}, {\"image\": \"s0541/ct.nii.gz\"}, {\"image\": \"s0539/ct.nii.gz\"}, {\"image\": \"s0537/ct.nii.gz\"}, {\"image\": \"s0536/ct.nii.gz\"}, {\"image\": \"s0532/ct.nii.gz\"}, {\"image\": \"s0531/ct.nii.gz\"}, {\"image\": \"s0530/ct.nii.gz\"}, {\"image\": \"s0529/ct.nii.gz\"}, {\"image\": \"s0528/ct.nii.gz\"}, {\"image\": \"s0527/ct.nii.gz\"}, {\"image\": \"s0526/ct.nii.gz\"}, {\"image\": \"s0525/ct.nii.gz\"}, {\"image\": \"s0523/ct.nii.gz\"}, {\"image\": \"s0522/ct.nii.gz\"}, {\"image\": \"s0521/ct.nii.gz\"}, {\"image\": \"s0520/ct.nii.gz\"}, {\"image\": \"s0519/ct.nii.gz\"}, {\"image\": \"s0518/ct.nii.gz\"}, {\"image\": \"s0517/ct.nii.gz\"}, {\"image\": \"s0516/ct.nii.gz\"}, {\"image\": \"s0515/ct.nii.gz\"}, {\"image\": \"s0514/ct.nii.gz\"}, {\"image\": \"s0513/ct.nii.gz\"}, {\"image\": \"s0510/ct.nii.gz\"}, {\"image\": \"s0509/ct.nii.gz\"}, {\"image\": \"s0508/ct.nii.gz\"}, {\"image\": \"s0507/ct.nii.gz\"}, {\"image\": \"s0506/ct.nii.gz\"}, {\"image\": \"s0505/ct.nii.gz\"}, {\"image\": \"s0504/ct.nii.gz\"}, {\"image\": \"s0503/ct.nii.gz\"}, {\"image\": \"s0501/ct.nii.gz\"}, {\"image\": \"s0500/ct.nii.gz\"}, {\"image\": \"s0498/ct.nii.gz\"}, {\"image\": \"s0495/ct.nii.gz\"}, {\"image\": \"s0494/ct.nii.gz\"}, {\"image\": \"s0491/ct.nii.gz\"}, {\"image\": \"s0490/ct.nii.gz\"}, {\"image\": \"s0488/ct.nii.gz\"}, {\"image\": \"s0487/ct.nii.gz\"}, {\"image\": \"s0485/ct.nii.gz\"}, {\"image\": \"s0484/ct.nii.gz\"}, {\"image\": \"s0483/ct.nii.gz\"}, {\"image\": \"s0482/ct.nii.gz\"}, {\"image\": \"s0481/ct.nii.gz\"}, {\"image\": \"s0480/ct.nii.gz\"}, {\"image\": \"s0478/ct.nii.gz\"}, {\"image\": \"s0477/ct.nii.gz\"}, {\"image\": \"s0476/ct.nii.gz\"}, {\"image\": \"s0474/ct.nii.gz\"}, {\"image\": 
\"s0473/ct.nii.gz\"}, {\"image\": \"s0471/ct.nii.gz\"}, {\"image\": \"s0470/ct.nii.gz\"}, {\"image\": \"s0469/ct.nii.gz\"}, {\"image\": \"s0468/ct.nii.gz\"}, {\"image\": \"s0467/ct.nii.gz\"}, {\"image\": \"s0466/ct.nii.gz\"}, {\"image\": \"s0465/ct.nii.gz\"}, {\"image\": \"s0463/ct.nii.gz\"}, {\"image\": \"s0462/ct.nii.gz\"}, {\"image\": \"s0460/ct.nii.gz\"}, {\"image\": \"s0459/ct.nii.gz\"}, {\"image\": \"s0458/ct.nii.gz\"}, {\"image\": \"s0457/ct.nii.gz\"}, {\"image\": \"s0456/ct.nii.gz\"}, {\"image\": \"s0455/ct.nii.gz\"}, {\"image\": \"s0453/ct.nii.gz\"}, {\"image\": \"s0452/ct.nii.gz\"}, {\"image\": \"s0450/ct.nii.gz\"}, {\"image\": \"s0447/ct.nii.gz\"}, {\"image\": \"s0446/ct.nii.gz\"}, {\"image\": \"s0444/ct.nii.gz\"}, {\"image\": \"s0443/ct.nii.gz\"}, {\"image\": \"s0442/ct.nii.gz\"}, {\"image\": \"s0441/ct.nii.gz\"}, {\"image\": \"s0440/ct.nii.gz\"}, {\"image\": \"s0439/ct.nii.gz\"}, {\"image\": \"s0437/ct.nii.gz\"}, {\"image\": \"s0436/ct.nii.gz\"}, {\"image\": \"s0435/ct.nii.gz\"}, {\"image\": \"s0433/ct.nii.gz\"}, {\"image\": \"s0431/ct.nii.gz\"}, {\"image\": \"s0430/ct.nii.gz\"}, {\"image\": \"s0426/ct.nii.gz\"}, {\"image\": \"s0425/ct.nii.gz\"}, {\"image\": \"s0424/ct.nii.gz\"}, {\"image\": \"s0423/ct.nii.gz\"}, {\"image\": \"s0422/ct.nii.gz\"}, {\"image\": \"s0421/ct.nii.gz\"}, {\"image\": \"s0418/ct.nii.gz\"}, {\"image\": \"s0417/ct.nii.gz\"}, {\"image\": \"s0416/ct.nii.gz\"}, {\"image\": \"s0413/ct.nii.gz\"}, {\"image\": \"s0412/ct.nii.gz\"}, {\"image\": \"s0411/ct.nii.gz\"}, {\"image\": \"s0410/ct.nii.gz\"}, {\"image\": \"s0408/ct.nii.gz\"}, {\"image\": \"s0407/ct.nii.gz\"}, {\"image\": \"s0405/ct.nii.gz\"}, {\"image\": \"s0403/ct.nii.gz\"}, {\"image\": \"s0402/ct.nii.gz\"}, {\"image\": \"s0401/ct.nii.gz\"}, {\"image\": \"s0400/ct.nii.gz\"}, {\"image\": \"s0399/ct.nii.gz\"}, {\"image\": \"s0398/ct.nii.gz\"}, {\"image\": \"s0396/ct.nii.gz\"}, {\"image\": \"s0395/ct.nii.gz\"}, {\"image\": \"s0394/ct.nii.gz\"}, {\"image\": \"s0392/ct.nii.gz\"}, 
{\"image\": \"s0391/ct.nii.gz\"}, {\"image\": \"s0389/ct.nii.gz\"}, {\"image\": \"s0388/ct.nii.gz\"}, {\"image\": \"s0386/ct.nii.gz\"}, {\"image\": \"s0383/ct.nii.gz\"}, {\"image\": \"s0382/ct.nii.gz\"}, {\"image\": \"s0381/ct.nii.gz\"}, {\"image\": \"s0380/ct.nii.gz\"}, {\"image\": \"s0379/ct.nii.gz\"}, {\"image\": \"s0378/ct.nii.gz\"}, {\"image\": \"s0376/ct.nii.gz\"}, {\"image\": \"s0374/ct.nii.gz\"}, {\"image\": \"s0373/ct.nii.gz\"}, {\"image\": \"s0372/ct.nii.gz\"}, {\"image\": \"s0370/ct.nii.gz\"}, {\"image\": \"s0368/ct.nii.gz\"}, {\"image\": \"s0367/ct.nii.gz\"}, {\"image\": \"s0366/ct.nii.gz\"}, {\"image\": \"s0365/ct.nii.gz\"}, {\"image\": \"s0364/ct.nii.gz\"}, {\"image\": \"s0363/ct.nii.gz\"}, {\"image\": \"s0362/ct.nii.gz\"}, {\"image\": \"s0361/ct.nii.gz\"}, {\"image\": \"s0360/ct.nii.gz\"}, {\"image\": \"s0359/ct.nii.gz\"}, {\"image\": \"s0358/ct.nii.gz\"}, {\"image\": \"s0357/ct.nii.gz\"}, {\"image\": \"s0356/ct.nii.gz\"}, {\"image\": \"s0355/ct.nii.gz\"}, {\"image\": \"s0354/ct.nii.gz\"}, {\"image\": \"s0353/ct.nii.gz\"}, {\"image\": \"s0352/ct.nii.gz\"}, {\"image\": \"s0350/ct.nii.gz\"}, {\"image\": \"s0349/ct.nii.gz\"}, {\"image\": \"s0347/ct.nii.gz\"}, {\"image\": \"s0346/ct.nii.gz\"}, {\"image\": \"s0345/ct.nii.gz\"}, {\"image\": \"s0344/ct.nii.gz\"}, {\"image\": \"s0343/ct.nii.gz\"}, {\"image\": \"s0341/ct.nii.gz\"}, {\"image\": \"s0340/ct.nii.gz\"}, {\"image\": \"s0339/ct.nii.gz\"}, {\"image\": \"s0336/ct.nii.gz\"}, {\"image\": \"s0335/ct.nii.gz\"}, {\"image\": \"s0333/ct.nii.gz\"}, {\"image\": \"s0332/ct.nii.gz\"}, {\"image\": \"s0330/ct.nii.gz\"}, {\"image\": \"s0329/ct.nii.gz\"}, {\"image\": \"s0328/ct.nii.gz\"}, {\"image\": \"s0327/ct.nii.gz\"}, {\"image\": \"s0326/ct.nii.gz\"}, {\"image\": \"s0325/ct.nii.gz\"}, {\"image\": \"s0324/ct.nii.gz\"}, {\"image\": \"s0322/ct.nii.gz\"}, {\"image\": \"s0321/ct.nii.gz\"}, {\"image\": \"s0320/ct.nii.gz\"}, {\"image\": \"s0316/ct.nii.gz\"}, {\"image\": \"s0314/ct.nii.gz\"}, {\"image\": 
\"s0313/ct.nii.gz\"}, {\"image\": \"s0312/ct.nii.gz\"}, {\"image\": \"s0311/ct.nii.gz\"}, {\"image\": \"s0310/ct.nii.gz\"}, {\"image\": \"s0308/ct.nii.gz\"}, {\"image\": \"s0307/ct.nii.gz\"}, {\"image\": \"s0305/ct.nii.gz\"}, {\"image\": \"s0304/ct.nii.gz\"}, {\"image\": \"s0303/ct.nii.gz\"}, {\"image\": \"s0301/ct.nii.gz\"}, {\"image\": \"s0300/ct.nii.gz\"}, {\"image\": \"s0299/ct.nii.gz\"}, {\"image\": \"s0298/ct.nii.gz\"}, {\"image\": \"s0296/ct.nii.gz\"}, {\"image\": \"s0295/ct.nii.gz\"}, {\"image\": \"s0293/ct.nii.gz\"}, {\"image\": \"s0292/ct.nii.gz\"}, {\"image\": \"s0291/ct.nii.gz\"}, {\"image\": \"s0290/ct.nii.gz\"}, {\"image\": \"s0289/ct.nii.gz\"}, {\"image\": \"s0288/ct.nii.gz\"}, {\"image\": \"s0287/ct.nii.gz\"}, {\"image\": \"s0286/ct.nii.gz\"}, {\"image\": \"s0285/ct.nii.gz\"}, {\"image\": \"s0282/ct.nii.gz\"}, {\"image\": \"s0281/ct.nii.gz\"}, {\"image\": \"s0278/ct.nii.gz\"}, {\"image\": \"s0277/ct.nii.gz\"}, {\"image\": \"s0271/ct.nii.gz\"}, {\"image\": \"s0270/ct.nii.gz\"}, {\"image\": \"s0265/ct.nii.gz\"}, {\"image\": \"s0264/ct.nii.gz\"}, {\"image\": \"s0263/ct.nii.gz\"}, {\"image\": \"s0262/ct.nii.gz\"}, {\"image\": \"s0259/ct.nii.gz\"}, {\"image\": \"s0258/ct.nii.gz\"}, {\"image\": \"s0256/ct.nii.gz\"}, {\"image\": \"s0255/ct.nii.gz\"}, {\"image\": \"s0254/ct.nii.gz\"}, {\"image\": \"s0253/ct.nii.gz\"}, {\"image\": \"s0252/ct.nii.gz\"}, {\"image\": \"s0250/ct.nii.gz\"}, {\"image\": \"s0249/ct.nii.gz\"}, {\"image\": \"s0248/ct.nii.gz\"}, {\"image\": \"s0246/ct.nii.gz\"}, {\"image\": \"s0245/ct.nii.gz\"}, {\"image\": \"s0244/ct.nii.gz\"}, {\"image\": \"s0243/ct.nii.gz\"}, {\"image\": \"s0242/ct.nii.gz\"}, {\"image\": \"s0241/ct.nii.gz\"}, {\"image\": \"s0240/ct.nii.gz\"}, {\"image\": \"s0239/ct.nii.gz\"}, {\"image\": \"s0238/ct.nii.gz\"}, {\"image\": \"s0237/ct.nii.gz\"}, {\"image\": \"s0236/ct.nii.gz\"}, {\"image\": \"s0235/ct.nii.gz\"}, {\"image\": \"s0234/ct.nii.gz\"}, {\"image\": \"s0233/ct.nii.gz\"}, {\"image\": \"s0232/ct.nii.gz\"}, 
{\"image\": \"s0231/ct.nii.gz\"}, {\"image\": \"s0228/ct.nii.gz\"}, {\"image\": \"s0227/ct.nii.gz\"}, {\"image\": \"s0226/ct.nii.gz\"}, {\"image\": \"s0223/ct.nii.gz\"}, {\"image\": \"s0221/ct.nii.gz\"}, {\"image\": \"s0220/ct.nii.gz\"}, {\"image\": \"s0219/ct.nii.gz\"}, {\"image\": \"s0218/ct.nii.gz\"}, {\"image\": \"s0217/ct.nii.gz\"}, {\"image\": \"s0216/ct.nii.gz\"}, {\"image\": \"s0215/ct.nii.gz\"}, {\"image\": \"s0213/ct.nii.gz\"}, {\"image\": \"s0212/ct.nii.gz\"}, {\"image\": \"s0211/ct.nii.gz\"}, {\"image\": \"s0210/ct.nii.gz\"}, {\"image\": \"s0209/ct.nii.gz\"}, {\"image\": \"s0206/ct.nii.gz\"}, {\"image\": \"s0204/ct.nii.gz\"}, {\"image\": \"s0201/ct.nii.gz\"}, {\"image\": \"s0199/ct.nii.gz\"}, {\"image\": \"s0197/ct.nii.gz\"}, {\"image\": \"s0196/ct.nii.gz\"}, {\"image\": \"s0194/ct.nii.gz\"}, {\"image\": \"s0192/ct.nii.gz\"}, {\"image\": \"s0191/ct.nii.gz\"}, {\"image\": \"s0190/ct.nii.gz\"}, {\"image\": \"s0189/ct.nii.gz\"}, {\"image\": \"s0187/ct.nii.gz\"}, {\"image\": \"s0185/ct.nii.gz\"}, {\"image\": \"s0184/ct.nii.gz\"}, {\"image\": \"s0183/ct.nii.gz\"}, {\"image\": \"s0182/ct.nii.gz\"}, {\"image\": \"s0181/ct.nii.gz\"}, {\"image\": \"s0179/ct.nii.gz\"}, {\"image\": \"s0178/ct.nii.gz\"}, {\"image\": \"s0175/ct.nii.gz\"}, {\"image\": \"s0174/ct.nii.gz\"}, {\"image\": \"s0171/ct.nii.gz\"}, {\"image\": \"s0169/ct.nii.gz\"}, {\"image\": \"s0168/ct.nii.gz\"}, {\"image\": \"s0167/ct.nii.gz\"}, {\"image\": \"s0166/ct.nii.gz\"}, {\"image\": \"s0165/ct.nii.gz\"}, {\"image\": \"s0163/ct.nii.gz\"}, {\"image\": \"s0161/ct.nii.gz\"}, {\"image\": \"s0160/ct.nii.gz\"}, {\"image\": \"s0159/ct.nii.gz\"}, {\"image\": \"s0158/ct.nii.gz\"}, {\"image\": \"s0157/ct.nii.gz\"}, {\"image\": \"s0156/ct.nii.gz\"}, {\"image\": \"s0154/ct.nii.gz\"}, {\"image\": \"s0153/ct.nii.gz\"}, {\"image\": \"s0152/ct.nii.gz\"}, {\"image\": \"s0151/ct.nii.gz\"}, {\"image\": \"s0149/ct.nii.gz\"}, {\"image\": \"s0146/ct.nii.gz\"}, {\"image\": \"s0145/ct.nii.gz\"}, {\"image\": 
\"s0143/ct.nii.gz\"}, {\"image\": \"s0140/ct.nii.gz\"}, {\"image\": \"s0139/ct.nii.gz\"}, {\"image\": \"s0138/ct.nii.gz\"}, {\"image\": \"s0137/ct.nii.gz\"}, {\"image\": \"s0136/ct.nii.gz\"}, {\"image\": \"s0135/ct.nii.gz\"}, {\"image\": \"s0133/ct.nii.gz\"}, {\"image\": \"s0132/ct.nii.gz\"}, {\"image\": \"s0130/ct.nii.gz\"}, {\"image\": \"s0128/ct.nii.gz\"}, {\"image\": \"s0123/ct.nii.gz\"}, {\"image\": \"s0120/ct.nii.gz\"}, {\"image\": \"s0119/ct.nii.gz\"}, {\"image\": \"s0117/ct.nii.gz\"}, {\"image\": \"s0115/ct.nii.gz\"}, {\"image\": \"s0114/ct.nii.gz\"}, {\"image\": \"s0110/ct.nii.gz\"}, {\"image\": \"s0109/ct.nii.gz\"}, {\"image\": \"s0108/ct.nii.gz\"}, {\"image\": \"s0107/ct.nii.gz\"}, {\"image\": \"s0106/ct.nii.gz\"}, {\"image\": \"s0104/ct.nii.gz\"}, {\"image\": \"s0103/ct.nii.gz\"}, {\"image\": \"s0102/ct.nii.gz\"}, {\"image\": \"s0101/ct.nii.gz\"}, {\"image\": \"s0099/ct.nii.gz\"}, {\"image\": \"s0098/ct.nii.gz\"}, {\"image\": \"s0096/ct.nii.gz\"}, {\"image\": \"s0095/ct.nii.gz\"}, {\"image\": \"s0092/ct.nii.gz\"}, {\"image\": \"s0091/ct.nii.gz\"}, {\"image\": \"s0090/ct.nii.gz\"}, {\"image\": \"s0089/ct.nii.gz\"}, {\"image\": \"s0086/ct.nii.gz\"}, {\"image\": \"s0085/ct.nii.gz\"}, {\"image\": \"s0082/ct.nii.gz\"}, {\"image\": \"s0081/ct.nii.gz\"}, {\"image\": \"s0080/ct.nii.gz\"}, {\"image\": \"s0079/ct.nii.gz\"}, {\"image\": \"s0078/ct.nii.gz\"}, {\"image\": \"s0077/ct.nii.gz\"}, {\"image\": \"s0076/ct.nii.gz\"}, {\"image\": \"s0075/ct.nii.gz\"}, {\"image\": \"s0074/ct.nii.gz\"}, {\"image\": \"s0073/ct.nii.gz\"}, {\"image\": \"s0072/ct.nii.gz\"}, {\"image\": \"s0071/ct.nii.gz\"}, {\"image\": \"s0070/ct.nii.gz\"}, {\"image\": \"s0069/ct.nii.gz\"}, {\"image\": \"s0068/ct.nii.gz\"}, {\"image\": \"s0067/ct.nii.gz\"}, {\"image\": \"s0066/ct.nii.gz\"}, {\"image\": \"s0063/ct.nii.gz\"}, {\"image\": \"s0062/ct.nii.gz\"}, {\"image\": \"s0061/ct.nii.gz\"}, {\"image\": \"s0058/ct.nii.gz\"}, {\"image\": \"s0057/ct.nii.gz\"}, {\"image\": \"s0054/ct.nii.gz\"}, 
{\"image\": \"s0053/ct.nii.gz\"}, {\"image\": \"s0052/ct.nii.gz\"}, {\"image\": \"s0050/ct.nii.gz\"}, {\"image\": \"s0049/ct.nii.gz\"}, {\"image\": \"s0048/ct.nii.gz\"}, {\"image\": \"s0046/ct.nii.gz\"}, {\"image\": \"s0044/ct.nii.gz\"}, {\"image\": \"s0040/ct.nii.gz\"}, {\"image\": \"s0039/ct.nii.gz\"}, {\"image\": \"s0038/ct.nii.gz\"}, {\"image\": \"s0037/ct.nii.gz\"}, {\"image\": \"s0036/ct.nii.gz\"}, {\"image\": \"s0035/ct.nii.gz\"}, {\"image\": \"s0034/ct.nii.gz\"}, {\"image\": \"s0032/ct.nii.gz\"}, {\"image\": \"s0031/ct.nii.gz\"}, {\"image\": \"s0030/ct.nii.gz\"}, {\"image\": \"s0029/ct.nii.gz\"}, {\"image\": \"s0025/ct.nii.gz\"}, {\"image\": \"s0021/ct.nii.gz\"}, {\"image\": \"s0019/ct.nii.gz\"}, {\"image\": \"s0016/ct.nii.gz\"}, {\"image\": \"s0015/ct.nii.gz\"}, {\"image\": \"s0014/ct.nii.gz\"}, {\"image\": \"s0013/ct.nii.gz\"}, {\"image\": \"s0012/ct.nii.gz\"}, {\"image\": \"s0011/ct.nii.gz\"}, {\"image\": \"s0010/ct.nii.gz\"}, {\"image\": \"s0009/ct.nii.gz\"}, {\"image\": \"s0006/ct.nii.gz\"}, {\"image\": \"s0004/ct.nii.gz\"}, {\"image\": \"s0003/ct.nii.gz\"}, {\"image\": \"s0002/ct.nii.gz\"}, {\"image\": \"s0001/ct.nii.gz\"}]}"
  },
  {
    "path": "jsons/__init__.py",
    "content": ""
  },
  {
    "path": "jsons/btcv.json",
    "content": "{\"training\": [{\"image\": \"imagesTr/img0001.nii.gz\"}, {\"image\": \"imagesTr/img0002.nii.gz\"}, {\"image\": \"imagesTr/img0003.nii.gz\"}, {\"image\": \"imagesTr/img0004.nii.gz\"}, {\"image\": \"imagesTr/img0005.nii.gz\"}, {\"image\": \"imagesTr/img0006.nii.gz\"}, {\"image\": \"imagesTr/img0007.nii.gz\"}, {\"image\": \"imagesTr/img0008.nii.gz\"}, {\"image\": \"imagesTr/img0009.nii.gz\"}, {\"image\": \"imagesTr/img0010.nii.gz\"}, {\"image\": \"imagesTr/img0021.nii.gz\"}, {\"image\": \"imagesTr/img0022.nii.gz\"}, {\"image\": \"imagesTr/img0023.nii.gz\"}, {\"image\": \"imagesTr/img0024.nii.gz\"}, {\"image\": \"imagesTr/img0025.nii.gz\"}, {\"image\": \"imagesTr/img0026.nii.gz\"}, {\"image\": \"imagesTr/img0027.nii.gz\"}, {\"image\": \"imagesTr/img0028.nii.gz\"}, {\"image\": \"imagesTr/img0029.nii.gz\"}, {\"image\": \"imagesTr/img0030.nii.gz\"}, {\"image\": \"imagesTr/img0031.nii.gz\"}, {\"image\": \"imagesTr/img0032.nii.gz\"}, {\"image\": \"imagesTr/img0033.nii.gz\"}, {\"image\": \"imagesTr/img0034.nii.gz\"}], \"validation\": [{\"image\": \"imagesTr/img0035.nii.gz\"}, {\"image\": \"imagesTr/img0036.nii.gz\"}, {\"image\": \"imagesTr/img0037.nii.gz\"}, {\"image\": \"imagesTr/img0038.nii.gz\"}, {\"image\": \"imagesTr/img0039.nii.gz\"}, {\"image\": \"imagesTr/img0040.nii.gz\"}]}"
  },
  {
    "path": "jsons/dataset_LUNA16_0.json",
    "content": "{\"training\": [{\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.979083010707182900091062408058.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.979083010707182900091062408058.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.154677396354641150280013275227.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.154677396354641150280013275227.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.752756872840730509471096155114.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.752756872840730509471096155114.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.621916089407825046337959219998.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.621916089407825046337959219998.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.124154461048929153767743874565.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.124154461048929153767743874565.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.397062004302272014259317520874.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.397062004302272014259317520874.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.805925269324902055566754756843.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.805925269324902055566754756843.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.295298571102631191572192562523.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.295298571102631191572192562523.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.450501966058662668272378865145.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.450501966058662668272378865145.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.534083630500464995109143618896.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.534083630500464995109143618896.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.130438550890816550994739120843.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.130438550890816550994739120843.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.534006575256943390479252771547.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.534006575256943390479252771547.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.231645134739451754302647733304.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.231645134739451754302647733304.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.334517907433161353885866806005.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.334517907433161353885866806005.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.311981398931043315779172047718.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.311981398931043315779172047718.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.868211851413924881662621747734.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.868211851413924881662621747734.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.238522526736091851696274044574.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.238522526736091851696274044574.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.305858704835252413616501469037.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.305858704835252413616501469037.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.417815314896088956784723476543.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.417815314896088956784723476543.nii.gz\"}, {\"image\": 
\"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.332453873575389860371315979768.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.332453873575389860371315979768.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.333145094436144085379032922488.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.333145094436144085379032922488.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.194440094986948071643661798326.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.194440094986948071643661798326.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.404364125369979066736354549484.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.404364125369979066736354549484.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.122763913896761494371822656720.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.122763913896761494371822656720.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.317087518531899043292346860596.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.317087518531899043292346860596.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.194465340552956447447896167830.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.194465340552956447447896167830.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.832260670372728970918746541371.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.832260670372728970918746541371.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.716498695101447665580610403574.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.716498695101447665580610403574.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.281489753704424911132261151767.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.281489753704424911132261151767.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.395623571499047043765181005112.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.395623571499047043765181005112.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.141069661700670042960678408762.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.141069661700670042960678408762.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.295420274214095686326263147663.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.295420274214095686326263147663.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.216882370221919561230873289517.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.216882370221919561230873289517.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.323859712968543712594665815359.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.323859712968543712594665815359.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.280972147860943609388015648430.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.280972147860943609388015648430.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.293757615532132808762625441831.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.293757615532132808762625441831.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.210837812047373739447725050963.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.210837812047373739447725050963.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.137763212752154081977261297097.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.137763212752154081977261297097.nii.gz\"}, {\"image\": 
\"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.430109407146633213496148200410.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.430109407146633213496148200410.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.139258777898746693365877042411.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.139258777898746693365877042411.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.278660284797073139172446973682.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.278660284797073139172446973682.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.303421828981831854739626597495.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.303421828981831854739626597495.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.134996872583497382954024478441.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.134996872583497382954024478441.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.213140617640021803112060161074.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.213140617640021803112060161074.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.286647622786041008124419915089.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.286647622786041008124419915089.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.315214756157389122376518747372.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.315214756157389122376518747372.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.193808128386712859512130599234.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.193808128386712859512130599234.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.169128136262002764211589185953.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.169128136262002764211589185953.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.616033753016904899083676284739.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.616033753016904899083676284739.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.183184435049555024219115904825.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.183184435049555024219115904825.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.503980049263254396021509831276.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.503980049263254396021509831276.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.179162671133894061547290922949.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.179162671133894061547290922949.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.336894364358709782463716339027.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.336894364358709782463716339027.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.139595277234735528205899724196.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.139595277234735528205899724196.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.300271604576987336866436407488.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.300271604576987336866436407488.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.275766318636944297772360944907.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.275766318636944297772360944907.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.892375496445736188832556446335.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.892375496445736188832556446335.nii.gz\"}, {\"image\": 
\"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.162901839201654862079549658100.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.162901839201654862079549658100.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.140527383975300992150799777603.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.140527383975300992150799777603.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.325164338773720548739146851679.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.325164338773720548739146851679.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.222087811960706096424718056430.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.222087811960706096424718056430.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.250397690690072950000431855143.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.250397690690072950000431855143.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.479402560265137632920333093071.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.479402560265137632920333093071.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.756684168227383088294595834066.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.756684168227383088294595834066.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.168605638657404145360275453085.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.168605638657404145360275453085.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.173106154739244262091404659845.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.173106154739244262091404659845.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.104562737760173137525888934217.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.104562737760173137525888934217.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.128881800399702510818644205032.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.128881800399702510818644205032.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.243094273518213382155770295147.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.243094273518213382155770295147.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.121824995088859376862458155637.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.121824995088859376862458155637.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.114218724025049818743426522343.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.114218724025049818743426522343.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.111017101339429664883879536171.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.111017101339429664883879536171.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.134370886216012873213579659366.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.134370886216012873213579659366.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.168037818448885856452592057286.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.168037818448885856452592057286.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.259543921154154401875872845498.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.259543921154154401875872845498.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.314789075871001236641548593165.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.314789075871001236641548593165.nii.gz\"}, {\"image\": 
\"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.184019785706727365023450012318.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.184019785706727365023450012318.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.272961322147784625028175033640.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.272961322147784625028175033640.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.171919524048654494439256263785.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.171919524048654494439256263785.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.113697708991260454310623082679.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.113697708991260454310623082679.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.458525794434429386945463560826.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.458525794434429386945463560826.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.674809958213117379592437424616.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.674809958213117379592437424616.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.206539885154775002929031534291.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.206539885154775002929031534291.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.108231420525711026834210228428.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.108231420525711026834210228428.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.309672797925724868457151381131.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.309672797925724868457151381131.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.161073793312426102774780216551.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.161073793312426102774780216551.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.690929968028676628605553365896.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.690929968028676628605553365896.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.208737629504245244513001631764.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.208737629504245244513001631764.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.216652640878960522552873394709.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.216652640878960522552873394709.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.335866409407244673864352309754.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.335866409407244673864352309754.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.282512043257574309474415322775.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.282512043257574309474415322775.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.270390050141765094612147226290.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.270390050141765094612147226290.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.146603910507557786636779705509.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.146603910507557786636779705509.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.561458563853929400124470098603.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.561458563853929400124470098603.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.162207236104936931957809623059.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.162207236104936931957809623059.nii.gz\"}, {\"image\": 
\"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.768276876111112560631432843476.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.768276876111112560631432843476.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.247769845138587733933485039556.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.247769845138587733933485039556.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.179049373636438705059720603192.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.179049373636438705059720603192.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.145759169833745025756371695397.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.145759169833745025756371695397.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.952265563663939823135367733681.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.952265563663939823135367733681.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.655242448149322898770987310561.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.655242448149322898770987310561.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.186021279664749879526003668137.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.186021279664749879526003668137.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.331211682377519763144559212009.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.331211682377519763144559212009.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.308183340111270052562662456038.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.308183340111270052562662456038.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.663019255629770796363333877035.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.663019255629770796363333877035.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.310395752124284049604069960014.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.310395752124284049604069960014.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.161002239822118346732951898613.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.161002239822118346732951898613.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.231002159523969307155990628066.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.231002159523969307155990628066.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.193408384740507320589857096592.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.193408384740507320589857096592.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.226456162308124493341905600418.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.226456162308124493341905600418.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.326057189095429101398977448288.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.326057189095429101398977448288.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.861997885565255340442123234170.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.861997885565255340442123234170.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.231834776365874788440767645596.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.231834776365874788440767645596.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.100684836163890911914061745866.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.100684836163890911914061745866.nii.gz\"}, {\"image\": 
\"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.128059192202504367870633619224.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.128059192202504367870633619224.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.888291896309937415860209787179.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.888291896309937415860209787179.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.152684536713461901635595118048.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.152684536713461901635595118048.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.183843376225716802567192412456.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.183843376225716802567192412456.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.287966244644280690737019247886.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.287966244644280690737019247886.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.910607280658963002048724648683.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.910607280658963002048724648683.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.163994693532965040247348251579.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.163994693532965040247348251579.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.935683764293840351008008793409.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.935683764293840351008008793409.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.197987940182806628828566429132.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.197987940182806628828566429132.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.802595762867498341201607992711.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.802595762867498341201607992711.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.259018373683540453277752706262.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.259018373683540453277752706262.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.144943344795414353192059796098.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.144943344795414353192059796098.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.801945620899034889998809817499.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.801945620899034889998809817499.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.200558451375970945040979397866.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.200558451375970945040979397866.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.334105754605642100456249422350.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.334105754605642100456249422350.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.162718361851587451505896742103.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.162718361851587451505896742103.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.970264865033574190975654369557.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.970264865033574190975654369557.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.306948744223170422945185006551.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.306948744223170422945185006551.nii.gz\"}, {\"image\": \"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.106719103982792863757268101375.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.106719103982792863757268101375.nii.gz\"}, {\"image\": 
\"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.443400977949406454649939526179.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.443400977949406454649939526179.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.212346425055214308006918165305.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.212346425055214308006918165305.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.504324996863016748259361352296.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.504324996863016748259361352296.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.232071262560365924176679652948.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.232071262560365924176679652948.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.159996104466052855396410079250.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.159996104466052855396410079250.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.299767339686526858593516834230.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.299767339686526858593516834230.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.283733738239331719775105586296.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.283733738239331719775105586296.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.184412674007117333405073397832.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.184412674007117333405073397832.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.922852847124879997825997808179.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.922852847124879997825997808179.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.939152384493874708850321969356.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.939152384493874708850321969356.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.240969450540588211676803094518.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.240969450540588211676803094518.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.461155505515403114280165935891.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.461155505515403114280165935891.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.126704785377921920210612476953.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.126704785377921920210612476953.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.281967919138248195763602360723.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.281967919138248195763602360723.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.306788423710427765311352901943.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.306788423710427765311352901943.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.199171741859530285887752432478.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.199171741859530285887752432478.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.144883090372691745980459537053.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.144883090372691745980459537053.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.100621383016233746780170740405.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.100621383016233746780170740405.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.147250707071097813243473865421.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.147250707071097813243473865421.nii.gz\"}, {\"image\": 
\"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.143412474064515942785157561636.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.143412474064515942785157561636.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.776429308535398795601496131524.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.776429308535398795601496131524.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.176362912420491262783064585333.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.176362912420491262783064585333.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.117383608379722740629083782428.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.117383608379722740629083782428.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.943403138251347598519939390311.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.943403138251347598519939390311.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.199975006921901879512837687266.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.199975006921901879512837687266.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.156322145453198768801776721493.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.156322145453198768801776721493.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.227796349777753378641347819780.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.227796349777753378641347819780.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.226383054119800793308721198594.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.226383054119800793308721198594.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.311236942972970815890902714604.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.311236942972970815890902714604.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.270152671889301412052226973069.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.270152671889301412052226973069.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.121993590721161347818774929286.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.121993590721161347818774929286.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.235364978775280910367690540811.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.235364978775280910367690540811.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.280072876841890439628529365478.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.280072876841890439628529365478.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.225154811831720426832024114593.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.225154811831720426832024114593.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.102133688497886810253331438797.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.102133688497886810253331438797.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.139444426690868429919252698606.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.139444426690868429919252698606.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.191301539558980174217770205256.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.191301539558980174217770205256.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.124663713663969377020085460568.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.124663713663969377020085460568.nii.gz\"}, {\"image\": 
\"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.183924380327950237519832859527.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.183924380327950237519832859527.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.159521777966998275980367008904.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.159521777966998275980367008904.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.885292267869246639232975687131.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.885292267869246639232975687131.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.153536305742006952753134773630.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.153536305742006952753134773630.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.259227883564429312164962953756.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.259227883564429312164962953756.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.267519732763035023633235877753.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.267519732763035023633235877753.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.137375498893536422914241295628.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.137375498893536422914241295628.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.466284753932369813717081722101.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.466284753932369813717081722101.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.557875302364105947813979213632.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.557875302364105947813979213632.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.265133389948279331857097127422.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.265133389948279331857097127422.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.743969234977916254223533321294.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.743969234977916254223533321294.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.323753921818102744511069914832.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.323753921818102744511069914832.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.192256506776434538421891524301.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.192256506776434538421891524301.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.302557165094691896097534021075.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.302557165094691896097534021075.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.213022585153512920098588556742.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.213022585153512920098588556742.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.253283426904813468115158375647.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.253283426904813468115158375647.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.283569726884265181140892667131.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.283569726884265181140892667131.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.470912100568074901744259213968.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.470912100568074901744259213968.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.223098610241551815995595311693.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.223098610241551815995595311693.nii.gz\"}, {\"image\": 
\"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.199220738144407033276946096708.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.199220738144407033276946096708.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.296863826932699509516219450076.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.296863826932699509516219450076.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.163901773171373940247829492387.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.163901773171373940247829492387.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.669518152156802508672627785405.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.669518152156802508672627785405.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.221945191226273284587353530424.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.221945191226273284587353530424.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.113586291551175790743673929831.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.113586291551175790743673929831.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.267957701183569638795986183786.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.267957701183569638795986183786.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.220205300714852483483213840572.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.220205300714852483483213840572.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.116492508532884962903000261147.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.116492508532884962903000261147.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.133378195429627807109985347209.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.133378195429627807109985347209.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.325580698241281352835338693869.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.325580698241281352835338693869.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.216526102138308489357443843021.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.216526102138308489357443843021.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.217589936421986638139451480826.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.217589936421986638139451480826.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.252634638822000832774167856951.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.252634638822000832774167856951.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.217697417596902141600884006982.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.217697417596902141600884006982.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.253322967203074795232627653819.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.253322967203074795232627653819.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.803808126682275425758092691689.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.803808126682275425758092691689.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.964952370561266624992539111877.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.964952370561266624992539111877.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.187108608022306504546286626125.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.187108608022306504546286626125.nii.gz\"}, {\"image\": 
\"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.624425075947752229712087113746.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.624425075947752229712087113746.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.227885601428639043345478571594.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.227885601428639043345478571594.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.190298296009658115773239776160.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.190298296009658115773239776160.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.172845185165807139298420209778.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.172845185165807139298420209778.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.142485715518010940961688015191.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.142485715518010940961688015191.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.707218743153927597786179232739.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.707218743153927597786179232739.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.156579001330474859527530187095.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.156579001330474859527530187095.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.217955041973656886482758642958.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.217955041973656886482758642958.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.692598144815688523679745963696.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.692598144815688523679745963696.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.230078008964732806419498631442.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.230078008964732806419498631442.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.265453131727473342790950829556.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.265453131727473342790950829556.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.272259794130271010519952623746.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.272259794130271010519952623746.nii.gz\"}, {\"image\": \"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.187966156856911682643615997798.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.187966156856911682643615997798.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.194488534645348916700259325236.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.194488534645348916700259325236.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.237428977311365557972720635401.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.237428977311365557972720635401.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.172573195301625265149778785969.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.172573195301625265149778785969.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.100953483028192176989979435275.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.100953483028192176989979435275.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.268838889380981659524993261082.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.268838889380981659524993261082.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.203741923654363010377298352671.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.203741923654363010377298352671.nii.gz\"}, {\"image\": 
\"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.404768898286087278137462774930.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.404768898286087278137462774930.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.278010349511857248000260557753.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.278010349511857248000260557753.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.191617711875409989053242965150.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.191617711875409989053242965150.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.191266041369462391833537519639.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.191266041369462391833537519639.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.323541312620128092852212458228.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.323541312620128092852212458228.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.334022941831199910030220864961.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.334022941831199910030220864961.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.276556509002726404418399209377.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.276556509002726404418399209377.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.487745546557477250336016826588.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.487745546557477250336016826588.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.199069398344356765037879821616.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.199069398344356765037879821616.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.339142594937666268384335506819.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.339142594937666268384335506819.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.319009811633846643966578282371.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.319009811633846643966578282371.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.268589491017129166376960414534.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.268589491017129166376960414534.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.225227615446398900698431118292.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.225227615446398900698431118292.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.135657246677982059395844827629.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.135657246677982059395844827629.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.205852555362702089950453265567.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.205852555362702089950453265567.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.125356649712550043958727288500.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.125356649712550043958727288500.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.244204120220889433826451158706.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.244204120220889433826451158706.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.148447286464082095534651426689.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.148447286464082095534651426689.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.313283554967554803238484128406.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.313283554967554803238484128406.nii.gz\"}, {\"image\": 
\"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.254254303842550572473665729969.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.254254303842550572473665729969.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.324567010179873305471925391582.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.324567010179873305471925391582.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.614147706162329660656328811671.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.614147706162329660656328811671.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.215086589927307766627151367533.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.215086589927307766627151367533.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.202187810895588720702176009630.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.202187810895588720702176009630.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.177785764461425908755977367558.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.177785764461425908755977367558.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.244447966386688625240438849169.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.244447966386688625240438849169.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.398955972049286139436103068984.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.398955972049286139436103068984.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.324290109423920971676288828329.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.324290109423920971676288828329.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.822128649427327893802314908658.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.822128649427327893802314908658.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.199670099218798685977406484591.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.199670099218798685977406484591.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.204303454658845815034433453512.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.204303454658845815034433453512.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.272190966764020277652079081128.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.272190966764020277652079081128.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.961063442349005937536597225349.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.961063442349005937536597225349.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.277662902666135640561346462196.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.277662902666135640561346462196.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.314519596680450457855054746285.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.314519596680450457855054746285.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.202476538079060560282495099956.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.202476538079060560282495099956.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.975426625618184773401026809852.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.975426625618184773401026809852.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.252697338970999211181671881792.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.252697338970999211181671881792.nii.gz\"}, {\"image\": 
\"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.603166427542096384265514998412.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.603166427542096384265514998412.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.106164978370116976238911317774.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.106164978370116976238911317774.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.229664630348267553620068691756.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.229664630348267553620068691756.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.292057261351416339496913597985.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.292057261351416339496913597985.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.219349715895470349269596532320.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.219349715895470349269596532320.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.160216916075817913953530562493.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.160216916075817913953530562493.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.269075535958871753309238331179.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.269075535958871753309238331179.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.167919147233131417984739058859.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.167919147233131417984739058859.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.210426531621179400035178209430.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.210426531621179400035178209430.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.154703816225841204080664115280.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.154703816225841204080664115280.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.208511362832825683639135205368.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.208511362832825683639135205368.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.969607480572818589276327766720.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.969607480572818589276327766720.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.145474881373882284343459153872.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.145474881373882284343459153872.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.275007193025729362844652516689.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.275007193025729362844652516689.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.179730018513720561213088132029.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.179730018513720561213088132029.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.244681063194071446501270815660.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.244681063194071446501270815660.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.367204840301639918160517361062.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.367204840301639918160517361062.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.300246184547502297539521283806.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.300246184547502297539521283806.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.336225579776978874775723463327.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.336225579776978874775723463327.nii.gz\"}, {\"image\": 
\"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.710845873679853791427022019413.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.710845873679853791427022019413.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.171667800241622018839592854574.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.171667800241622018839592854574.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.970428941353693253759289796610.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.970428941353693253759289796610.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.202464973819273687476049035824.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.202464973819273687476049035824.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.293593766328917170359373773080.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.293593766328917170359373773080.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.264090899378396711987322794314.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.264090899378396711987322794314.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.965620538050807352935663552285.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.965620538050807352935663552285.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.162845309248822193437735868939.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.162845309248822193437735868939.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.100620385482151095585000946543.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.100620385482151095585000946543.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.127965161564033605177803085629.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.127965161564033605177803085629.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.619372068417051974713149104919.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.619372068417051974713149104919.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.625270601160880745954773142570.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.625270601160880745954773142570.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.274052674198758621258447180130.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.274052674198758621258447180130.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.321465552859463184018938648244.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.321465552859463184018938648244.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.126631670596873065041988320084.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.126631670596873065041988320084.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.319066480138812986026181758474.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.319066480138812986026181758474.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.197063290812663596858124411210.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.197063290812663596858124411210.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.121391737347333465796214915391.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.121391737347333465796214915391.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.177985905159808659201278495182.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.177985905159808659201278495182.nii.gz\"}, {\"image\": 
\"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.149463915556499304732434215056.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.149463915556499304732434215056.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.192419869605596446455526220766.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.192419869605596446455526220766.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.177685820605315926524514718990.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.177685820605315926524514718990.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.481278873893653517789960724156.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.481278873893653517789960724156.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.842317928015463083368074520378.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.842317928015463083368074520378.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.123697637451437522065941162930.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.123697637451437522065941162930.nii.gz\"}, {\"image\": \"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.850739282072340578344345230132.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.850739282072340578344345230132.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.265775376735520890308424143898.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.265775376735520890308424143898.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.161855583909753609742728521805.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.161855583909753609742728521805.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.177252583002664900748714851615.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.177252583002664900748714851615.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.100530488926682752765845212286.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.100530488926682752765845212286.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.333319057944372470283038483725.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.333319057944372470283038483725.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.185226274332527104841463955058.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.185226274332527104841463955058.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.307835307280028057486413359377.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.307835307280028057486413359377.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.141430002307216644912805017227.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.141430002307216644912805017227.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.141511313712034597336182402384.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.141511313712034597336182402384.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.339546614783708685476232944897.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.339546614783708685476232944897.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.259123825760999546551970425757.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.259123825760999546551970425757.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.148229375703208214308676934766.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.148229375703208214308676934766.nii.gz\"}, {\"image\": 
\"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.312704771348460502013249647868.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.312704771348460502013249647868.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.885168397833922082085837240429.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.885168397833922082085837240429.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.218476624578721885561483687176.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.218476624578721885561483687176.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.304676828064484590312919543151.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.304676828064484590312919543151.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.300146276266881736689307479986.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.300146276266881736689307479986.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.146987333806092287055399155268.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.146987333806092287055399155268.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.780558315515979171413904604168.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.780558315515979171413904604168.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.164988920331211858091402361989.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.164988920331211858091402361989.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.216252660192313507027754194207.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.216252660192313507027754194207.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.897684031374557757145405000951.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.897684031374557757145405000951.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.316393351033132458296975008261.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.316393351033132458296975008261.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.779493719385047675154892222907.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.779493719385047675154892222907.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.232011770495640253949434620907.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.232011770495640253949434620907.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.463214953282361219537913355115.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.463214953282361219537913355115.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.103115201714075993579787468219.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.103115201714075993579787468219.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.390009458146468860187238398197.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.390009458146468860187238398197.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.248357157975955379661896491341.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.248357157975955379661896491341.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.631047517458234322522264161877.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.631047517458234322522264161877.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.265960756233787099041040311282.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.265960756233787099041040311282.nii.gz\"}, {\"image\": 
\"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.811825890493256320617655474043.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.811825890493256320617655474043.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.178680586845223339579041794709.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.178680586845223339579041794709.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.428038562098395445838061018440.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.428038562098395445838061018440.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.797637294244261543517154417124.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.797637294244261543517154417124.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.670107649586205629860363487713.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.670107649586205629860363487713.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.244590453955380448651329424024.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.244590453955380448651329424024.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.230416590143922549745658357505.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.230416590143922549745658357505.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.385151742584074711135621089321.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.385151742584074711135621089321.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.211051626197585058967163339846.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.211051626197585058967163339846.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.156821379677057223126714881626.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.156821379677057223126714881626.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.200837896655745926888305239398.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.200837896655745926888305239398.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.584871944187559733312703328980.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.584871944187559733312703328980.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.390513733720659266816639651938.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.390513733720659266816639651938.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.741709061958490690246385302477.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.741709061958490690246385302477.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.330425234131526435132846006585.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.330425234131526435132846006585.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.303865116731361029078599241306.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.303865116731361029078599241306.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.214800939017429618305208626314.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.214800939017429618305208626314.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.209269973797560820442292189762.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.209269973797560820442292189762.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.320967206808467952819309001585.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.320967206808467952819309001585.nii.gz\"}, {\"image\": 
\"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.114195693932194925962391697338.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.114195693932194925962391697338.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.119806527488108718706404165837.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.119806527488108718706404165837.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.438308540025607517017949816111.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.438308540025607517017949816111.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.276710697414087561012670296643.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.276710697414087561012670296643.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.826829446346820089862659555750.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.826829446346820089862659555750.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.173101104804533997398137418032.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.173101104804533997398137418032.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.211071908915618528829547301883.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.211071908915618528829547301883.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.799582546798528864710752164515.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.799582546798528864710752164515.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.268992195564407418480563388746.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.268992195564407418480563388746.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.203179378754043776171267611064.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.203179378754043776171267611064.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.401389720232123950202941034290.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.401389720232123950202941034290.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.189483585244687808087477024767.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.189483585244687808087477024767.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.107351566259572521472765997306.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.107351566259572521472765997306.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.270215889102603268207599305185.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.270215889102603268207599305185.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.242761658169703141430370511586.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.242761658169703141430370511586.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.163931625580639955914619627409.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.163931625580639955914619627409.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.287560874054243719452635194040.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.287560874054243719452635194040.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.252358625003143649770119512644.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.252358625003143649770119512644.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.125067060506283419853742462394.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.125067060506283419853742462394.nii.gz\"}, {\"image\": 
\"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.205993750485568250373835565680.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.205993750485568250373835565680.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.419601611032172899567156073142.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.419601611032172899567156073142.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.115386642382564804180764325545.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.115386642382564804180764325545.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.104780906131535625872840889059.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.104780906131535625872840889059.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.202643836890896697853521610450.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.202643836890896697853521610450.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.329326052298830421573852261436.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.329326052298830421573852261436.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.228511122591230092662900221600.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.228511122591230092662900221600.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.320111824803959660037459294083.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.320111824803959660037459294083.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.204802250386343794613980417281.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.204802250386343794613980417281.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.232058316950007760548968840196.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.232058316950007760548968840196.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.198698492013538481395497694975.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.198698492013538481395497694975.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.122914038048856168343065566972.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.122914038048856168343065566972.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.449254134266555649028108149727.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.449254134266555649028108149727.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.337845202462615014431060697507.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.337845202462615014431060697507.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.304700823314998198591652152637.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.304700823314998198591652152637.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.261678072503577216586082745513.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.261678072503577216586082745513.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.910435939545691201820711078950.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.910435939545691201820711078950.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.211956804948320236390242845468.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.211956804948320236390242845468.nii.gz\"}, {\"image\": \"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.145283812746259413053188838096.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.145283812746259413053188838096.nii.gz\"}, {\"image\": 
\"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.142154819868944114554521645782.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.142154819868944114554521645782.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.338114620394879648539943280992.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.338114620394879648539943280992.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.245349763807614756148761326488.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.245349763807614756148761326488.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.309955814083231537823157605135.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.309955814083231537823157605135.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.308655308958459380153492314021.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.308655308958459380153492314021.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.188484197846284733942365679565.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.188484197846284733942365679565.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.174935793360491516757154875981.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.174935793360491516757154875981.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.339882192295517122002429068974.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.339882192295517122002429068974.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.285926554490515269336267972830.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.285926554490515269336267972830.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.205523326998654833765855998037.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.205523326998654833765855998037.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.246589849815292078281051154201.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.246589849815292078281051154201.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.234400932423244218697302970157.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.234400932423244218697302970157.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.133132722052053001903031735878.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.133132722052053001903031735878.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.101228986346984399347858840086.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.101228986346984399347858840086.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.160124400349792614505500125883.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.160124400349792614505500125883.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.258220324170977900491673635112.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.258220324170977900491673635112.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.504845428620607044098514803031.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.504845428620607044098514803031.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.132817748896065918417924920957.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.132817748896065918417924920957.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.201890795870532056891161597218.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.201890795870532056891161597218.nii.gz\"}, {\"image\": 
\"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.160586340600816116143631200450.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.160586340600816116143631200450.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.750792629100457382099842515038.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.750792629100457382099842515038.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.219428004988664846407984058588.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.219428004988664846407984058588.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.138894439026794145866157853158.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.138894439026794145866157853158.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.244442540088515471945035689377.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.244442540088515471945035689377.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.229171189693734694696158152904.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.229171189693734694696158152904.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.627998298349675613581885874395.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.627998298349675613581885874395.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.439153572396640163898529626096.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.439153572396640163898529626096.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.255999614855292116767517149228.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.255999614855292116767517149228.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.275986221854423197884953496664.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.275986221854423197884953496664.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.273525289046256012743471155680.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.273525289046256012743471155680.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.328695385904874796172316226975.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.328695385904874796172316226975.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.334184846571549530235084187602.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.334184846571549530235084187602.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.176638348958425792989125209419.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.176638348958425792989125209419.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.143782059748737055784173697516.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.143782059748737055784173697516.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.110678335949765929063942738609.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.110678335949765929063942738609.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.100332161840553388986847034053.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.100332161840553388986847034053.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.106419850406056634877579573537.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.106419850406056634877579573537.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.178391668569567816549737454720.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.178391668569567816549737454720.nii.gz\"}, {\"image\": 
\"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.112740418331256326754121315800.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.112740418331256326754121315800.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.980362852713685276785310240144.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.980362852713685276785310240144.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.323408652979949774528873200770.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.323408652979949774528873200770.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.323899724653546164058849558431.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.323899724653546164058849558431.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.138904664700896606480369521124.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.138904664700896606480369521124.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.100398138793540579077826395208.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.100398138793540579077826395208.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.219281726101239572270900838145.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.219281726101239572270900838145.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.120196332569034738680965284519.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.120196332569034738680965284519.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.245391706475696258069508046497.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.245391706475696258069508046497.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.866845763956586959109892274084.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.866845763956586959109892274084.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.200841000324240313648595016964.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.200841000324240313648595016964.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.290135156874098366424871975734.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.290135156874098366424871975734.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.129567032250534530765928856531.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.129567032250534530765928856531.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.111780708132595903430640048766.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.111780708132595903430640048766.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.338875090785618956575597613546.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.338875090785618956575597613546.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.188265424231150847356515802868.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.188265424231150847356515802868.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.553241901808946577644850294647.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.553241901808946577644850294647.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.174907798609768549012640380786.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.174907798609768549012640380786.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.246225645401227472829175288633.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.246225645401227472829175288633.nii.gz\"}, {\"image\": 
\"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.394470743585708729682444806008.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.394470743585708729682444806008.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.299476369290630280560355838785.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.299476369290630280560355838785.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.238855414831158993232534884296.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.238855414831158993232534884296.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.745109871503276594185453478952.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.745109871503276594185453478952.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.725023183844147505748475581290.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.725023183844147505748475581290.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.910757789941076242457816491305.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.910757789941076242457816491305.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.262736997975960398949912434623.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.262736997975960398949912434623.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.257840703452266097926250569223.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.257840703452266097926250569223.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.693480911433291675609148051914.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.693480911433291675609148051914.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.296738183013079390785739615169.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.296738183013079390785739615169.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.154837327827713479309898027966.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.154837327827713479309898027966.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.596908385953413160131451426904.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.596908385953413160131451426904.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.224465398054769500989828256685.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.224465398054769500989828256685.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.143410010885830403003179808334.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.143410010885830403003179808334.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.275755514659958628040305922764.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.275755514659958628040305922764.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.162351539386551708034407968929.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.162351539386551708034407968929.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.986011151772797848993829243183.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.986011151772797848993829243183.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.118140393257625250121502185026.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.118140393257625250121502185026.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.134638281277099121660656324702.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.134638281277099121660656324702.nii.gz\"}, {\"image\": 
\"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.842980983137518332429408284002.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.842980983137518332429408284002.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.291539125579672469833850180824.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.291539125579672469833850180824.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.183056151780567460322586876100.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.183056151780567460322586876100.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.190144948425835566841437565646.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.190144948425835566841437565646.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.194246472548954252250399902051.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.194246472548954252250399902051.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.140239815496047437552471323962.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.140239815496047437552471323962.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.338104567770715523699587505022.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.338104567770715523699587505022.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.413896555982844732694353377538.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.413896555982844732694353377538.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.301582691063019848479942618641.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.301582691063019848479942618641.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.205615524269596458818376243313.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.205615524269596458818376243313.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.238042459915048190592571019348.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.238042459915048190592571019348.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.152706273988004688708784163325.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.152706273988004688708784163325.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.397202838387416555106806022938.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.397202838387416555106806022938.nii.gz\"}, {\"image\": \"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.111258527162678142285870245028.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.111258527162678142285870245028.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.297251044869095073091780740645.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.297251044869095073091780740645.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.306558074682524259000586270818.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.306558074682524259000586270818.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.249450003033735700817635168066.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.249450003033735700817635168066.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.803987517543436570820681016103.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.803987517543436570820681016103.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.266009527139315622265711325223.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.266009527139315622265711325223.nii.gz\"}, {\"image\": 
\"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.167237290696350215427953159586.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.167237290696350215427953159586.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.240630002689062442926543993263.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.240630002689062442926543993263.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.309955999522338651429118207446.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.309955999522338651429118207446.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.168737928729363683423228050295.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.168737928729363683423228050295.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.161067514225109999586362698069.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.161067514225109999586362698069.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.321935195060268166151738328001.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.321935195060268166151738328001.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.275849601663847251574860892603.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.275849601663847251574860892603.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.475325201787910087416720919680.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.475325201787910087416720919680.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.338447145504282422142824032832.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.338447145504282422142824032832.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.254138388912084634057282064266.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.254138388912084634057282064266.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.119304665257760307862874140576.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.119304665257760307862874140576.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.282779922503707013097174625409.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.282779922503707013097174625409.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.123654356399290048011621921476.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.123654356399290048011621921476.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.235217371152464582553341729176.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.235217371152464582553341729176.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.226564372605239604660221582288.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.226564372605239604660221582288.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.877026508860018521147620598474.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.877026508860018521147620598474.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.658611160253017715059194304729.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.658611160253017715059194304729.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.251215764736737018371915284679.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.251215764736737018371915284679.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.171682845383273105440297561095.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.171682845383273105440297561095.nii.gz\"}, {\"image\": 
\"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.307921770358136677021532761235.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.307921770358136677021532761235.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.173556680294801532247454313511.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.173556680294801532247454313511.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.513023675145166449943177283490.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.513023675145166449943177283490.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.245248446973732759194067808002.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.245248446973732759194067808002.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.725236073737175770730904408416.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.725236073737175770730904408416.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.130765375502800983459674173881.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.130765375502800983459674173881.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.955688628308192728558382581802.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.955688628308192728558382581802.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.465032801496479029639448332481.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.465032801496479029639448332481.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.669435869708883155232318480131.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.669435869708883155232318480131.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.149893110752986700464921264055.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.149893110752986700464921264055.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.316911475886263032009840828684.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.316911475886263032009840828684.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.176030616406569931557298712518.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.176030616406569931557298712518.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.308153138776443962077214577161.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.308153138776443962077214577161.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.463588161905537526756964393219.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.463588161905537526756964393219.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.170921541362033046216100409521.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.170921541362033046216100409521.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.241717018262666382493757419144.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.241717018262666382493757419144.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.329404588567903628160652715124.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.329404588567903628160652715124.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.170052181746004939527661217512.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.170052181746004939527661217512.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.252814707117018427472206147014.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.252814707117018427472206147014.nii.gz\"}, {\"image\": 
\"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.948414623428298219623354433437.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.948414623428298219623354433437.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.404457313935200882843898832756.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.404457313935200882843898832756.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.129007566048223160327836686225.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.129007566048223160327836686225.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.315770913282450940389971401304.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.315770913282450940389971401304.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.652347820272212119124022644822.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.652347820272212119124022644822.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.221017801605543296514746423389.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.221017801605543296514746423389.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.119515474430718803379832249911.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.119515474430718803379832249911.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.182798854785392200340436516930.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.182798854785392200340436516930.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.618434772073433276874225174904.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.618434772073433276874225174904.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.330544495001617450666819906758.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.330544495001617450666819906758.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.164790817284381538042494285101.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.164790817284381538042494285101.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.120842785645314664964010792308.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.120842785645314664964010792308.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.414288023902112119945238126594.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.414288023902112119945238126594.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.292194861362266467652267941663.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.292194861362266467652267941663.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.147325126373007278009743173696.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.147325126373007278009743173696.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.329624439086643515259182406526.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.329624439086643515259182406526.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.313756547848086902190878548835.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.313756547848086902190878548835.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.270951128717816232360812849541.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.270951128717816232360812849541.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.277452631455527999380186898011.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.277452631455527999380186898011.nii.gz\"}, {\"image\": 
\"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.247816269490470394602288565775.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.247816269490470394602288565775.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.168985655485163461062675655739.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.168985655485163461062675655739.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.233652865358649579816568545171.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.233652865358649579816568545171.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.150684298696437181894923266019.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.150684298696437181894923266019.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.117040183261056772902616195387.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.117040183261056772902616195387.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.190937805243443708408459490152.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.190937805243443708408459490152.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.569096986145782511000054443951.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.569096986145782511000054443951.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.316900421002460665752357657094.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.316900421002460665752357657094.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.200725988589959521302320481687.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.200725988589959521302320481687.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.286217539434358186648717203667.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.286217539434358186648717203667.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.153646219551578201092527860224.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.153646219551578201092527860224.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.315187221221054114974341475212.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.315187221221054114974341475212.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.156016499715048493339281864474.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.156016499715048493339281864474.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.151764021165118974848436095034.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.151764021165118974848436095034.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.106630482085576298661469304872.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.106630482085576298661469304872.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.561423049201987049884663740668.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.561423049201987049884663740668.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.107109359065300889765026303943.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.107109359065300889765026303943.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.341557859428950960906150406596.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.341557859428950960906150406596.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.254473943359963613733707320244.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.254473943359963613733707320244.nii.gz\"}, {\"image\": 
\"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.119209873306155771318545953948.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.119209873306155771318545953948.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.233433352108264931671753343044.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.233433352108264931671753343044.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.174168737938619557573021395302.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.174168737938619557573021395302.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.136830368929967292376608088362.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.136830368929967292376608088362.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.255409701134762680010928250229.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.255409701134762680010928250229.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.290410217650314119074833254861.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.290410217650314119074833254861.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.416701701108520592702405866796.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.416701701108520592702405866796.nii.gz\"}, {\"image\": \"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.177888806135892723698313903329.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.177888806135892723698313903329.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.116097642684124305074876564522.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.116097642684124305074876564522.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.900182736599353600185270496549.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.900182736599353600185270496549.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.908250781706513856628130123235.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.908250781706513856628130123235.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.270788655216695628640355888562.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.270788655216695628640355888562.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.223650122819238796121876338881.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.223650122819238796121876338881.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.122621219961396951727742490470.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.122621219961396951727742490470.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.143813757344903170810482790787.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.143813757344903170810482790787.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.248425363469507808613979846863.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.248425363469507808613979846863.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.328789598898469177563438457842.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.328789598898469177563438457842.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.105495028985881418176186711228.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.105495028985881418176186711228.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.550599855064600241623943717588.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.550599855064600241623943717588.nii.gz\"}, {\"image\": 
\"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.994459772950022352718462251777.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.994459772950022352718462251777.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.187803155574314810830688534991.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.187803155574314810830688534991.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.238019241099704094018548301753.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.238019241099704094018548301753.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.111496024928645603833332252962.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.111496024928645603833332252962.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.116703382344406837243058680403.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.116703382344406837243058680403.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.227707494413800460340110762069.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.227707494413800460340110762069.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.219618492426142913407827034169.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.219618492426142913407827034169.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.447468612991222399440694673357.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.447468612991222399440694673357.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.163217526257871051722166468085.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.163217526257871051722166468085.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.230675342744370103160629638194.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.230675342744370103160629638194.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.297433269262659217151107535012.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.297433269262659217151107535012.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.139889514693390832525232698200.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.139889514693390832525232698200.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.249404938669582150398726875826.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.249404938669582150398726875826.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.141149610914910880857802344415.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.141149610914910880857802344415.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.106379658920626694402549886949.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.106379658920626694402549886949.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.125124219978170516876304987559.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.125124219978170516876304987559.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.113679818447732724990336702075.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.113679818447732724990336702075.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.295462530340364058116953738925.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.295462530340364058116953738925.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.130036599816889919308975074972.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.130036599816889919308975074972.nii.gz\"}, {\"image\": 
\"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.271307051432838466826189754230.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.271307051432838466826189754230.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.276351267409869539593937734609.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.276351267409869539593937734609.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.199282854229880908602362094937.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.199282854229880908602362094937.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.307946352302138765071461362398.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.307946352302138765071461362398.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.242624386080831911167122628616.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.242624386080831911167122628616.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.613212850444255764524630781782.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.613212850444255764524630781782.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.233001470265230594739708503198.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.233001470265230594739708503198.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.944888107209008719031293531091.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.944888107209008719031293531091.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.114249388265341701207347458535.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.114249388265341701207347458535.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.483655032093002252444764787700.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.483655032093002252444764787700.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.897279226481700053115245043064.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.897279226481700053115245043064.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.362762275895885013176610377950.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.362762275895885013176610377950.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.339039410276356623209709113755.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.339039410276356623209709113755.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.266581250778073944645044950856.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.266581250778073944645044950856.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.264251211689085893915477907261.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.264251211689085893915477907261.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.957384617596077920906744920611.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.957384617596077920906744920611.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.214252223927572015414741039150.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.214252223927572015414741039150.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.664989286137882319237192185951.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.664989286137882319237192185951.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.179209990684978588019929720099.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.179209990684978588019929720099.nii.gz\"}, {\"image\": 
\"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.332829333783605240302521201463.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.332829333783605240302521201463.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.686193079844756926365065559979.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.686193079844756926365065559979.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.323535944958374186208096541480.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.323535944958374186208096541480.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.171177995014336749670107905732.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.171177995014336749670107905732.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.336198008634390022174744544656.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.336198008634390022174744544656.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.294120933998772507043263238704.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.294120933998772507043263238704.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.252709517998555732486024866345.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.252709517998555732486024866345.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.215640837032688688030770057224.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.215640837032688688030770057224.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.837810280808122125183730411210.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.837810280808122125183730411210.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.170825539570536865106681134236.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.170825539570536865106681134236.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.261700367741314729940340271960.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.261700367741314729940340271960.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.749871569713868632259874663577.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.749871569713868632259874663577.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.315918264676377418120578391325.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.315918264676377418120578391325.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.167500254299688235071950909530.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.167500254299688235071950909530.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.280125803152924778388346920341.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.280125803152924778388346920341.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.256542095129414948017808425649.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.256542095129414948017808425649.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.272348349298439120568330857680.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.272348349298439120568330857680.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.129982010889624423230394257528.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.129982010889624423230394257528.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.314836406260772370397541392345.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.314836406260772370397541392345.nii.gz\"}, {\"image\": 
\"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.195913706607582347421429908613.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.195913706607582347421429908613.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.228934821089041845791238006047.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.228934821089041845791238006047.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.279300249795483097365868125932.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.279300249795483097365868125932.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.664409965623578819357819577077.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.664409965623578819357819577077.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.185154482385982570363528682299.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.185154482385982570363528682299.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.159665703190517688573100822213.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.159665703190517688573100822213.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.248360766706804179966476685510.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.248360766706804179966476685510.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.771831598853841017505646275338.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.771831598853841017505646275338.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.140253591510022414496468423138.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.140253591510022414496468423138.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.592821488053137951302246128864.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.592821488053137951302246128864.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.151669338315069779994664893123.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.151669338315069779994664893123.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.246758220302211646532176593724.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.246758220302211646532176593724.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.131150737314367975651717513386.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.131150737314367975651717513386.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.245546033414728092794968890929.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.245546033414728092794968890929.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.174692377730646477496286081479.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.174692377730646477496286081479.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.279953669991076107785464313394.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.279953669991076107785464313394.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.249032660919473722154870746474.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.249032660919473722154870746474.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.306112617218006614029386065035.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.306112617218006614029386065035.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.226889213794065160713547677129.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.226889213794065160713547677129.nii.gz\"}, {\"image\": 
\"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.303066851236267189733420290986.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.303066851236267189733420290986.nii.gz\"}, {\"image\": \"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.323426705628838942177546503237.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.323426705628838942177546503237.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.339484970190920330170416228517.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.339484970190920330170416228517.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.776800177074349870648765614630.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.776800177074349870648765614630.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.257515388956260258681136624817.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.257515388956260258681136624817.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.246178337114401749164850220976.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.246178337114401749164850220976.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.724562063158320418413995627171.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.724562063158320418413995627171.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.219254430927834326484477690403.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.219254430927834326484477690403.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.225515255547637437801620523312.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.225515255547637437801620523312.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.226152078193253087875725735761.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.226152078193253087875725735761.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.175318131822744218104175746898.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.175318131822744218104175746898.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.283878926524838648426928238498.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.283878926524838648426928238498.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.292049618819567427252971059233.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.292049618819567427252971059233.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.206028343897359374907954580114.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.206028343897359374907954580114.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.397522780537301776672854630421.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.397522780537301776672854630421.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.882070241245008756731854510592.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.882070241245008756731854510592.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.300136985030081433029390459071.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.300136985030081433029390459071.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.286422846896797433168187085942.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.286422846896797433168187085942.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.272123398257168239653655006815.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.272123398257168239653655006815.nii.gz\"}, {\"image\": 
\"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.675543413149938600000570588203.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.675543413149938600000570588203.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.296066944953051278419805374238.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.296066944953051278419805374238.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.204287915902811325371247860532.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.204287915902811325371247860532.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.792381786708289670758399079830.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.792381786708289670758399079830.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.206097113343059612247503064658.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.206097113343059612247503064658.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.254957696184671649675053562027.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.254957696184671649675053562027.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.301462380687644451483231621986.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.301462380687644451483231621986.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.204566802718283633558802774757.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.204566802718283633558802774757.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.108193664222196923321844991231.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.108193664222196923321844991231.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.486999111981013268988489262668.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.486999111981013268988489262668.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.249314567767437206995861966896.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.249314567767437206995861966896.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.487268565754493433372433148666.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.487268565754493433372433148666.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.236698827306171960683086245994.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.236698827306171960683086245994.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.153181766344026020914478182395.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.153181766344026020914478182395.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.203425588524695836343069893813.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.203425588524695836343069893813.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.194766721609772924944646251928.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.194766721609772924944646251928.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.198016798894102791158686961192.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.198016798894102791158686961192.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.213854687290736562463866711534.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.213854687290736562463866711534.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.305887072264491016857673607285.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.305887072264491016857673607285.nii.gz\"}, {\"image\": 
\"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.671278273674156798801285503514.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.671278273674156798801285503514.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.168833925301530155818375859047.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.168833925301530155818375859047.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.306520140119968755187868602181.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.306520140119968755187868602181.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.188059920088313909273628445208.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.188059920088313909273628445208.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.701514276942509393419164159551.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.701514276942509393419164159551.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.297964221542942838344351735414.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.297964221542942838344351735414.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.939216568327879462530496768794.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.939216568327879462530496768794.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.622243923620914676263059698181.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.622243923620914676263059698181.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.299806338046301317870803017534.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.299806338046301317870803017534.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.373433682859788429397781158572.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.373433682859788429397781158572.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.766881513533845439335142582269.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.766881513533845439335142582269.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.297988578825170426663869669862.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.297988578825170426663869669862.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.137773550852881583165286615668.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.137773550852881583165286615668.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.518487185634324801733841260431.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.518487185634324801733841260431.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.286627485198831346082954437212.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.286627485198831346082954437212.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.143622857676008763729469324839.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.143622857676008763729469324839.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.997611074084993415992563148335.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.997611074084993415992563148335.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.317613170669207528926259976488.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.317613170669207528926259976488.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.200513183558872708878454294671.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.200513183558872708878454294671.nii.gz\"}, {\"image\": 
\"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.239358021703233250639913775427.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.239358021703233250639913775427.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.161633200801003804714818844696.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.161633200801003804714818844696.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.257383535269991165447822992959.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.257383535269991165447822992959.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.100225287222365663678666836860.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.100225287222365663678666836860.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.259124675432205040899951626253.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.259124675432205040899951626253.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.131939324905446238286154504249.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.131939324905446238286154504249.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.336102335330125765000317290445.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.336102335330125765000317290445.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.241083615484551649610616348856.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.241083615484551649610616348856.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.183982839679953938397312236359.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.183982839679953938397312236359.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.149041668385192796520281592139.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.149041668385192796520281592139.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.608029415915051219877530734559.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.608029415915051219877530734559.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.288701997968615460794642979503.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.288701997968615460794642979503.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.271220641987745483198036913951.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.271220641987745483198036913951.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.931383239747372227838946053237.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.931383239747372227838946053237.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.478062284228419671253422844986.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.478062284228419671253422844986.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.346115813056769250958550383763.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.346115813056769250958550383763.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.302403227435841351528721627052.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.302403227435841351528721627052.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.774060103415303828812229821954.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.774060103415303828812229821954.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.324649110927013926557500550446.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.324649110927013926557500550446.nii.gz\"}, {\"image\": 
\"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.888615810685807330497715730842.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.888615810685807330497715730842.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.207341668080525761926965850679.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.207341668080525761926965850679.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.177086402277715068525592995222.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.177086402277715068525592995222.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.262873069163227096134627700599.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.262873069163227096134627700599.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.897161587681142256575045076919.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.897161587681142256575045076919.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.161821150841552408667852639317.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.161821150841552408667852639317.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.247060297988514823071467295949.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.247060297988514823071467295949.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.454273545863197752384437758130.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.454273545863197752384437758130.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.202283133206014258077705539227.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.202283133206014258077705539227.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.253317247142837717905329340520.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.253317247142837717905329340520.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.153732973534937692357111055819.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.153732973534937692357111055819.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.179943248049071805421192715219.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.179943248049071805421192715219.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.172243743899615313644757844726.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.172243743899615313644757844726.nii.gz\"}, {\"image\": \"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.814122498113547115932318256859.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.814122498113547115932318256859.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.215785045378334625097907422785.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.215785045378334625097907422785.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.195557219224169985110295082004.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.195557219224169985110295082004.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.199261544234308780356714831537.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.199261544234308780356714831537.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.187694838527128312070807533473.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.187694838527128312070807533473.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.387954549120924524005910602207.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.387954549120924524005910602207.nii.gz\"}, {\"image\": 
\"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.124822907934319930841506266464.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.124822907934319930841506266464.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.176869045992276345870480098568.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.176869045992276345870480098568.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.292994770358625142596171316474.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.292994770358625142596171316474.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.765459236550358748053283544075.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.765459236550358748053283544075.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.771741891125176943862272696845.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.771741891125176943862272696845.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.217754016294471278921686508169.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.217754016294471278921686508169.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.182192086929819295877506541021.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.182192086929819295877506541021.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.392861216720727557882279374324.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.392861216720727557882279374324.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.193721075067404532739943086458.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.193721075067404532739943086458.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.250481236093201801255751845296.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.250481236093201801255751845296.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.174449669706458092793093760291.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.174449669706458092793093760291.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.237915456403882324748189195892.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.237915456403882324748189195892.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.309901913847714156367981722205.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.309901913847714156367981722205.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.330043769832606379655473292782.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.330043769832606379655473292782.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.229860476925100292554329427970.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.229860476925100292554329427970.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.272344603176687884771013620823.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.272344603176687884771013620823.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.334166493392278943610545989413.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.334166493392278943610545989413.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.121108220866971173712229588402.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.121108220866971173712229588402.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.268030488196493755113553009785.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.268030488196493755113553009785.nii.gz\"}, {\"image\": 
\"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.436403998650924660479049012235.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.436403998650924660479049012235.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.311476128731958142981941696518.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.311476128731958142981941696518.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.212608679077007918190529579976.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.212608679077007918190529579976.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.300392272203629213913702120739.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.300392272203629213913702120739.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.254929810944557499537650429296.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.254929810944557499537650429296.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.194632613233275988184244485809.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.194632613233275988184244485809.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.222052723822248889877676736332.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.222052723822248889877676736332.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.153985109349433321657655488650.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.153985109349433321657655488650.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.558286136379689377915919180358.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.558286136379689377915919180358.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.139577698050713461261415990027.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.139577698050713461261415990027.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.229960820686439513664996214638.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.229960820686439513664996214638.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.124656777236468248920498636247.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.124656777236468248920498636247.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.229096941293122177107846044795.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.229096941293122177107846044795.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.765930210026773090100532964804.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.765930210026773090100532964804.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.145510611155363050427743946446.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.145510611155363050427743946446.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.170706757615202213033480003264.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.170706757615202213033480003264.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.927394449308471452920270961822.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.927394449308471452920270961822.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.114914167428485563471327801935.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.114914167428485563471327801935.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.337005960787660957389988207064.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.337005960787660957389988207064.nii.gz\"}, {\"image\": 
\"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.291156498203266896953765649282.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.291156498203266896953765649282.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.440226700369921575481834344455.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.440226700369921575481834344455.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.167661207884826429102690781600.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.167661207884826429102690781600.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.237215747217294006286437405216.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.237215747217294006286437405216.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.265780642925621389994857727416.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.265780642925621389994857727416.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.712472578497712558367294720243.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.712472578497712558367294720243.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.340158437895922179455019686521.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.340158437895922179455019686521.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.109882169963817627559804568094.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.109882169963817627559804568094.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.328944769569002417592093467626.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.328944769569002417592093467626.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.141345499716190654505508410197.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.141345499716190654505508410197.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.286061375572911414226912429210.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.286061375572911414226912429210.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.603126300703296693942875967838.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.603126300703296693942875967838.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.148935306123327835217659769212.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.148935306123327835217659769212.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.112767175295249119452142211437.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.112767175295249119452142211437.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.300693623747082239407271583452.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.300693623747082239407271583452.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.340012777775661021262977442176.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.340012777775661021262977442176.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.196251645377731223510086726530.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.196251645377731223510086726530.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.259453428008507791234730686014.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.259453428008507791234730686014.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.220596530836092324070084384692.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.220596530836092324070084384692.nii.gz\"}, {\"image\": 
\"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.855232435861303786204450738044.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.855232435861303786204450738044.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.230491296081537726468075344411.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.230491296081537726468075344411.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.215104063467523905369326175410.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.215104063467523905369326175410.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.946129570505893110165820050204.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.946129570505893110165820050204.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.747803439040091794717626507402.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.747803439040091794717626507402.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.150097650621090951325113116280.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.150097650621090951325113116280.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.129650136453746261130135157590.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.129650136453746261130135157590.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.733642690503782454656013446707.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.733642690503782454656013446707.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.292576688635952269497781991202.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.292576688635952269497781991202.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.193964947698259739624715468431.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.193964947698259739624715468431.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.188619674701053082195613114069.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.188619674701053082195613114069.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.134519406153127654901640638633.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.134519406153127654901640638633.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.173931884906244951746140865701.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.173931884906244951746140865701.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.121805476976020513950614465787.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.121805476976020513950614465787.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.330643702676971528301859647742.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.330643702676971528301859647742.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.306140003699110313373771452136.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.306140003699110313373771452136.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.472487466001405705666001578363.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.472487466001405705666001578363.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.188385286346390202873004762827.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.188385286346390202873004762827.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.227968442353440630355230778531.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.227968442353440630355230778531.nii.gz\"}, {\"image\": 
\"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.138674679709964033277400089532.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.138674679709964033277400089532.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.312127933722985204808706697221.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.312127933722985204808706697221.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.138813197521718693188313387015.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.138813197521718693188313387015.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.179683407589764683292800449011.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.179683407589764683292800449011.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.300270516469599170290456821227.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.300270516469599170290456821227.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.265570697208310960298668720669.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.265570697208310960298668720669.nii.gz\"}, {\"image\": \"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.102681962408431413578140925249.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.102681962408431413578140925249.nii.gz\"}], \"validation\": [{\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.227962600322799211676960828223.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.227962600322799211676960828223.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.905371958588660410240398317235.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.905371958588660410240398317235.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.564534197011295112247542153557.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.564534197011295112247542153557.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.129055977637338639741695800950.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.129055977637338639741695800950.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.219087313261026510628926082729.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.219087313261026510628926082729.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.313334055029671473836954456733.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.313334055029671473836954456733.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.109002525524522225658609808059.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.109002525524522225658609808059.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.219909753224298157409438012179.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.219909753224298157409438012179.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.128023902651233986592378348912.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.128023902651233986592378348912.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.188376349804761988217597754952.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.188376349804761988217597754952.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.640729228179368154416184318668.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.640729228179368154416184318668.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.323302986710576400812869264321.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.323302986710576400812869264321.nii.gz\"}, {\"image\": 
\"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.241570579760883349458693655367.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.241570579760883349458693655367.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.310626494937915759224334597176.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.310626494937915759224334597176.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.277445975068759205899107114231.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.277445975068759205899107114231.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.250863365157630276148828903732.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.250863365157630276148828903732.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.525937963993475482158828421281.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.525937963993475482158828421281.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.202811684116768680758082619196.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.202811684116768680758082619196.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.313835996725364342034830119490.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.313835996725364342034830119490.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.294188507421106424248264912111.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.294188507421106424248264912111.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.187451715205085403623595258748.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.187451715205085403623595258748.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.126264578931778258890371755354.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.126264578931778258890371755354.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.511347030803753100045216493273.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.511347030803753100045216493273.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.108197895896446896160048741492.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.108197895896446896160048741492.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.250438451287314206124484591986.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.250438451287314206124484591986.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.313605260055394498989743099991.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.313605260055394498989743099991.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.272042302501586336192628818865.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.272042302501586336192628818865.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.302134342469412607966016057827.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.302134342469412607966016057827.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.724251104254976962355686318345.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.724251104254976962355686318345.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.826812708000318290301835871780.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.826812708000318290301835871780.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.249530219848512542668813996730.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.249530219848512542668813996730.nii.gz\"}, {\"image\": 
\"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.975254950136384517744116790879.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.975254950136384517744116790879.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.310548927038333190233889983845.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.310548927038333190233889983845.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.126121460017257137098781143514.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.126121460017257137098781143514.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.144438612068946916340281098509.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.144438612068946916340281098509.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.566816709786169715745131047975.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.566816709786169715745131047975.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.657775098760536289051744981056.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.657775098760536289051744981056.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.111172165674661221381920536987.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.111172165674661221381920536987.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.269689294231892620436462818860.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.269689294231892620436462818860.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.138080888843357047811238713686.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.138080888843357047811238713686.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.105756658031515062000744821260.nii.gz\", \"label\": 
\"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.105756658031515062000744821260.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.898642529028521482602829374444.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.898642529028521482602829374444.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.146429221666426688999739595820.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.146429221666426688999739595820.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.188209889686363159853715266493.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.188209889686363159853715266493.nii.gz\"}, {\"image\": \"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.139713436241461669335487719526.nii.gz\", \"label\": \"seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.139713436241461669335487719526.nii.gz\"}]}"
  },
  {
    "path": "jsons/dataset_TCIAcovid19_0.json",
    "content": "{\"training\": [{\"image\": \"data/volume-covid19-A-0269.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0012.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0721_day008.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0154.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0358.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0734_day002.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0304.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0640.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0263.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0737_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0253.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0388.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0708_day049.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0007.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0175.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0523.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0138.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0675.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0308.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0025.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0318_1.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0366.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0282.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0733_day069.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0261_0.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0463.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0180_0.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0220.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0298.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0573.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0140.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0702_day008.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0648.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0600.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0111.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0418.nii.gz\"}, {\"image\": 
\"data/volume-covid19-A-0084.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0221.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0569.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0598.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0421.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0703_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0074_0.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0683.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0369_0.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0329.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0455.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0720_day002.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0021.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0352.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0454.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0032.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0315.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0017.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0123.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0734_day008.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0394.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0125.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0056.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0213.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0229.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0737_day013.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0515.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0420.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0417.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0426.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0107.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0710_day002.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0722_day018.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0535.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0005.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0022.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0079.nii.gz\"}, {\"image\": 
\"data/volume-covid19-A-0416.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0458.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0560.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0449.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0580.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0737_day003.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0721_day020.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0545.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0733_day004.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0047_0.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0106.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0593.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0715_day019.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0655.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0687.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0692.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0373.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0647.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0161.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0002.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0685.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0083.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0680.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0349.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0390.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0603.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0716_day008.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0372.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0703_day006.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0433.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0691.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0443.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0261_1.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0509.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0704_day006.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0231.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0542.nii.gz\"}, {\"image\": 
\"data/volume-covid19-A-0492.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0717_day052.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0597.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0067.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0666.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0723_day043.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0239.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0048.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0481.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0673.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0716_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0053.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0178.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0198.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0428.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0035.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0189_0.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0602.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0029.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0577.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0638.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0008.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0710_day024.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0391.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0562.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0383_0.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0723_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0092.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0196_0.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0671.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0697.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0033.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0584.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0131.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0468.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0527.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0703_day010.nii.gz\"}, {\"image\": 
\"data/volume-covid19-A-0558.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0241.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0259.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0216.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0612.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0736_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0009.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0710_day007.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0088.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0581.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0393.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0554.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0736_day010.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0335.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0483.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0473.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0401.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0502.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0714_day006.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0467.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0361.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0028_1.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0232.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0202_1.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0363.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0500.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0112.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0145.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0271.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0332.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0281.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0171_0.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0336.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0531.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0579.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0713_day017.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0695.nii.gz\"}, {\"image\": 
\"data/volume-covid19-A-0708_day018.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0639.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0607.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0082.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0174.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0257.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0260.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0240.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0126.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0661.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0036.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0410.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0398.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0665.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0493.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0706_day015.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0619.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0708_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0440.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0326.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0556.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0644.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0080.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0375.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0503.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0662.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0299.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0234.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0546.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0344.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0149.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0738_day052.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0645.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0272.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0177.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0738_day010.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0631.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0570.nii.gz\"}, 
{\"image\": \"data/volume-covid19-A-0737_day006.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0709_day005.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0287_1.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0262.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0223.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0444.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0723_day004.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0702_day050.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0703_day016.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0715_day012.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0115.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0643.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0448.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0575.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0110.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0290.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0676.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0287_0.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0381_1.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0594.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0702_day021.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0700_day015.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0658.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0735_day021.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0702_day014.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0038.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0169.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0651.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0561.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0284.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0701_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0637.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0074_1.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0237.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0128.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0707_day000.nii.gz\"}, {\"image\": 
\"data/volume-covid19-A-0212_0.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0437.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0707_day060.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0374.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0170.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0327.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0185.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0517.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0324.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0117.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0736_day014.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0310.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0078.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0081.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0547.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0738_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0085.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0267.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0582.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0474.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0718_day002.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0227.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0432.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0150.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0345.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0629.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0385.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0165.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0700_day056.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0530.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0507.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0709_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0674.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0293.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0485.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0722_day008.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0586.nii.gz\"}, {\"image\": 
\"data/volume-covid19-A-0244.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0713_day050.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0608.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0615.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0100.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0054.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0498.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0070.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0479.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0031.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0425.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0252.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0663.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0387.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0203.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0708_day003.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0153.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0472.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0490.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0397.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0505.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0320.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0616.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0510.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0694.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0550.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0291_0.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0116.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0279.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0734_day005.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0590.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0690.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0578.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0277.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0195.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0617.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0325.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0610.nii.gz\"}, {\"image\": 
\"data/volume-covid19-A-0011.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0714_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0351.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0353.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0636.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0670.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0713_day012.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0156.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0682.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0157.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0548.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0303_0.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0212_1.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0275.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0717_day005.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0707_day002.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0430.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0004.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0099.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0027_1.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0529.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0190.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0075.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0606.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0064.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0718_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0735_day010.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0042.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0522.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0249.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0718_day054.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0342.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0094.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0016.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0047_1.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0439.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0635.nii.gz\"}, {\"image\": 
\"data/volume-covid19-A-0370.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0006.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0219.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0091.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0355.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0109.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0069.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0377.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0405.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0343.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0071.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0604.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0371.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0722_day003.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0362.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0596.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0576.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0357.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0681.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0480.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0184_1.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0139.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0559.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0423.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0013.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0181.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0659.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0499.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0464.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0331.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0735_day004.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0118.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0180_1.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0166.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0066.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0536.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0096.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0512.nii.gz\"}, {\"image\": 
\"data/volume-covid19-A-0713_day008.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0571.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0346.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0129.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0222.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0184_0.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0737_day044.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0289.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0609.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0356.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0669.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0186.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0189_1.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0553.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0625.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0641.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0010.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0041.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0657.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0703_day021.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0314.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0504.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0413.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0019.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0202_0.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0422.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0233.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0585.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0328.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0487.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0296.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0414.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0630.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0323.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0679.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0164.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0702_day011.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0392.nii.gz\"}, 
{\"image\": \"data/volume-covid19-A-0710_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0330.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0046.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0270.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0678.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0633.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0734_day012.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0144.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0715_day052.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0402.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0519.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0382.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0183.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0540.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0668.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0475.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0722_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0001.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0484.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0201.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0722_day011.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0710_day013.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0660.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0516.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0721_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0686.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0288.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0256_0.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0614.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0039.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0708_day011.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0258.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0242.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0098.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0412.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0549.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0235.nii.gz\"}, {\"image\": 
\"data/volume-covid19-A-0407.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0461.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0618.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0719_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0403.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0442.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0478.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0706_day011.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0450.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0142.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0136.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0506.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0360.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0488.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0713_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0340.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0457.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0557.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0622.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0134.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0020.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0102.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0255.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0268.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0406.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0438.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0632.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0254.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0624.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0524.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0305.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0589.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0247.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0369_1.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0445.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0130.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0037.nii.gz\"}, {\"image\": 
\"data/volume-covid19-A-0204.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0319.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0030.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0073.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0034.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0588.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0316.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0404.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0626.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0068.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0179.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0720_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0435.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0710_day057.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0246.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0228.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0627.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0303_1.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0057.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0133.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0698.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0567.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0533.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0620.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0537.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0147.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0114.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0347.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0256_1.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0605.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0735_day052.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0501.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0592.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0089.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0735_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0408.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0318_0.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0135.nii.gz\"}, 
{\"image\": \"data/volume-covid19-A-0477.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0653.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0424.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0552.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0090.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0028_0.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0137.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0199.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0538.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0664.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0295.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0359.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0532.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0736_day050.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0708_day008.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0714_day051.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0706_day049.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0705_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0285.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0566.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0667.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0301.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0378.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0101.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0734_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0446.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0196_1.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0543.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0706_day006.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0451.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0595.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0427.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0384.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0302.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0146.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0341.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0564.nii.gz\"}, {\"image\": 
\"data/volume-covid19-A-0591.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0194.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0712_day022.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0076.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0555.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0273.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0447.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0339.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0309.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0023.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0672.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0043.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0469.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0311.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0654.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0286.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0688.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0621.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0719_day060.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0226.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0095.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0306.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0108.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0051.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0077.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0225.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0705_day004.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0689.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0124.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0072.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0040.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0702_day062.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0491.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0539.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0350.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0702_day025.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0337.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0158.nii.gz\"}, 
{\"image\": \"data/volume-covid19-A-0470.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0396.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0121.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0574.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0526.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0436.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0494.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0014.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0465.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0702_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0173.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0706_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0684.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0193.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0541.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0551.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0044.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0206.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0365.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0214.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0188.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0736_day007.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0143.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0601.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0063.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0313.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0415.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0703_day003.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0528.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0476.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0563.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0568.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0354.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0704_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0104.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0613.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0652.nii.gz\"}, {\"image\": 
\"data/volume-covid19-A-0738_day002.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0245.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0700_day007.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0291_1.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0383_1.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0276.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0187.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0338.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0650.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0452.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0419.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0453.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0719_day002.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0701_day006.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0718_day017.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0348.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0086.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0055.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0274.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0482.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0587.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0599.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0431.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0456.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0294.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0218.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0712_day068.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0381_0.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0696.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0623.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0514.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0441.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0266.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0699.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0518.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0333.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0712_day005.nii.gz\"}, {\"image\": 
\"data/volume-covid19-A-0045.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0460.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0386.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0152.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0734_day020.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0525.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0544.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0376.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0251.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0462.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0733_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0151.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0715_day007.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0486.nii.gz\"}], \"validation\": [{\"image\": \"data/volume-covid19-A-0656.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0495.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0705_day043.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0264.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0300.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0722_day047.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0434.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0015.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0700_day010.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0411.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0572.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0003.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0693.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0715_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0307.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0489.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0521.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0215.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0400.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0280.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0236.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0167_1.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0733_day007.nii.gz\"}, {\"image\": 
\"data/volume-covid19-A-0701_day055.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0513.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0712_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0182.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0511.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0321.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0120.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0367.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0534.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0700_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0168.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0649.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0409.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0018.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0191.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0297.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0429.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0238.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0717_day000.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0380.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0646.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0026.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0176.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0611.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0224.nii.gz\"}, {\"image\": \"data/volume-covid19-A-0628.nii.gz\"}]}"
  },
  {
    "path": "jsons/flare23.json",
    "content": "{\"training\": [{\"image\": \"imagesTr2200/1-500/FLARE23_0001_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0002_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0003_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0004_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0005_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0006_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0007_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0008_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0009_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0010_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0011_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0012_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0013_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0014_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0015_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0016_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0017_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0018_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0019_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0020_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0021_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0022_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0023_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0024_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0025_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0026_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0027_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0028_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0029_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0030_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0031_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1-500/FLARE23_0032_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0033_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0034_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0035_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0036_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0037_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0038_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0039_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0040_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0041_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0042_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0043_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0044_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0045_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0046_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0047_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0048_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0049_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0050_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0051_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0052_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0053_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0054_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0055_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0056_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0057_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0058_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0059_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0060_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0061_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0062_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0063_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1-500/FLARE23_0064_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0065_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0066_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0067_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0068_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0069_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0070_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0071_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0072_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0073_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0074_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0075_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0076_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0077_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0078_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0079_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0080_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0081_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0082_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0083_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0084_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0085_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0086_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0087_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0088_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0089_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0090_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0091_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0092_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0093_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0094_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0095_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1-500/FLARE23_0096_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0097_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0098_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0099_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0100_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0101_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0102_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0103_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0104_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0105_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0106_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0107_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0108_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0109_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0110_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0111_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0112_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0113_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0114_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0115_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0116_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0117_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0118_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0119_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0120_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0121_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0122_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0123_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0124_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0125_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0126_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0127_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1-500/FLARE23_0128_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0129_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0130_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0131_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0132_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0133_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0134_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0135_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0136_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0137_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0138_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0139_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0140_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0141_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0142_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0143_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0144_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0145_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0146_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0147_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0148_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0149_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0150_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0151_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0152_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0153_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0154_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0155_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0156_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0157_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0158_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0159_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1-500/FLARE23_0160_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0161_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0162_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0163_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0164_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0165_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0166_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0167_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0168_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0169_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0170_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0171_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0172_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0173_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0174_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0175_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0176_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0177_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0178_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0179_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0180_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0181_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0182_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0183_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0184_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0185_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0186_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0187_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0188_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0189_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0190_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0191_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1-500/FLARE23_0192_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0193_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0194_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0195_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0196_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0197_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0198_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0199_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0200_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0201_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0202_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0203_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0204_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0205_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0206_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0207_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0208_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0209_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0210_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0211_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0212_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0213_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0214_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0215_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0216_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0217_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0218_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0219_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0220_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0221_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0222_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0223_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1-500/FLARE23_0224_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0225_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0226_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0227_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0228_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0229_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0230_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0231_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0232_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0233_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0234_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0235_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0236_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0237_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0238_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0239_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0240_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0241_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0242_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0243_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0244_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0245_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0246_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0247_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0248_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0249_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0250_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0251_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0252_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0253_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0254_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0255_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1-500/FLARE23_0256_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0257_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0258_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0259_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0260_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0261_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0262_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0263_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0264_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0265_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0266_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0267_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0268_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0269_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0270_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0271_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0272_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0273_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0274_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0275_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0276_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0277_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0278_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0279_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0280_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0281_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0282_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0283_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0284_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0285_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0286_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0287_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1-500/FLARE23_0288_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0289_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0290_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0291_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0292_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0293_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0294_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0295_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0296_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0297_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0298_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0299_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0300_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0301_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0302_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0303_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0304_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0305_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0306_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0307_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0308_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0309_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0310_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0311_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0312_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0313_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0314_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0315_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0316_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0317_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0318_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0319_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1-500/FLARE23_0320_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0321_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0322_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0323_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0324_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0325_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0326_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0327_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0328_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0329_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0330_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0331_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0332_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0333_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0334_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0335_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0336_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0337_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0338_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0339_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0340_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0341_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0342_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0343_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0344_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0345_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0346_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0347_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0348_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0349_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0350_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0351_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1-500/FLARE23_0352_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0353_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0354_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0355_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0356_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0357_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0358_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0359_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0360_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0361_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0362_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0363_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0364_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0365_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0366_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0367_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0368_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0369_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0370_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0371_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0372_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0373_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0374_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0375_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0376_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0377_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0378_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0379_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0380_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0381_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0382_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0383_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1-500/FLARE23_0384_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0385_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0386_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0387_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0388_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0389_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0390_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0391_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0392_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0393_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0394_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0395_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0396_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0397_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0398_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0399_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0400_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0401_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0402_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0403_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0404_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0405_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0406_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0407_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0408_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0409_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0410_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0411_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0412_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0413_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0414_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0415_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1-500/FLARE23_0416_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0417_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0418_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0419_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0420_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0421_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0422_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0423_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0424_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0425_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0426_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0427_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0428_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0429_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0430_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0431_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0432_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0433_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0434_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0435_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0436_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0437_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0438_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0439_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0440_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0441_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0442_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0443_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0444_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0445_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0446_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0447_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1-500/FLARE23_0448_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0449_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0450_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0451_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0452_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0453_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0454_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0455_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0456_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0457_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0458_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0459_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0460_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0461_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0462_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0463_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0464_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0465_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0466_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0467_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0468_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0469_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0470_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0471_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0472_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0473_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0474_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0475_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0476_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0477_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0478_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0479_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1-500/FLARE23_0480_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0481_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0482_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0483_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0484_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0485_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0486_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0487_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0488_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0489_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0490_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0491_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0492_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0493_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0494_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0495_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0496_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0497_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0498_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0499_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1-500/FLARE23_0500_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1001_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1002_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1003_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1004_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1005_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1006_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1007_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1008_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1009_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1010_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1001-1500/FLARE23_1011_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1012_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1013_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1014_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1015_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1016_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1017_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1018_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1019_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1020_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1021_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1022_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1023_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1024_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1025_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1026_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1027_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1028_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1029_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1030_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1031_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1032_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1033_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1034_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1035_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1036_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1037_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1038_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1039_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1040_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1001-1500/FLARE23_1041_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1042_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1043_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1044_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1045_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1046_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1047_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1048_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1049_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1050_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1051_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1052_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1053_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1054_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1055_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1056_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1057_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1058_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1059_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1060_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1061_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1062_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1063_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1064_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1065_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1066_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1067_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1068_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1069_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1070_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1001-1500/FLARE23_1071_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1072_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1073_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1074_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1075_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1076_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1077_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1078_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1079_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1080_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1081_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1082_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1083_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1084_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1085_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1086_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1087_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1088_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1089_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1090_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1091_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1092_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1093_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1094_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1095_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1096_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1097_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1098_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1099_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1100_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1001-1500/FLARE23_1101_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1102_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1103_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1104_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1105_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1106_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1107_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1108_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1109_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1110_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1111_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1112_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1113_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1114_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1115_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1116_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1117_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1118_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1119_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1120_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1121_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1122_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1123_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1124_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1125_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1126_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1127_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1128_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1129_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1130_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1001-1500/FLARE23_1131_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1132_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1133_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1134_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1135_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1136_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1137_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1138_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1139_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1140_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1141_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1142_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1143_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1144_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1145_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1146_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1147_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1148_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1149_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1150_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1151_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1152_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1153_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1154_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1155_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1156_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1157_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1158_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1159_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1160_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1001-1500/FLARE23_1161_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1162_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1163_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1164_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1165_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1166_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1167_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1168_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1169_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1170_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1171_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1172_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1173_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1174_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1175_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1176_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1177_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1178_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1179_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1180_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1181_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1182_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1183_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1184_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1185_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1186_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1187_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1188_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1189_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1190_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1001-1500/FLARE23_1191_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1192_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1193_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1194_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1195_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1196_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1197_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1198_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1199_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1200_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1201_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1202_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1203_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1204_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1205_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1206_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1207_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1208_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1209_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1210_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1211_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1212_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1213_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1214_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1215_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1216_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1217_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1218_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1219_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1220_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1001-1500/FLARE23_1221_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1222_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1223_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1224_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1225_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1226_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1227_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1228_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1229_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1230_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1231_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1232_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1233_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1234_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1235_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1236_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1237_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1238_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1239_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1240_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1241_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1242_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1243_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1244_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1245_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1246_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1247_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1248_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1249_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1250_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1001-1500/FLARE23_1251_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1252_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1253_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1254_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1255_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1256_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1257_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1258_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1259_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1260_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1261_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1262_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1263_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1264_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1265_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1266_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1267_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1268_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1269_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1270_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1271_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1272_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1273_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1274_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1275_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1276_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1277_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1278_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1279_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1280_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1001-1500/FLARE23_1281_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1282_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1283_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1284_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1285_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1286_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1287_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1288_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1289_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1290_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1291_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1292_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1293_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1294_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1295_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1296_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1297_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1298_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1299_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1300_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1301_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1302_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1303_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1304_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1305_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1306_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1307_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1308_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1309_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1310_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1001-1500/FLARE23_1311_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1312_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1313_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1314_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1315_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1316_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1317_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1318_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1319_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1320_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1321_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1322_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1323_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1324_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1325_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1326_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1327_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1328_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1329_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1330_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1331_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1332_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1333_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1334_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1335_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1336_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1337_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1338_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1339_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1340_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1001-1500/FLARE23_1341_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1342_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1343_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1344_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1345_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1346_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1347_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1348_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1349_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1350_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1351_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1352_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1353_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1354_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1355_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1356_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1357_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1358_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1359_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1360_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1361_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1362_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1363_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1364_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1365_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1366_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1367_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1368_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1369_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1370_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1001-1500/FLARE23_1371_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1372_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1373_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1374_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1375_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1376_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1377_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1378_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1379_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1380_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1381_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1382_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1383_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1384_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1385_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1386_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1387_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1388_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1389_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1390_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1391_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1392_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1393_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1394_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1395_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1396_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1397_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1398_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1399_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1400_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1001-1500/FLARE23_1401_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1402_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1403_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1404_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1405_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1406_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1407_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1408_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1409_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1410_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1411_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1412_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1413_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1414_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1415_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1416_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1417_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1418_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1419_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1420_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1421_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1422_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1423_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1424_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1425_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1426_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1427_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1428_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1429_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1430_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1001-1500/FLARE23_1431_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1432_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1433_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1434_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1435_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1436_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1437_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1438_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1439_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1440_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1441_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1442_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1443_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1444_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1445_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1446_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1447_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1448_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1449_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1450_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1451_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1452_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1453_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1454_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1455_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1456_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1457_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1458_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1459_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1460_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1001-1500/FLARE23_1461_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1462_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1463_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1464_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1465_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1466_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1467_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1468_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1469_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1470_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1471_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1472_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1473_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1474_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1475_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1476_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1477_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1478_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1479_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1480_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1481_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1482_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1483_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1484_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1485_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1486_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1487_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1488_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1489_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1490_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1001-1500/FLARE23_1491_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1492_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1493_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1494_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1495_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1496_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1497_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1498_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1499_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1001-1500/FLARE23_1500_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1501_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1502_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1503_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1504_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1505_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1506_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1507_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1508_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1509_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1510_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1511_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1512_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1513_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1514_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1515_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1516_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1517_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1518_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1519_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1520_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1501-2000/FLARE23_1521_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1522_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1523_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1524_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1525_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1526_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1527_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1528_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1529_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1530_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1531_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1532_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1533_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1534_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1535_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1536_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1537_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1538_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1539_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1540_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1541_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1542_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1543_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1544_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1545_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1546_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1547_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1548_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1549_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1550_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1501-2000/FLARE23_1551_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1552_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1553_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1554_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1555_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1556_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1557_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1558_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1559_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1560_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1561_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1562_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1563_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1564_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1565_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1566_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1567_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1568_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1569_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1570_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1571_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1572_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1573_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1574_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1575_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1576_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1577_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1578_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1579_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1580_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1501-2000/FLARE23_1581_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1582_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1583_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1584_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1585_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1586_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1587_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1588_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1589_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1590_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1591_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1592_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1593_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1594_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1595_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1596_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1597_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1598_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1599_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1600_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1601_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1602_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1603_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1604_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1605_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1606_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1607_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1608_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1609_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1610_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1501-2000/FLARE23_1611_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1612_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1613_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1614_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1615_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1616_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1617_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1618_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1619_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1620_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1621_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1622_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1623_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1624_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1625_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1626_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1627_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1628_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1629_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1630_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1631_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1632_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1633_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1634_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1635_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1636_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1637_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1638_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1639_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1640_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1501-2000/FLARE23_1641_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1642_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1643_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1644_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1645_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1646_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1647_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1648_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1649_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1650_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1651_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1652_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1653_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1654_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1655_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1656_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1657_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1658_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1659_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1660_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1661_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1662_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1663_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1664_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1665_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1666_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1667_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1668_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1669_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1670_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1501-2000/FLARE23_1671_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1672_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1673_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1674_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1675_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1676_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1677_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1678_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1679_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1680_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1681_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1682_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1683_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1684_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1685_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1686_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1687_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1688_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1689_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1690_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1691_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1692_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1693_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1694_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1695_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1696_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1697_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1698_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1699_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1700_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1501-2000/FLARE23_1701_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1702_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1703_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1704_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1705_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1706_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1707_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1708_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1709_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1710_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1711_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1712_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1713_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1714_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1715_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1716_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1717_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1718_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1719_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1720_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1721_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1722_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1723_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1724_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1725_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1726_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1727_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1728_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1729_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1730_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1501-2000/FLARE23_1731_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1732_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1733_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1734_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1735_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1736_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1737_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1738_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1739_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1740_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1741_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1742_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1743_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1744_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1745_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1746_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1747_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1748_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1749_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1750_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1751_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1752_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1753_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1754_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1755_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1756_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1757_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1758_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1759_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1760_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1501-2000/FLARE23_1761_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1762_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1763_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1764_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1765_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1766_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1767_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1768_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1769_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1770_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1771_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1772_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1773_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1774_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1775_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1776_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1777_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1778_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1779_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1780_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1781_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1782_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1783_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1784_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1785_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1786_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1787_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1788_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1789_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1790_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1501-2000/FLARE23_1791_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1792_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1793_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1794_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1795_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1796_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1797_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1798_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1799_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1800_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1801_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1802_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1803_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1804_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1805_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1806_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1807_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1808_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1809_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1810_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1811_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1812_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1813_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1814_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1815_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1816_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1817_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1818_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1819_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1820_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1501-2000/FLARE23_1821_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1822_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1823_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1824_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1825_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1826_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1827_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1828_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1829_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1830_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1831_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1832_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1833_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1834_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1835_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1836_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1837_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1838_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1839_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1840_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1841_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1842_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1843_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1844_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1845_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1846_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1847_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1848_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1849_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1850_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1501-2000/FLARE23_1851_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1852_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1853_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1854_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1855_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1856_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1857_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1858_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1859_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1860_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1861_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1862_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1863_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1864_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1865_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1866_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1867_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1868_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1869_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1870_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1871_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1872_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1873_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1874_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1875_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1876_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1877_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1878_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1879_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1880_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1501-2000/FLARE23_1881_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1882_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1883_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1884_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1885_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1886_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1887_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1888_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1889_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1890_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1891_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1892_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1893_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1894_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1895_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1896_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1897_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1898_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1899_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1900_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1901_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1902_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1903_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1904_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1905_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1906_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1907_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1908_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1909_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1910_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1501-2000/FLARE23_1911_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1912_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1913_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1914_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1915_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1916_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1917_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1918_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1919_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1920_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1921_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1922_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1923_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1924_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1925_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1926_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1927_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1928_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1929_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1930_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1931_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1932_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1933_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1934_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1935_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1936_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1937_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1938_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1939_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1940_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1501-2000/FLARE23_1941_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1942_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1943_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1944_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1945_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1946_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1947_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1948_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1949_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1950_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1951_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1952_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1953_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1954_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1955_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1956_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1957_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1958_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1959_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1960_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1961_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1962_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1963_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1964_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1965_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1966_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1967_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1968_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1969_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1970_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/1501-2000/FLARE23_1971_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1972_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1973_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1974_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1975_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1976_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1977_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1978_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1979_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1980_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1981_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1982_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1983_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1984_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1985_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1986_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1987_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1988_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1989_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1990_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1991_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1992_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1993_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1994_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1995_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1996_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1997_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1998_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_1999_0000.nii.gz\"}, {\"image\": \"imagesTr2200/1501-2000/FLARE23_2000_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/2001-2200/FLARE23_2001_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2002_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2003_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2004_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2005_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2006_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2007_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2008_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2009_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2010_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2011_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2012_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2013_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2014_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2015_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2016_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2017_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2018_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2019_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2020_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2021_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2022_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2023_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2024_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2025_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2026_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2027_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2028_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2029_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2030_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/2001-2200/FLARE23_2031_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2032_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2033_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2034_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2035_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2036_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2037_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2038_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2039_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2040_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2041_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2042_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2043_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2044_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2045_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2046_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2047_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2048_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2049_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2050_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2051_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2052_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2053_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2054_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2055_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2056_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2057_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2058_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2059_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2060_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/2001-2200/FLARE23_2061_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2062_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2063_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2064_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2065_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2066_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2067_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2068_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2069_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2070_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2071_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2072_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2073_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2074_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2075_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2076_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2077_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2078_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2079_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2080_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2081_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2082_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2083_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2084_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2085_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2086_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2087_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2088_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2089_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2090_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/2001-2200/FLARE23_2091_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2092_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2093_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2094_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2095_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2096_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2097_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2098_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2099_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2100_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2101_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2102_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2103_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2104_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2105_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2106_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2107_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2108_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2109_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2110_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2111_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2112_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2113_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2114_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2115_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2116_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2117_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2118_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2119_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2120_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/2001-2200/FLARE23_2121_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2122_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2123_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2124_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2125_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2126_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2127_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2128_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2129_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2130_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2131_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2132_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2133_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2134_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2135_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2136_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2137_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2138_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2139_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2140_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2141_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2142_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2143_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2144_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2145_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2146_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2147_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2148_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2149_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2150_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/2001-2200/FLARE23_2151_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2152_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2153_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2154_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2155_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2156_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2157_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2158_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2159_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2160_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2161_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2162_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2163_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2164_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2165_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2166_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2167_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2168_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2169_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2170_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2171_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2172_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2173_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2174_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2175_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2176_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2177_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2178_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2179_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2180_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/2001-2200/FLARE23_2181_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2182_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2183_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2184_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2185_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2186_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2187_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2188_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2189_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2190_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2191_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2192_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2193_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2194_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2195_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2196_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2197_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2198_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2199_0000.nii.gz\"}, {\"image\": \"imagesTr2200/2001-2200/FLARE23_2200_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0501_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0502_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0503_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0504_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0505_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0506_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0507_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0508_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0509_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0510_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/501-1000/FLARE23_0511_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0512_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0513_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0514_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0515_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0516_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0517_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0518_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0519_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0520_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0521_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0522_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0523_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0524_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0525_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0526_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0527_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0528_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0529_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0530_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0531_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0532_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0533_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0534_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0535_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0536_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0537_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0538_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0539_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0540_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/501-1000/FLARE23_0541_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0542_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0543_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0544_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0545_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0546_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0547_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0548_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0549_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0550_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0551_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0552_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0553_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0554_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0555_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0556_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0557_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0558_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0559_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0560_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0561_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0562_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0563_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0564_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0565_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0566_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0567_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0568_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0569_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0570_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/501-1000/FLARE23_0571_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0572_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0573_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0574_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0575_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0576_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0577_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0578_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0579_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0580_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0581_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0582_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0583_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0584_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0585_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0586_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0587_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0588_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0589_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0590_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0591_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0592_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0593_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0594_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0595_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0596_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0597_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0598_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0599_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0600_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/501-1000/FLARE23_0601_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0602_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0603_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0604_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0605_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0606_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0607_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0608_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0609_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0610_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0611_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0612_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0613_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0614_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0615_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0616_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0617_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0618_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0619_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0620_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0621_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0622_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0623_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0624_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0625_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0626_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0627_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0628_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0629_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0630_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/501-1000/FLARE23_0631_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0632_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0633_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0634_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0635_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0636_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0637_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0638_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0639_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0640_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0641_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0642_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0643_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0644_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0645_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0646_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0647_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0648_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0649_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0650_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0651_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0652_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0653_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0654_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0655_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0656_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0657_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0658_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0659_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0660_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/501-1000/FLARE23_0661_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0662_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0663_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0664_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0665_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0666_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0667_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0668_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0669_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0670_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0671_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0672_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0673_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0674_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0675_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0676_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0677_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0678_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0679_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0680_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0681_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0682_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0683_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0684_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0685_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0686_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0687_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0688_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0689_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0690_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/501-1000/FLARE23_0691_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0692_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0693_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0694_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0695_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0696_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0697_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0698_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0699_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0700_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0701_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0702_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0703_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0704_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0705_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0706_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0707_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0708_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0709_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0710_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0711_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0712_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0713_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0714_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0715_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0716_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0717_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0718_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0719_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0720_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/501-1000/FLARE23_0721_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0722_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0723_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0724_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0725_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0726_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0727_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0728_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0729_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0730_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0731_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0732_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0733_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0734_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0735_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0736_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0737_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0738_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0739_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0740_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0741_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0742_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0743_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0744_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0745_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0746_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0747_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0748_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0749_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0750_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/501-1000/FLARE23_0751_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0752_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0753_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0754_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0755_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0756_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0757_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0758_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0759_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0760_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0761_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0762_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0763_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0764_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0765_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0766_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0767_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0768_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0769_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0770_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0771_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0772_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0773_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0774_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0775_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0776_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0777_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0778_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0779_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0780_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/501-1000/FLARE23_0781_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0782_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0783_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0784_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0785_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0786_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0787_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0788_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0789_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0790_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0791_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0792_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0793_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0794_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0795_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0796_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0797_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0798_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0799_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0800_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0801_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0802_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0803_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0804_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0805_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0806_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0807_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0808_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0809_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0810_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/501-1000/FLARE23_0811_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0812_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0813_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0814_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0815_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0816_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0817_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0818_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0819_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0820_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0821_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0822_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0823_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0824_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0825_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0826_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0827_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0828_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0829_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0830_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0831_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0832_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0833_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0834_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0835_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0836_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0837_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0838_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0839_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0840_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/501-1000/FLARE23_0841_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0842_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0843_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0844_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0845_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0846_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0847_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0848_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0849_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0850_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0851_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0852_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0853_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0854_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0855_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0856_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0857_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0858_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0859_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0860_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0861_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0862_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0863_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0864_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0865_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0866_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0867_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0868_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0869_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0870_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/501-1000/FLARE23_0871_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0872_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0873_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0874_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0875_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0876_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0877_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0878_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0879_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0880_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0881_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0882_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0883_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0884_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0885_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0886_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0887_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0888_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0889_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0890_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0891_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0892_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0893_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0894_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0895_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0896_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0897_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0898_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0899_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0900_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/501-1000/FLARE23_0901_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0902_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0903_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0904_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0905_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0906_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0907_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0908_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0909_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0910_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0911_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0912_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0913_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0914_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0915_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0916_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0917_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0918_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0919_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0920_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0921_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0922_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0923_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0924_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0925_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0926_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0927_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0928_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0929_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0930_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/501-1000/FLARE23_0931_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0932_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0933_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0934_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0935_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0936_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0937_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0938_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0939_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0940_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0941_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0942_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0943_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0944_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0945_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0946_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0947_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0948_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0949_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0950_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0951_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0952_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0953_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0954_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0955_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0956_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0957_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0958_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0959_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0960_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/501-1000/FLARE23_0961_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0962_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0963_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0964_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0965_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0966_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0967_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0968_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0969_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0970_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0971_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0972_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0973_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0974_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0975_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0976_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0977_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0978_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0979_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0980_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0981_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0982_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0983_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0984_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0985_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0986_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0987_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0988_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0989_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0990_0000.nii.gz\"}, {\"image\": 
\"imagesTr2200/501-1000/FLARE23_0991_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0992_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0993_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0994_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0995_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0996_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0997_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0998_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_0999_0000.nii.gz\"}, {\"image\": \"imagesTr2200/501-1000/FLARE23_1000_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3101_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3102_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3103_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3104_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3105_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3106_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3107_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3108_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3109_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3110_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3111_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3112_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3113_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3114_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3115_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3116_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3117_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3118_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3119_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3120_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3121_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3122_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3123_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3124_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3125_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3126_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3127_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3128_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3129_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3130_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3131_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3132_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3133_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3134_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3135_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3136_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3137_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3138_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3139_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3140_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3141_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3142_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3143_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3144_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3145_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3146_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3147_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3148_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3149_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3150_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3151_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3152_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3153_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3154_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3155_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3156_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3157_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3158_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3159_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3160_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3161_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3162_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3163_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3164_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3165_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3166_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3167_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3168_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3169_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3170_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3171_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3172_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3173_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3174_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3175_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3176_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3177_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3178_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3179_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3180_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3181_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3182_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3183_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3184_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3185_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3186_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3187_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3188_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3189_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3190_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3191_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3192_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3193_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3194_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3195_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3196_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3197_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3198_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3199_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3200_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3201_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3202_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3203_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3204_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3205_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3206_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3207_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3208_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3209_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3210_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3211_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3212_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3213_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3214_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3215_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3216_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3217_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3218_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3219_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3220_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3221_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3222_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3223_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3224_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3225_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3226_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3227_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3228_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3229_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3230_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3231_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3232_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3233_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3234_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3235_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3236_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3237_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3238_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3239_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3240_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3241_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3242_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3243_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3244_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3245_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3246_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3247_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3248_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3249_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3250_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3251_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3252_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3253_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3254_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3255_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3256_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3257_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3258_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3259_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3260_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3261_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3262_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3263_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3264_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3265_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3266_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3267_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3268_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3269_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3270_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3271_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3272_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3273_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3274_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3275_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3276_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3277_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3278_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3279_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3280_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3281_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3282_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3283_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3284_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3285_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3286_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3287_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3288_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3289_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3290_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3291_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3292_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3293_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3294_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3295_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3296_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3297_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3298_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3299_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3300_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3301_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3302_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3303_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3304_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3305_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3306_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3307_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3308_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3309_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3310_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3311_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3312_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3313_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3314_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3315_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3316_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3317_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3318_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3319_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3320_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3321_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3322_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3323_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3324_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3325_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3326_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3327_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3328_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3329_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3330_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3331_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3332_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3333_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3334_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3335_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3336_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3337_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3338_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3339_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3340_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3341_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3342_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3343_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3344_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3345_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3346_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3347_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3348_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3349_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3350_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3351_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3352_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3353_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3354_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3355_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3356_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3357_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3358_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3359_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3360_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3361_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3362_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3363_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3364_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3365_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3366_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3367_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3368_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3369_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3370_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3371_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3372_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3373_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3374_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3375_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3376_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3377_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3378_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3379_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3380_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3381_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3382_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3383_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3384_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3385_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3386_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3387_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3388_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3389_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3390_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3391_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3392_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3393_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3394_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3395_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3396_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3397_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3398_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3399_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3400_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3401_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3402_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3403_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3404_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3405_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3406_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3407_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3408_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3409_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3410_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3411_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3412_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3413_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3414_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3415_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3416_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3417_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3418_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3419_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3420_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3421_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3422_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3423_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3424_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3425_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3426_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3427_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3428_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3429_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3430_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3431_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3432_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3433_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3434_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3435_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3436_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3437_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3438_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3439_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3440_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3441_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3442_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3443_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3444_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3445_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3446_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3447_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3448_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3449_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3450_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3451_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3452_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3453_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3454_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3455_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3456_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3457_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3458_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3459_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3460_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3461_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3462_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3463_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3464_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3465_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3466_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3467_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3468_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3469_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3470_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3471_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3472_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3473_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3474_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3475_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3476_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3477_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3478_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3479_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3480_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3481_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3482_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3483_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3484_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3485_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3486_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3487_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3488_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3489_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3490_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3491_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3492_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3493_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3494_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3495_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3496_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3497_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3498_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3499_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3500_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3501_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3502_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3503_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3504_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3505_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3506_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3507_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3508_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3509_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3510_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3511_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3512_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3513_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3514_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3515_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3516_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3517_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3518_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3519_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3520_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3521_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3522_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3523_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3524_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3525_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3526_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3527_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3528_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3529_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3530_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3531_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3532_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3533_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3534_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3535_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3536_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3537_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3538_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3539_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3540_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3541_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3542_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3543_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3544_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3545_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3546_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3547_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3548_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3549_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3550_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3551_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3552_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3553_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3554_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3555_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3556_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3557_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3558_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3559_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3560_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3561_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3562_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3563_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3564_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3565_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3566_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3567_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3568_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3569_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3570_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3571_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3572_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3573_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3574_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3575_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3576_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3577_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3578_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3579_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3580_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3581_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3582_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3583_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3584_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3585_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3586_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3587_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3588_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3589_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3590_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3591_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3592_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3593_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3594_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3595_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3596_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3597_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3598_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3599_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3600_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3601_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3602_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3603_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3604_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3605_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3606_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3607_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3608_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3609_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3610_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3611_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3612_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3613_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3614_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3615_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3616_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3617_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3618_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3619_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3620_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3621_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3622_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3623_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3624_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3625_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3626_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3627_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3628_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3629_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3630_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3631_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3632_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3633_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3634_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3635_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3636_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3637_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3638_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3639_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3640_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3641_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3642_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3643_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3644_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3645_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3646_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3647_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3648_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3649_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3650_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3651_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3652_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3653_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3654_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3655_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3656_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3657_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3658_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3659_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3660_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3661_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3662_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3663_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3664_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3665_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3666_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3667_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3668_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3669_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3670_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3671_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3672_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3673_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3674_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3675_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3676_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3677_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3678_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3679_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3680_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3681_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3682_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3683_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3684_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3685_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3686_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3687_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3688_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3689_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3690_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3691_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3692_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3693_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3694_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3695_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3696_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3697_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3698_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3699_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3700_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3701_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3702_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3703_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3704_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3705_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3706_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3707_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3708_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3709_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3710_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3711_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3712_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3713_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3714_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3715_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3716_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3717_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3718_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3719_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3720_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3721_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3722_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3723_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3724_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3725_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3726_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3727_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3728_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3729_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3730_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3731_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3732_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3733_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3734_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3735_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3736_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3737_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3738_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3739_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3740_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3741_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3742_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3743_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3744_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3745_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3746_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3747_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3748_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3749_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3750_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3751_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3752_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3753_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3754_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3755_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3756_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3757_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3758_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3759_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3760_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3761_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3762_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3763_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3764_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3765_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3766_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3767_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3768_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3769_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3770_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3771_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3772_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3773_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3774_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3775_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3776_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3777_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3778_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3779_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3780_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3781_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3782_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3783_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3784_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3785_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3786_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3787_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3788_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3789_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3790_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3791_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3792_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3793_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3794_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3795_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3796_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3797_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3798_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3799_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3800_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3801_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3802_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3803_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3804_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3805_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3806_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3807_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3808_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3809_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3810_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3811_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3812_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3813_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3814_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3815_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3816_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3817_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3818_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3819_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3820_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3821_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3822_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3823_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3824_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3825_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3826_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3827_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3828_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3829_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3830_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3831_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3832_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3833_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3834_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3835_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3836_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3837_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3838_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3839_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3840_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3841_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3842_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3843_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3844_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3845_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3846_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3847_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3848_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3849_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3850_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3851_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3852_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3853_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3854_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3855_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3856_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3857_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3858_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3859_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3860_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3861_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3862_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3863_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3864_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3865_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3866_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3867_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3868_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3869_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3870_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3871_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3872_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3873_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3874_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3875_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3876_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3877_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3878_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3879_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3880_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3881_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3882_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3883_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3884_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3885_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3886_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3887_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3888_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3889_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3890_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3891_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3892_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3893_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3894_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3895_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3896_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3897_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3898_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3899_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3900_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3901_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3902_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3903_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3904_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3905_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3906_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3907_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3908_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3909_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3910_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3911_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3912_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3913_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3914_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3915_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3916_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3917_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3918_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3919_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3920_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3921_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3922_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3923_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3924_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3925_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3926_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3927_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3928_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3929_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3930_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3931_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3932_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3933_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3934_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3935_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3936_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3937_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3938_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3939_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3940_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3941_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3942_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3943_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3944_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3945_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3946_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3947_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3948_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3949_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3950_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3951_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3952_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3953_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3954_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3955_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3956_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3957_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3958_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3959_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3960_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3961_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3962_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3963_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3964_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3965_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3966_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3967_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3968_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3969_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3970_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3971_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3972_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3973_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3974_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3975_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3976_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3977_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3978_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3979_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3980_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3981_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3982_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabel3101-4000/FLARE23_3983_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3984_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3985_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3986_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3987_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3988_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3989_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3990_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3991_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3992_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3993_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3994_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3995_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3996_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3997_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3998_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_3999_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabel3101-4000/FLARE23_4000_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2201_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2202_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2203_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2204_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2205_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2206_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2207_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2208_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2209_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2210_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2211_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2212_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2213_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2214_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2215_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2216_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2217_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2218_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2219_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2220_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2221_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2222_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2223_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2224_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2225_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2226_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2227_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2228_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2229_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2230_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2231_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2232_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2233_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2234_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2235_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2236_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2237_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2238_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2239_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2240_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2241_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2242_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2243_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2244_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2245_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2246_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2247_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2248_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2249_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2250_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2251_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2252_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2253_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2254_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2255_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2256_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2257_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2258_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2259_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2260_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2261_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2262_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2263_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2264_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2265_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2266_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2267_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2268_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2269_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2270_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2271_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2272_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2273_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2274_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2275_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2276_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2277_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2278_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2279_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2280_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2281_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2282_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2283_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2284_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2285_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2286_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2287_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2288_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2289_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2290_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2291_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2292_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2293_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2294_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2295_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2296_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2297_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2298_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2299_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2300_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2301_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2302_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2303_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2304_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2305_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2306_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2307_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2308_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2309_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2310_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2311_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2312_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2313_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2314_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2315_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2316_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2317_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2318_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2319_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2320_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2321_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2322_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2323_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2324_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2325_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2326_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2327_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2328_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2329_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2330_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2331_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2332_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2333_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2334_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2335_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2336_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2337_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2338_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2339_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2340_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2341_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2342_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2343_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2344_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2345_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2346_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2347_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2348_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2349_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2350_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2351_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2352_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2353_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2354_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2355_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2356_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2357_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2358_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2359_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2360_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2361_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2362_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2363_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2364_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2365_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2366_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2367_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2368_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2369_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2370_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2371_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2372_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2373_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2374_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2375_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2376_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2377_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2378_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2379_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2380_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2381_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2382_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2383_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2384_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2385_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2386_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2387_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2388_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2389_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2390_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2391_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2392_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2393_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2394_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2395_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2396_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2397_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2398_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2399_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2400_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2401_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2402_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2403_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2404_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2405_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2406_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2407_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2408_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2409_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2410_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2411_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2412_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2413_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2414_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2415_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2416_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2417_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2418_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2419_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2420_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2421_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2422_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2423_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2424_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2425_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2426_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2427_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2428_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2429_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2430_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2431_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2432_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2433_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2434_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2435_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2436_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2437_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2438_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2439_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2440_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2441_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2442_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2443_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2444_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2445_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2446_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2447_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2448_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2449_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2450_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2451_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2452_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2453_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2454_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2455_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2456_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2457_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2458_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2459_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2460_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2461_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2462_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2463_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2464_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2465_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2466_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2467_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2468_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2469_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2470_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2471_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2472_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2473_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2474_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2475_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2476_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2477_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2478_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2479_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2480_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2481_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2482_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2483_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2484_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2485_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2486_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2487_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2488_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2489_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2490_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2491_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2492_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2493_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2494_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2495_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2496_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2497_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2498_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2499_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2500_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2501_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2502_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2503_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2504_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2505_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2506_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2507_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2508_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2509_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2510_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2511_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2512_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2513_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2514_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2515_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2516_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2517_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2518_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2519_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2520_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2521_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2522_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2523_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2524_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2525_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2526_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2527_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2528_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2529_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2530_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2531_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2532_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2533_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2534_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2535_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2536_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2537_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2538_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2539_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2540_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2541_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2542_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2543_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2544_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2545_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2546_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2547_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2548_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2549_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2550_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2551_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2552_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2553_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2554_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2555_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2556_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2557_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2558_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2559_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2560_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2561_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2562_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2563_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2564_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2565_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2566_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2567_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2568_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2569_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2570_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2571_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2572_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2573_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2574_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2575_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2576_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2577_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2578_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2579_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2580_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2581_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2582_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2583_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2584_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2585_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2586_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2587_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2588_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2589_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2590_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2591_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2592_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2593_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2594_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2595_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2596_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2597_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2598_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2599_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2600_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2601_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2602_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2603_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2604_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2605_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2606_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2607_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2608_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2609_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2610_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2611_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2612_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2613_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2614_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2615_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2616_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2617_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2618_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2619_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2620_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2621_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2622_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2623_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2624_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2625_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2626_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2627_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2628_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2629_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2630_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2631_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2632_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2633_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2634_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2635_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2636_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2637_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2638_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2639_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2640_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2641_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2642_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2643_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2644_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2645_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2646_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2647_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2648_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2649_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2650_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2651_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2652_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2653_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2654_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2655_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2656_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2657_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2658_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2659_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2660_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2661_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2662_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2663_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2664_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2665_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2666_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2667_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2668_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2669_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2670_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2671_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2672_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2673_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2674_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2675_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2676_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2677_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2678_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2679_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2680_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2681_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2682_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2683_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2684_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2685_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2686_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2687_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2688_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2689_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2690_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2691_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2692_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2693_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2694_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2695_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2696_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2697_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2698_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2699_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2700_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2701_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2702_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2703_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2704_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2705_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2706_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2707_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2708_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2709_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2710_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2711_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2712_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2713_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2714_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2715_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2716_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2717_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2718_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2719_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2720_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2721_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2722_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2723_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2724_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2725_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2726_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2727_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2728_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2729_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2730_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2731_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2732_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2733_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2734_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2735_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2736_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2737_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2738_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2739_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2740_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2741_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2742_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2743_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2744_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2745_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2746_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2747_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2748_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2749_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2750_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2751_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2752_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2753_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2754_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2755_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2756_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2757_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2758_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2759_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2760_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2761_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2762_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2763_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2764_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2765_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2766_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2767_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2768_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2769_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2770_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2771_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2772_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2773_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2774_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2775_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2776_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2777_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2778_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2779_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2780_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2781_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2782_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2783_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2784_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2785_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2786_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2787_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2788_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2789_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2790_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2791_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2792_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2793_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2794_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2795_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2796_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2797_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2798_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2799_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2800_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2801_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2802_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2803_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2804_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2805_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2806_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2807_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2808_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2809_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2810_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2811_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2812_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2813_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2814_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2815_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2816_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2817_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2818_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2819_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2820_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2821_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2822_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2823_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2824_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2825_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2826_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2827_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2828_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2829_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2830_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2831_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2832_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2833_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2834_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2835_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2836_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2837_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2838_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2839_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2840_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2841_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2842_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2843_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2844_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2845_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2846_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2847_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2848_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2849_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2850_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2851_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2852_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2853_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2854_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2855_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2856_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2857_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2858_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2859_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2860_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2861_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2862_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2863_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2864_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2865_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2866_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2867_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2868_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2869_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2870_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2871_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2872_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2873_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2874_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2875_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2876_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2877_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2878_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2879_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2880_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2881_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2882_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2883_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2884_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2885_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2886_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2887_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2888_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2889_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2890_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2891_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2892_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2893_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2894_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2895_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2896_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2897_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2898_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2899_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2900_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2901_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2902_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2903_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2904_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2905_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2906_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2907_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2908_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2909_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2910_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2911_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2912_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2913_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2914_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2915_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2916_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2917_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2918_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2919_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2920_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2921_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2922_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2923_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2924_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2925_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2926_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2927_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2928_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2929_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2930_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2931_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2932_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2933_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2934_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2935_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2936_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2937_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2938_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2939_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2940_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2941_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2942_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2943_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2944_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2945_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2946_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2947_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2948_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2949_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2950_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2951_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2952_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2953_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2954_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2955_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2956_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2957_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2958_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2959_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2960_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2961_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2962_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2963_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2964_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2965_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2966_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2967_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2968_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2969_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2970_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2971_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2972_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2973_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2974_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2975_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2976_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2977_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2978_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2979_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2980_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2981_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2982_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2983_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2984_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2985_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2986_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2987_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2988_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_2989_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2990_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2991_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2992_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2993_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2994_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2995_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2996_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2997_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2998_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_2999_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3000_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3001_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3002_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3003_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3004_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3005_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3006_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3007_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3008_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3009_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3010_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3011_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3012_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3013_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3014_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_3015_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3016_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3017_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3018_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3019_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3020_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3021_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3022_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3023_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3024_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3025_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3026_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3027_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3028_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3029_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3030_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3031_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3032_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3033_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3034_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3035_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3036_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3037_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3038_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3039_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3040_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_3041_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3042_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3043_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3044_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3045_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3046_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3047_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3048_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3049_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3050_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3051_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3052_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3053_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3054_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3055_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3056_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3057_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3058_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3059_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3060_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3061_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3062_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3063_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3064_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3065_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3066_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_3067_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3068_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3069_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3070_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3071_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3072_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3073_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3074_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3075_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3076_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3077_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3078_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3079_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3080_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3081_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3082_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3083_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3084_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3085_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3086_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3087_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3088_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3089_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3090_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3091_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3092_0000.nii.gz\"}, {\"image\": 
\"unlabelTr1800/unlabelTr2201-3100/FLARE23_3093_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3094_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3095_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3096_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3097_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3098_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3099_0000.nii.gz\"}, {\"image\": \"unlabelTr1800/unlabelTr2201-3100/FLARE23_3100_0000.nii.gz\"}]}"
  },
  {
    "path": "jsons/stoic21.json",
    "content": "{\"training\": [{\"image\": \"nii_gz/9994.nii.gz\"}, {\"image\": \"nii_gz/9978.nii.gz\"}, {\"image\": \"nii_gz/9974.nii.gz\"}, {\"image\": \"nii_gz/9968.nii.gz\"}, {\"image\": \"nii_gz/9966.nii.gz\"}, {\"image\": \"nii_gz/996.nii.gz\"}, {\"image\": \"nii_gz/9957.nii.gz\"}, {\"image\": \"nii_gz/9948.nii.gz\"}, {\"image\": \"nii_gz/9947.nii.gz\"}, {\"image\": \"nii_gz/9937.nii.gz\"}, {\"image\": \"nii_gz/9934.nii.gz\"}, {\"image\": \"nii_gz/9933.nii.gz\"}, {\"image\": \"nii_gz/9929.nii.gz\"}, {\"image\": \"nii_gz/9922.nii.gz\"}, {\"image\": \"nii_gz/9921.nii.gz\"}, {\"image\": \"nii_gz/9920.nii.gz\"}, {\"image\": \"nii_gz/9918.nii.gz\"}, {\"image\": \"nii_gz/9909.nii.gz\"}, {\"image\": \"nii_gz/9899.nii.gz\"}, {\"image\": \"nii_gz/9893.nii.gz\"}, {\"image\": \"nii_gz/9888.nii.gz\"}, {\"image\": \"nii_gz/9887.nii.gz\"}, {\"image\": \"nii_gz/987.nii.gz\"}, {\"image\": \"nii_gz/9862.nii.gz\"}, {\"image\": \"nii_gz/9857.nii.gz\"}, {\"image\": \"nii_gz/9853.nii.gz\"}, {\"image\": \"nii_gz/9849.nii.gz\"}, {\"image\": \"nii_gz/9844.nii.gz\"}, {\"image\": \"nii_gz/984.nii.gz\"}, {\"image\": \"nii_gz/9837.nii.gz\"}, {\"image\": \"nii_gz/9832.nii.gz\"}, {\"image\": \"nii_gz/9818.nii.gz\"}, {\"image\": \"nii_gz/9810.nii.gz\"}, {\"image\": \"nii_gz/9801.nii.gz\"}, {\"image\": \"nii_gz/9789.nii.gz\"}, {\"image\": \"nii_gz/9788.nii.gz\"}, {\"image\": \"nii_gz/9783.nii.gz\"}, {\"image\": \"nii_gz/9776.nii.gz\"}, {\"image\": \"nii_gz/9773.nii.gz\"}, {\"image\": \"nii_gz/9770.nii.gz\"}, {\"image\": \"nii_gz/9766.nii.gz\"}, {\"image\": \"nii_gz/976.nii.gz\"}, {\"image\": \"nii_gz/9759.nii.gz\"}, {\"image\": \"nii_gz/9758.nii.gz\"}, {\"image\": \"nii_gz/9755.nii.gz\"}, {\"image\": \"nii_gz/9754.nii.gz\"}, {\"image\": \"nii_gz/9752.nii.gz\"}, {\"image\": \"nii_gz/9749.nii.gz\"}, {\"image\": \"nii_gz/9741.nii.gz\"}, {\"image\": \"nii_gz/9735.nii.gz\"}, {\"image\": \"nii_gz/9733.nii.gz\"}, {\"image\": \"nii_gz/9732.nii.gz\"}, {\"image\": \"nii_gz/9728.nii.gz\"}, 
{\"image\": \"nii_gz/9724.nii.gz\"}, {\"image\": \"nii_gz/9723.nii.gz\"}, {\"image\": \"nii_gz/9720.nii.gz\"}, {\"image\": \"nii_gz/9716.nii.gz\"}, {\"image\": \"nii_gz/9711.nii.gz\"}, {\"image\": \"nii_gz/9709.nii.gz\"}, {\"image\": \"nii_gz/9708.nii.gz\"}, {\"image\": \"nii_gz/9706.nii.gz\"}, {\"image\": \"nii_gz/97.nii.gz\"}, {\"image\": \"nii_gz/9696.nii.gz\"}, {\"image\": \"nii_gz/9692.nii.gz\"}, {\"image\": \"nii_gz/9678.nii.gz\"}, {\"image\": \"nii_gz/9676.nii.gz\"}, {\"image\": \"nii_gz/9673.nii.gz\"}, {\"image\": \"nii_gz/9670.nii.gz\"}, {\"image\": \"nii_gz/9661.nii.gz\"}, {\"image\": \"nii_gz/9658.nii.gz\"}, {\"image\": \"nii_gz/9650.nii.gz\"}, {\"image\": \"nii_gz/9647.nii.gz\"}, {\"image\": \"nii_gz/9645.nii.gz\"}, {\"image\": \"nii_gz/9644.nii.gz\"}, {\"image\": \"nii_gz/9641.nii.gz\"}, {\"image\": \"nii_gz/9630.nii.gz\"}, {\"image\": \"nii_gz/9626.nii.gz\"}, {\"image\": \"nii_gz/9621.nii.gz\"}, {\"image\": \"nii_gz/9620.nii.gz\"}, {\"image\": \"nii_gz/959.nii.gz\"}, {\"image\": \"nii_gz/9586.nii.gz\"}, {\"image\": \"nii_gz/9585.nii.gz\"}, {\"image\": \"nii_gz/9584.nii.gz\"}, {\"image\": \"nii_gz/9582.nii.gz\"}, {\"image\": \"nii_gz/9577.nii.gz\"}, {\"image\": \"nii_gz/9572.nii.gz\"}, {\"image\": \"nii_gz/9562.nii.gz\"}, {\"image\": \"nii_gz/9561.nii.gz\"}, {\"image\": \"nii_gz/955.nii.gz\"}, {\"image\": \"nii_gz/9546.nii.gz\"}, {\"image\": \"nii_gz/9543.nii.gz\"}, {\"image\": \"nii_gz/9533.nii.gz\"}, {\"image\": \"nii_gz/9531.nii.gz\"}, {\"image\": \"nii_gz/9520.nii.gz\"}, {\"image\": \"nii_gz/952.nii.gz\"}, {\"image\": \"nii_gz/9515.nii.gz\"}, {\"image\": \"nii_gz/9498.nii.gz\"}, {\"image\": \"nii_gz/9497.nii.gz\"}, {\"image\": \"nii_gz/9490.nii.gz\"}, {\"image\": \"nii_gz/9489.nii.gz\"}, {\"image\": \"nii_gz/9487.nii.gz\"}, {\"image\": \"nii_gz/9480.nii.gz\"}, {\"image\": \"nii_gz/9476.nii.gz\"}, {\"image\": \"nii_gz/9473.nii.gz\"}, {\"image\": \"nii_gz/947.nii.gz\"}, {\"image\": \"nii_gz/9459.nii.gz\"}, {\"image\": \"nii_gz/9450.nii.gz\"}, 
{\"image\": \"nii_gz/9448.nii.gz\"}, {\"image\": \"nii_gz/9445.nii.gz\"}, {\"image\": \"nii_gz/9441.nii.gz\"}, {\"image\": \"nii_gz/944.nii.gz\"}, {\"image\": \"nii_gz/9433.nii.gz\"}, {\"image\": \"nii_gz/9427.nii.gz\"}, {\"image\": \"nii_gz/9421.nii.gz\"}, {\"image\": \"nii_gz/9420.nii.gz\"}, {\"image\": \"nii_gz/9410.nii.gz\"}, {\"image\": \"nii_gz/9403.nii.gz\"}, {\"image\": \"nii_gz/9395.nii.gz\"}, {\"image\": \"nii_gz/9387.nii.gz\"}, {\"image\": \"nii_gz/9383.nii.gz\"}, {\"image\": \"nii_gz/9379.nii.gz\"}, {\"image\": \"nii_gz/9372.nii.gz\"}, {\"image\": \"nii_gz/9366.nii.gz\"}, {\"image\": \"nii_gz/9360.nii.gz\"}, {\"image\": \"nii_gz/9359.nii.gz\"}, {\"image\": \"nii_gz/9354.nii.gz\"}, {\"image\": \"nii_gz/9348.nii.gz\"}, {\"image\": \"nii_gz/9341.nii.gz\"}, {\"image\": \"nii_gz/9337.nii.gz\"}, {\"image\": \"nii_gz/9329.nii.gz\"}, {\"image\": \"nii_gz/9325.nii.gz\"}, {\"image\": \"nii_gz/9320.nii.gz\"}, {\"image\": \"nii_gz/9318.nii.gz\"}, {\"image\": \"nii_gz/9313.nii.gz\"}, {\"image\": \"nii_gz/9311.nii.gz\"}, {\"image\": \"nii_gz/9304.nii.gz\"}, {\"image\": \"nii_gz/930.nii.gz\"}, {\"image\": \"nii_gz/9285.nii.gz\"}, {\"image\": \"nii_gz/9272.nii.gz\"}, {\"image\": \"nii_gz/927.nii.gz\"}, {\"image\": \"nii_gz/9269.nii.gz\"}, {\"image\": \"nii_gz/9267.nii.gz\"}, {\"image\": \"nii_gz/9252.nii.gz\"}, {\"image\": \"nii_gz/925.nii.gz\"}, {\"image\": \"nii_gz/9237.nii.gz\"}, {\"image\": \"nii_gz/9234.nii.gz\"}, {\"image\": \"nii_gz/9228.nii.gz\"}, {\"image\": \"nii_gz/9223.nii.gz\"}, {\"image\": \"nii_gz/9220.nii.gz\"}, {\"image\": \"nii_gz/9218.nii.gz\"}, {\"image\": \"nii_gz/9211.nii.gz\"}, {\"image\": \"nii_gz/9210.nii.gz\"}, {\"image\": \"nii_gz/9207.nii.gz\"}, {\"image\": \"nii_gz/9205.nii.gz\"}, {\"image\": \"nii_gz/9201.nii.gz\"}, {\"image\": \"nii_gz/92.nii.gz\"}, {\"image\": \"nii_gz/9195.nii.gz\"}, {\"image\": \"nii_gz/9189.nii.gz\"}, {\"image\": \"nii_gz/9180.nii.gz\"}, {\"image\": \"nii_gz/9176.nii.gz\"}, {\"image\": \"nii_gz/9169.nii.gz\"}, 
{\"image\": \"nii_gz/9168.nii.gz\"}, {\"image\": \"nii_gz/9165.nii.gz\"}, {\"image\": \"nii_gz/916.nii.gz\"}, {\"image\": \"nii_gz/9157.nii.gz\"}, {\"image\": \"nii_gz/9154.nii.gz\"}, {\"image\": \"nii_gz/9153.nii.gz\"}, {\"image\": \"nii_gz/9141.nii.gz\"}, {\"image\": \"nii_gz/9128.nii.gz\"}, {\"image\": \"nii_gz/9116.nii.gz\"}, {\"image\": \"nii_gz/9113.nii.gz\"}, {\"image\": \"nii_gz/9111.nii.gz\"}, {\"image\": \"nii_gz/9104.nii.gz\"}, {\"image\": \"nii_gz/9102.nii.gz\"}, {\"image\": \"nii_gz/9098.nii.gz\"}, {\"image\": \"nii_gz/9096.nii.gz\"}, {\"image\": \"nii_gz/9075.nii.gz\"}, {\"image\": \"nii_gz/9072.nii.gz\"}, {\"image\": \"nii_gz/9071.nii.gz\"}, {\"image\": \"nii_gz/9058.nii.gz\"}, {\"image\": \"nii_gz/9056.nii.gz\"}, {\"image\": \"nii_gz/9044.nii.gz\"}, {\"image\": \"nii_gz/9043.nii.gz\"}, {\"image\": \"nii_gz/9032.nii.gz\"}, {\"image\": \"nii_gz/9023.nii.gz\"}, {\"image\": \"nii_gz/902.nii.gz\"}, {\"image\": \"nii_gz/901.nii.gz\"}, {\"image\": \"nii_gz/9004.nii.gz\"}, {\"image\": \"nii_gz/9003.nii.gz\"}, {\"image\": \"nii_gz/9002.nii.gz\"}, {\"image\": \"nii_gz/9001.nii.gz\"}, {\"image\": \"nii_gz/8999.nii.gz\"}, {\"image\": \"nii_gz/8998.nii.gz\"}, {\"image\": \"nii_gz/8996.nii.gz\"}, {\"image\": \"nii_gz/8977.nii.gz\"}, {\"image\": \"nii_gz/8974.nii.gz\"}, {\"image\": \"nii_gz/8972.nii.gz\"}, {\"image\": \"nii_gz/8970.nii.gz\"}, {\"image\": \"nii_gz/8958.nii.gz\"}, {\"image\": \"nii_gz/8952.nii.gz\"}, {\"image\": \"nii_gz/8950.nii.gz\"}, {\"image\": \"nii_gz/8949.nii.gz\"}, {\"image\": \"nii_gz/8948.nii.gz\"}, {\"image\": \"nii_gz/8945.nii.gz\"}, {\"image\": \"nii_gz/8932.nii.gz\"}, {\"image\": \"nii_gz/8930.nii.gz\"}, {\"image\": \"nii_gz/8928.nii.gz\"}, {\"image\": \"nii_gz/8926.nii.gz\"}, {\"image\": \"nii_gz/892.nii.gz\"}, {\"image\": \"nii_gz/8917.nii.gz\"}, {\"image\": \"nii_gz/8910.nii.gz\"}, {\"image\": \"nii_gz/891.nii.gz\"}, {\"image\": \"nii_gz/8907.nii.gz\"}, {\"image\": \"nii_gz/8906.nii.gz\"}, {\"image\": \"nii_gz/8892.nii.gz\"}, 
{\"image\": \"nii_gz/8889.nii.gz\"}, {\"image\": \"nii_gz/8880.nii.gz\"}, {\"image\": \"nii_gz/8868.nii.gz\"}, {\"image\": \"nii_gz/886.nii.gz\"}, {\"image\": \"nii_gz/8853.nii.gz\"}, {\"image\": \"nii_gz/8845.nii.gz\"}, {\"image\": \"nii_gz/8839.nii.gz\"}, {\"image\": \"nii_gz/8838.nii.gz\"}, {\"image\": \"nii_gz/8837.nii.gz\"}, {\"image\": \"nii_gz/8833.nii.gz\"}, {\"image\": \"nii_gz/883.nii.gz\"}, {\"image\": \"nii_gz/8828.nii.gz\"}, {\"image\": \"nii_gz/8823.nii.gz\"}, {\"image\": \"nii_gz/8813.nii.gz\"}, {\"image\": \"nii_gz/8807.nii.gz\"}, {\"image\": \"nii_gz/8804.nii.gz\"}, {\"image\": \"nii_gz/88.nii.gz\"}, {\"image\": \"nii_gz/8792.nii.gz\"}, {\"image\": \"nii_gz/8790.nii.gz\"}, {\"image\": \"nii_gz/8788.nii.gz\"}, {\"image\": \"nii_gz/8784.nii.gz\"}, {\"image\": \"nii_gz/8782.nii.gz\"}, {\"image\": \"nii_gz/8780.nii.gz\"}, {\"image\": \"nii_gz/8776.nii.gz\"}, {\"image\": \"nii_gz/8766.nii.gz\"}, {\"image\": \"nii_gz/8765.nii.gz\"}, {\"image\": \"nii_gz/8758.nii.gz\"}, {\"image\": \"nii_gz/8757.nii.gz\"}, {\"image\": \"nii_gz/8752.nii.gz\"}, {\"image\": \"nii_gz/8751.nii.gz\"}, {\"image\": \"nii_gz/8746.nii.gz\"}, {\"image\": \"nii_gz/8736.nii.gz\"}, {\"image\": \"nii_gz/8734.nii.gz\"}, {\"image\": \"nii_gz/8726.nii.gz\"}, {\"image\": \"nii_gz/8724.nii.gz\"}, {\"image\": \"nii_gz/8708.nii.gz\"}, {\"image\": \"nii_gz/8703.nii.gz\"}, {\"image\": \"nii_gz/8699.nii.gz\"}, {\"image\": \"nii_gz/8694.nii.gz\"}, {\"image\": \"nii_gz/8692.nii.gz\"}, {\"image\": \"nii_gz/8690.nii.gz\"}, {\"image\": \"nii_gz/8682.nii.gz\"}, {\"image\": \"nii_gz/868.nii.gz\"}, {\"image\": \"nii_gz/8675.nii.gz\"}, {\"image\": \"nii_gz/8673.nii.gz\"}, {\"image\": \"nii_gz/8670.nii.gz\"}, {\"image\": \"nii_gz/8656.nii.gz\"}, {\"image\": \"nii_gz/8644.nii.gz\"}, {\"image\": \"nii_gz/8631.nii.gz\"}, {\"image\": \"nii_gz/8626.nii.gz\"}, {\"image\": \"nii_gz/8625.nii.gz\"}, {\"image\": \"nii_gz/8622.nii.gz\"}, {\"image\": \"nii_gz/8617.nii.gz\"}, {\"image\": \"nii_gz/8613.nii.gz\"}, 
{\"image\": \"nii_gz/861.nii.gz\"}, {\"image\": \"nii_gz/8609.nii.gz\"}, {\"image\": \"nii_gz/8607.nii.gz\"}, {\"image\": \"nii_gz/8603.nii.gz\"}, {\"image\": \"nii_gz/8597.nii.gz\"}, {\"image\": \"nii_gz/8585.nii.gz\"}, {\"image\": \"nii_gz/8582.nii.gz\"}, {\"image\": \"nii_gz/8577.nii.gz\"}, {\"image\": \"nii_gz/8576.nii.gz\"}, {\"image\": \"nii_gz/8568.nii.gz\"}, {\"image\": \"nii_gz/8563.nii.gz\"}, {\"image\": \"nii_gz/8560.nii.gz\"}, {\"image\": \"nii_gz/8557.nii.gz\"}, {\"image\": \"nii_gz/8551.nii.gz\"}, {\"image\": \"nii_gz/8541.nii.gz\"}, {\"image\": \"nii_gz/8540.nii.gz\"}, {\"image\": \"nii_gz/8537.nii.gz\"}, {\"image\": \"nii_gz/8534.nii.gz\"}, {\"image\": \"nii_gz/8530.nii.gz\"}, {\"image\": \"nii_gz/8529.nii.gz\"}, {\"image\": \"nii_gz/8521.nii.gz\"}, {\"image\": \"nii_gz/8519.nii.gz\"}, {\"image\": \"nii_gz/8511.nii.gz\"}, {\"image\": \"nii_gz/8509.nii.gz\"}, {\"image\": \"nii_gz/8504.nii.gz\"}, {\"image\": \"nii_gz/8500.nii.gz\"}, {\"image\": \"nii_gz/85.nii.gz\"}, {\"image\": \"nii_gz/8495.nii.gz\"}, {\"image\": \"nii_gz/8489.nii.gz\"}, {\"image\": \"nii_gz/8488.nii.gz\"}, {\"image\": \"nii_gz/8486.nii.gz\"}, {\"image\": \"nii_gz/8483.nii.gz\"}, {\"image\": \"nii_gz/848.nii.gz\"}, {\"image\": \"nii_gz/8479.nii.gz\"}, {\"image\": \"nii_gz/8478.nii.gz\"}, {\"image\": \"nii_gz/8470.nii.gz\"}, {\"image\": \"nii_gz/8469.nii.gz\"}, {\"image\": \"nii_gz/8467.nii.gz\"}, {\"image\": \"nii_gz/8462.nii.gz\"}, {\"image\": \"nii_gz/8457.nii.gz\"}, {\"image\": \"nii_gz/8453.nii.gz\"}, {\"image\": \"nii_gz/8449.nii.gz\"}, {\"image\": \"nii_gz/8442.nii.gz\"}, {\"image\": \"nii_gz/8440.nii.gz\"}, {\"image\": \"nii_gz/8439.nii.gz\"}, {\"image\": \"nii_gz/8432.nii.gz\"}, {\"image\": \"nii_gz/8429.nii.gz\"}, {\"image\": \"nii_gz/8426.nii.gz\"}, {\"image\": \"nii_gz/842.nii.gz\"}, {\"image\": \"nii_gz/8417.nii.gz\"}, {\"image\": \"nii_gz/8416.nii.gz\"}, {\"image\": \"nii_gz/8413.nii.gz\"}, {\"image\": \"nii_gz/8410.nii.gz\"}, {\"image\": \"nii_gz/8407.nii.gz\"}, 
{\"image\": \"nii_gz/8401.nii.gz\"}, {\"image\": \"nii_gz/839.nii.gz\"}, {\"image\": \"nii_gz/8389.nii.gz\"}, {\"image\": \"nii_gz/8382.nii.gz\"}, {\"image\": \"nii_gz/838.nii.gz\"}, {\"image\": \"nii_gz/8366.nii.gz\"}, {\"image\": \"nii_gz/8364.nii.gz\"}, {\"image\": \"nii_gz/836.nii.gz\"}, {\"image\": \"nii_gz/8359.nii.gz\"}, {\"image\": \"nii_gz/8350.nii.gz\"}, {\"image\": \"nii_gz/8348.nii.gz\"}, {\"image\": \"nii_gz/8338.nii.gz\"}, {\"image\": \"nii_gz/8337.nii.gz\"}, {\"image\": \"nii_gz/8325.nii.gz\"}, {\"image\": \"nii_gz/8322.nii.gz\"}, {\"image\": \"nii_gz/8319.nii.gz\"}, {\"image\": \"nii_gz/8318.nii.gz\"}, {\"image\": \"nii_gz/831.nii.gz\"}, {\"image\": \"nii_gz/8303.nii.gz\"}, {\"image\": \"nii_gz/83.nii.gz\"}, {\"image\": \"nii_gz/8290.nii.gz\"}, {\"image\": \"nii_gz/8281.nii.gz\"}, {\"image\": \"nii_gz/8278.nii.gz\"}, {\"image\": \"nii_gz/827.nii.gz\"}, {\"image\": \"nii_gz/8268.nii.gz\"}, {\"image\": \"nii_gz/8265.nii.gz\"}, {\"image\": \"nii_gz/8263.nii.gz\"}, {\"image\": \"nii_gz/8260.nii.gz\"}, {\"image\": \"nii_gz/8256.nii.gz\"}, {\"image\": \"nii_gz/8237.nii.gz\"}, {\"image\": \"nii_gz/8229.nii.gz\"}, {\"image\": \"nii_gz/8228.nii.gz\"}, {\"image\": \"nii_gz/8227.nii.gz\"}, {\"image\": \"nii_gz/8222.nii.gz\"}, {\"image\": \"nii_gz/8221.nii.gz\"}, {\"image\": \"nii_gz/822.nii.gz\"}, {\"image\": \"nii_gz/8209.nii.gz\"}, {\"image\": \"nii_gz/8202.nii.gz\"}, {\"image\": \"nii_gz/8198.nii.gz\"}, {\"image\": \"nii_gz/8194.nii.gz\"}, {\"image\": \"nii_gz/8190.nii.gz\"}, {\"image\": \"nii_gz/8189.nii.gz\"}, {\"image\": \"nii_gz/8188.nii.gz\"}, {\"image\": \"nii_gz/8187.nii.gz\"}, {\"image\": \"nii_gz/8180.nii.gz\"}, {\"image\": \"nii_gz/8170.nii.gz\"}, {\"image\": \"nii_gz/8164.nii.gz\"}, {\"image\": \"nii_gz/8158.nii.gz\"}, {\"image\": \"nii_gz/8156.nii.gz\"}, {\"image\": \"nii_gz/8152.nii.gz\"}, {\"image\": \"nii_gz/8147.nii.gz\"}, {\"image\": \"nii_gz/8145.nii.gz\"}, {\"image\": \"nii_gz/8144.nii.gz\"}, {\"image\": \"nii_gz/8142.nii.gz\"}, 
{\"image\": \"nii_gz/814.nii.gz\"}, {\"image\": \"nii_gz/8137.nii.gz\"}, {\"image\": \"nii_gz/8133.nii.gz\"}, {\"image\": \"nii_gz/8130.nii.gz\"}, {\"image\": \"nii_gz/8113.nii.gz\"}, {\"image\": \"nii_gz/8112.nii.gz\"}, {\"image\": \"nii_gz/8109.nii.gz\"}, {\"image\": \"nii_gz/8105.nii.gz\"}, {\"image\": \"nii_gz/8103.nii.gz\"}, {\"image\": \"nii_gz/8102.nii.gz\"}, {\"image\": \"nii_gz/8094.nii.gz\"}, {\"image\": \"nii_gz/8093.nii.gz\"}, {\"image\": \"nii_gz/8090.nii.gz\"}, {\"image\": \"nii_gz/8085.nii.gz\"}, {\"image\": \"nii_gz/8081.nii.gz\"}, {\"image\": \"nii_gz/8072.nii.gz\"}, {\"image\": \"nii_gz/8060.nii.gz\"}, {\"image\": \"nii_gz/8056.nii.gz\"}, {\"image\": \"nii_gz/8028.nii.gz\"}, {\"image\": \"nii_gz/8027.nii.gz\"}, {\"image\": \"nii_gz/802.nii.gz\"}, {\"image\": \"nii_gz/8018.nii.gz\"}, {\"image\": \"nii_gz/8015.nii.gz\"}, {\"image\": \"nii_gz/7995.nii.gz\"}, {\"image\": \"nii_gz/7982.nii.gz\"}, {\"image\": \"nii_gz/7981.nii.gz\"}, {\"image\": \"nii_gz/7977.nii.gz\"}, {\"image\": \"nii_gz/7973.nii.gz\"}, {\"image\": \"nii_gz/7959.nii.gz\"}, {\"image\": \"nii_gz/7947.nii.gz\"}, {\"image\": \"nii_gz/7940.nii.gz\"}, {\"image\": \"nii_gz/7924.nii.gz\"}, {\"image\": \"nii_gz/792.nii.gz\"}, {\"image\": \"nii_gz/7918.nii.gz\"}, {\"image\": \"nii_gz/7915.nii.gz\"}, {\"image\": \"nii_gz/7906.nii.gz\"}, {\"image\": \"nii_gz/7904.nii.gz\"}, {\"image\": \"nii_gz/7903.nii.gz\"}, {\"image\": \"nii_gz/7901.nii.gz\"}, {\"image\": \"nii_gz/7893.nii.gz\"}, {\"image\": \"nii_gz/789.nii.gz\"}, {\"image\": \"nii_gz/7888.nii.gz\"}, {\"image\": \"nii_gz/7886.nii.gz\"}, {\"image\": \"nii_gz/7885.nii.gz\"}, {\"image\": \"nii_gz/7871.nii.gz\"}, {\"image\": \"nii_gz/7867.nii.gz\"}, {\"image\": \"nii_gz/7866.nii.gz\"}, {\"image\": \"nii_gz/785.nii.gz\"}, {\"image\": \"nii_gz/7843.nii.gz\"}, {\"image\": \"nii_gz/7842.nii.gz\"}, {\"image\": \"nii_gz/7840.nii.gz\"}, {\"image\": \"nii_gz/7831.nii.gz\"}, {\"image\": \"nii_gz/7828.nii.gz\"}, {\"image\": \"nii_gz/7818.nii.gz\"}, 
{\"image\": \"nii_gz/7814.nii.gz\"}, {\"image\": \"nii_gz/781.nii.gz\"}, {\"image\": \"nii_gz/7808.nii.gz\"}, {\"image\": \"nii_gz/7806.nii.gz\"}, {\"image\": \"nii_gz/7805.nii.gz\"}, {\"image\": \"nii_gz/7803.nii.gz\"}, {\"image\": \"nii_gz/7802.nii.gz\"}, {\"image\": \"nii_gz/78.nii.gz\"}, {\"image\": \"nii_gz/7798.nii.gz\"}, {\"image\": \"nii_gz/7792.nii.gz\"}, {\"image\": \"nii_gz/7788.nii.gz\"}, {\"image\": \"nii_gz/7782.nii.gz\"}, {\"image\": \"nii_gz/7777.nii.gz\"}, {\"image\": \"nii_gz/7763.nii.gz\"}, {\"image\": \"nii_gz/7758.nii.gz\"}, {\"image\": \"nii_gz/7752.nii.gz\"}, {\"image\": \"nii_gz/7750.nii.gz\"}, {\"image\": \"nii_gz/7749.nii.gz\"}, {\"image\": \"nii_gz/7744.nii.gz\"}, {\"image\": \"nii_gz/7741.nii.gz\"}, {\"image\": \"nii_gz/7735.nii.gz\"}, {\"image\": \"nii_gz/7734.nii.gz\"}, {\"image\": \"nii_gz/7733.nii.gz\"}, {\"image\": \"nii_gz/7730.nii.gz\"}, {\"image\": \"nii_gz/7728.nii.gz\"}, {\"image\": \"nii_gz/7718.nii.gz\"}, {\"image\": \"nii_gz/7716.nii.gz\"}, {\"image\": \"nii_gz/771.nii.gz\"}, {\"image\": \"nii_gz/7708.nii.gz\"}, {\"image\": \"nii_gz/7703.nii.gz\"}, {\"image\": \"nii_gz/7702.nii.gz\"}, {\"image\": \"nii_gz/7696.nii.gz\"}, {\"image\": \"nii_gz/7694.nii.gz\"}, {\"image\": \"nii_gz/7692.nii.gz\"}, {\"image\": \"nii_gz/7689.nii.gz\"}, {\"image\": \"nii_gz/7688.nii.gz\"}, {\"image\": \"nii_gz/7684.nii.gz\"}, {\"image\": \"nii_gz/768.nii.gz\"}, {\"image\": \"nii_gz/7673.nii.gz\"}, {\"image\": \"nii_gz/7669.nii.gz\"}, {\"image\": \"nii_gz/7663.nii.gz\"}, {\"image\": \"nii_gz/7656.nii.gz\"}, {\"image\": \"nii_gz/7655.nii.gz\"}, {\"image\": \"nii_gz/7653.nii.gz\"}, {\"image\": \"nii_gz/7652.nii.gz\"}, {\"image\": \"nii_gz/7651.nii.gz\"}, {\"image\": \"nii_gz/7648.nii.gz\"}, {\"image\": \"nii_gz/7638.nii.gz\"}, {\"image\": \"nii_gz/7634.nii.gz\"}, {\"image\": \"nii_gz/7624.nii.gz\"}, {\"image\": \"nii_gz/7619.nii.gz\"}, {\"image\": \"nii_gz/7615.nii.gz\"}, {\"image\": \"nii_gz/7614.nii.gz\"}, {\"image\": \"nii_gz/7613.nii.gz\"}, 
{\"image\": \"nii_gz/7611.nii.gz\"}, {\"image\": \"nii_gz/7609.nii.gz\"}, {\"image\": \"nii_gz/7582.nii.gz\"}, {\"image\": \"nii_gz/7572.nii.gz\"}, {\"image\": \"nii_gz/757.nii.gz\"}, {\"image\": \"nii_gz/7563.nii.gz\"}, {\"image\": \"nii_gz/755.nii.gz\"}, {\"image\": \"nii_gz/7537.nii.gz\"}, {\"image\": \"nii_gz/7531.nii.gz\"}, {\"image\": \"nii_gz/753.nii.gz\"}, {\"image\": \"nii_gz/7529.nii.gz\"}, {\"image\": \"nii_gz/7517.nii.gz\"}, {\"image\": \"nii_gz/7516.nii.gz\"}, {\"image\": \"nii_gz/7503.nii.gz\"}, {\"image\": \"nii_gz/7501.nii.gz\"}, {\"image\": \"nii_gz/7500.nii.gz\"}, {\"image\": \"nii_gz/750.nii.gz\"}, {\"image\": \"nii_gz/7499.nii.gz\"}, {\"image\": \"nii_gz/749.nii.gz\"}, {\"image\": \"nii_gz/7488.nii.gz\"}, {\"image\": \"nii_gz/7487.nii.gz\"}, {\"image\": \"nii_gz/7484.nii.gz\"}, {\"image\": \"nii_gz/7480.nii.gz\"}, {\"image\": \"nii_gz/7470.nii.gz\"}, {\"image\": \"nii_gz/747.nii.gz\"}, {\"image\": \"nii_gz/7464.nii.gz\"}, {\"image\": \"nii_gz/7459.nii.gz\"}, {\"image\": \"nii_gz/7448.nii.gz\"}, {\"image\": \"nii_gz/7447.nii.gz\"}, {\"image\": \"nii_gz/7443.nii.gz\"}, {\"image\": \"nii_gz/7434.nii.gz\"}, {\"image\": \"nii_gz/7433.nii.gz\"}, {\"image\": \"nii_gz/7432.nii.gz\"}, {\"image\": \"nii_gz/7430.nii.gz\"}, {\"image\": \"nii_gz/7429.nii.gz\"}, {\"image\": \"nii_gz/7428.nii.gz\"}, {\"image\": \"nii_gz/7421.nii.gz\"}, {\"image\": \"nii_gz/7420.nii.gz\"}, {\"image\": \"nii_gz/7412.nii.gz\"}, {\"image\": \"nii_gz/7410.nii.gz\"}, {\"image\": \"nii_gz/7408.nii.gz\"}, {\"image\": \"nii_gz/7407.nii.gz\"}, {\"image\": \"nii_gz/74.nii.gz\"}, {\"image\": \"nii_gz/7382.nii.gz\"}, {\"image\": \"nii_gz/7378.nii.gz\"}, {\"image\": \"nii_gz/7377.nii.gz\"}, {\"image\": \"nii_gz/7376.nii.gz\"}, {\"image\": \"nii_gz/7349.nii.gz\"}, {\"image\": \"nii_gz/7342.nii.gz\"}, {\"image\": \"nii_gz/7333.nii.gz\"}, {\"image\": \"nii_gz/7331.nii.gz\"}, {\"image\": \"nii_gz/7324.nii.gz\"}, {\"image\": \"nii_gz/7320.nii.gz\"}, {\"image\": \"nii_gz/7310.nii.gz\"}, 
{\"image\": \"nii_gz/7309.nii.gz\"}, {\"image\": \"nii_gz/7306.nii.gz\"}, {\"image\": \"nii_gz/7305.nii.gz\"}, {\"image\": \"nii_gz/7301.nii.gz\"}, {\"image\": \"nii_gz/7300.nii.gz\"}, {\"image\": \"nii_gz/7299.nii.gz\"}, {\"image\": \"nii_gz/7295.nii.gz\"}, {\"image\": \"nii_gz/7293.nii.gz\"}, {\"image\": \"nii_gz/7285.nii.gz\"}, {\"image\": \"nii_gz/7279.nii.gz\"}, {\"image\": \"nii_gz/7273.nii.gz\"}, {\"image\": \"nii_gz/7269.nii.gz\"}, {\"image\": \"nii_gz/7260.nii.gz\"}, {\"image\": \"nii_gz/7257.nii.gz\"}, {\"image\": \"nii_gz/7255.nii.gz\"}, {\"image\": \"nii_gz/724.nii.gz\"}, {\"image\": \"nii_gz/7238.nii.gz\"}, {\"image\": \"nii_gz/7221.nii.gz\"}, {\"image\": \"nii_gz/7218.nii.gz\"}, {\"image\": \"nii_gz/7217.nii.gz\"}, {\"image\": \"nii_gz/7212.nii.gz\"}, {\"image\": \"nii_gz/7198.nii.gz\"}, {\"image\": \"nii_gz/7197.nii.gz\"}, {\"image\": \"nii_gz/7194.nii.gz\"}, {\"image\": \"nii_gz/7192.nii.gz\"}, {\"image\": \"nii_gz/7187.nii.gz\"}, {\"image\": \"nii_gz/7178.nii.gz\"}, {\"image\": \"nii_gz/7174.nii.gz\"}, {\"image\": \"nii_gz/7169.nii.gz\"}, {\"image\": \"nii_gz/7165.nii.gz\"}, {\"image\": \"nii_gz/7160.nii.gz\"}, {\"image\": \"nii_gz/7158.nii.gz\"}, {\"image\": \"nii_gz/7141.nii.gz\"}, {\"image\": \"nii_gz/7136.nii.gz\"}, {\"image\": \"nii_gz/7135.nii.gz\"}, {\"image\": \"nii_gz/7121.nii.gz\"}, {\"image\": \"nii_gz/7117.nii.gz\"}, {\"image\": \"nii_gz/7103.nii.gz\"}, {\"image\": \"nii_gz/7090.nii.gz\"}, {\"image\": \"nii_gz/7086.nii.gz\"}, {\"image\": \"nii_gz/7081.nii.gz\"}, {\"image\": \"nii_gz/7077.nii.gz\"}, {\"image\": \"nii_gz/7068.nii.gz\"}, {\"image\": \"nii_gz/7066.nii.gz\"}, {\"image\": \"nii_gz/7065.nii.gz\"}, {\"image\": \"nii_gz/7060.nii.gz\"}, {\"image\": \"nii_gz/7053.nii.gz\"}, {\"image\": \"nii_gz/705.nii.gz\"}, {\"image\": \"nii_gz/7041.nii.gz\"}, {\"image\": \"nii_gz/7035.nii.gz\"}, {\"image\": \"nii_gz/7026.nii.gz\"}, {\"image\": \"nii_gz/7024.nii.gz\"}, {\"image\": \"nii_gz/7022.nii.gz\"}, {\"image\": \"nii_gz/7017.nii.gz\"}, 
{\"image\": \"nii_gz/7012.nii.gz\"}, {\"image\": \"nii_gz/701.nii.gz\"}, {\"image\": \"nii_gz/7006.nii.gz\"}, {\"image\": \"nii_gz/7004.nii.gz\"}, {\"image\": \"nii_gz/7002.nii.gz\"}, {\"image\": \"nii_gz/70.nii.gz\"}, {\"image\": \"nii_gz/6999.nii.gz\"}, {\"image\": \"nii_gz/6990.nii.gz\"}, {\"image\": \"nii_gz/6986.nii.gz\"}, {\"image\": \"nii_gz/6981.nii.gz\"}, {\"image\": \"nii_gz/6979.nii.gz\"}, {\"image\": \"nii_gz/6966.nii.gz\"}, {\"image\": \"nii_gz/6965.nii.gz\"}, {\"image\": \"nii_gz/6964.nii.gz\"}, {\"image\": \"nii_gz/6959.nii.gz\"}, {\"image\": \"nii_gz/6958.nii.gz\"}, {\"image\": \"nii_gz/695.nii.gz\"}, {\"image\": \"nii_gz/6947.nii.gz\"}, {\"image\": \"nii_gz/6941.nii.gz\"}, {\"image\": \"nii_gz/6930.nii.gz\"}, {\"image\": \"nii_gz/693.nii.gz\"}, {\"image\": \"nii_gz/6923.nii.gz\"}, {\"image\": \"nii_gz/692.nii.gz\"}, {\"image\": \"nii_gz/6918.nii.gz\"}, {\"image\": \"nii_gz/6913.nii.gz\"}, {\"image\": \"nii_gz/6909.nii.gz\"}, {\"image\": \"nii_gz/6902.nii.gz\"}, {\"image\": \"nii_gz/6901.nii.gz\"}, {\"image\": \"nii_gz/6899.nii.gz\"}, {\"image\": \"nii_gz/689.nii.gz\"}, {\"image\": \"nii_gz/6889.nii.gz\"}, {\"image\": \"nii_gz/6882.nii.gz\"}, {\"image\": \"nii_gz/6878.nii.gz\"}, {\"image\": \"nii_gz/6871.nii.gz\"}, {\"image\": \"nii_gz/6859.nii.gz\"}, {\"image\": \"nii_gz/6853.nii.gz\"}, {\"image\": \"nii_gz/6851.nii.gz\"}, {\"image\": \"nii_gz/685.nii.gz\"}, {\"image\": \"nii_gz/6844.nii.gz\"}, {\"image\": \"nii_gz/6843.nii.gz\"}, {\"image\": \"nii_gz/6842.nii.gz\"}, {\"image\": \"nii_gz/684.nii.gz\"}, {\"image\": \"nii_gz/6833.nii.gz\"}, {\"image\": \"nii_gz/6814.nii.gz\"}, {\"image\": \"nii_gz/6809.nii.gz\"}, {\"image\": \"nii_gz/6804.nii.gz\"}, {\"image\": \"nii_gz/6802.nii.gz\"}, {\"image\": \"nii_gz/68.nii.gz\"}, {\"image\": \"nii_gz/6792.nii.gz\"}, {\"image\": \"nii_gz/6781.nii.gz\"}, {\"image\": \"nii_gz/6775.nii.gz\"}, {\"image\": \"nii_gz/6774.nii.gz\"}, {\"image\": \"nii_gz/6773.nii.gz\"}, {\"image\": \"nii_gz/6766.nii.gz\"}, {\"image\": 
\"nii_gz/6765.nii.gz\"}, {\"image\": \"nii_gz/676.nii.gz\"}, {\"image\": \"nii_gz/6756.nii.gz\"}, {\"image\": \"nii_gz/6755.nii.gz\"}, {\"image\": \"nii_gz/6749.nii.gz\"}, {\"image\": \"nii_gz/6745.nii.gz\"}, {\"image\": \"nii_gz/6744.nii.gz\"}, {\"image\": \"nii_gz/674.nii.gz\"}, {\"image\": \"nii_gz/6730.nii.gz\"}, {\"image\": \"nii_gz/6729.nii.gz\"}, {\"image\": \"nii_gz/6728.nii.gz\"}, {\"image\": \"nii_gz/6715.nii.gz\"}, {\"image\": \"nii_gz/6713.nii.gz\"}, {\"image\": \"nii_gz/6706.nii.gz\"}, {\"image\": \"nii_gz/6697.nii.gz\"}, {\"image\": \"nii_gz/6688.nii.gz\"}, {\"image\": \"nii_gz/6684.nii.gz\"}, {\"image\": \"nii_gz/6679.nii.gz\"}, {\"image\": \"nii_gz/6671.nii.gz\"}, {\"image\": \"nii_gz/6670.nii.gz\"}, {\"image\": \"nii_gz/6665.nii.gz\"}, {\"image\": \"nii_gz/6662.nii.gz\"}, {\"image\": \"nii_gz/6660.nii.gz\"}, {\"image\": \"nii_gz/6653.nii.gz\"}, {\"image\": \"nii_gz/6651.nii.gz\"}, {\"image\": \"nii_gz/665.nii.gz\"}, {\"image\": \"nii_gz/6646.nii.gz\"}, {\"image\": \"nii_gz/6636.nii.gz\"}, {\"image\": \"nii_gz/6635.nii.gz\"}, {\"image\": \"nii_gz/6631.nii.gz\"}, {\"image\": \"nii_gz/6624.nii.gz\"}, {\"image\": \"nii_gz/6619.nii.gz\"}, {\"image\": \"nii_gz/6605.nii.gz\"}, {\"image\": \"nii_gz/6601.nii.gz\"}, {\"image\": \"nii_gz/6598.nii.gz\"}, {\"image\": \"nii_gz/6589.nii.gz\"}, {\"image\": \"nii_gz/658.nii.gz\"}, {\"image\": \"nii_gz/6575.nii.gz\"}, {\"image\": \"nii_gz/657.nii.gz\"}, {\"image\": \"nii_gz/6566.nii.gz\"}, {\"image\": \"nii_gz/6554.nii.gz\"}, {\"image\": \"nii_gz/6533.nii.gz\"}, {\"image\": \"nii_gz/6531.nii.gz\"}, {\"image\": \"nii_gz/6517.nii.gz\"}, {\"image\": \"nii_gz/6516.nii.gz\"}, {\"image\": \"nii_gz/6496.nii.gz\"}, {\"image\": \"nii_gz/6484.nii.gz\"}, {\"image\": \"nii_gz/6472.nii.gz\"}, {\"image\": \"nii_gz/6469.nii.gz\"}, {\"image\": \"nii_gz/6466.nii.gz\"}, {\"image\": \"nii_gz/6452.nii.gz\"}, {\"image\": \"nii_gz/6446.nii.gz\"}, {\"image\": \"nii_gz/6444.nii.gz\"}, {\"image\": \"nii_gz/6443.nii.gz\"}, {\"image\": 
\"nii_gz/6435.nii.gz\"}, {\"image\": \"nii_gz/6433.nii.gz\"}, {\"image\": \"nii_gz/643.nii.gz\"}, {\"image\": \"nii_gz/6429.nii.gz\"}, {\"image\": \"nii_gz/6426.nii.gz\"}, {\"image\": \"nii_gz/6425.nii.gz\"}, {\"image\": \"nii_gz/6419.nii.gz\"}, {\"image\": \"nii_gz/6416.nii.gz\"}, {\"image\": \"nii_gz/6395.nii.gz\"}, {\"image\": \"nii_gz/6387.nii.gz\"}, {\"image\": \"nii_gz/6385.nii.gz\"}, {\"image\": \"nii_gz/638.nii.gz\"}, {\"image\": \"nii_gz/6379.nii.gz\"}, {\"image\": \"nii_gz/6374.nii.gz\"}, {\"image\": \"nii_gz/6366.nii.gz\"}, {\"image\": \"nii_gz/6362.nii.gz\"}, {\"image\": \"nii_gz/636.nii.gz\"}, {\"image\": \"nii_gz/6353.nii.gz\"}, {\"image\": \"nii_gz/6349.nii.gz\"}, {\"image\": \"nii_gz/6348.nii.gz\"}, {\"image\": \"nii_gz/6346.nii.gz\"}, {\"image\": \"nii_gz/6345.nii.gz\"}, {\"image\": \"nii_gz/6344.nii.gz\"}, {\"image\": \"nii_gz/6340.nii.gz\"}, {\"image\": \"nii_gz/634.nii.gz\"}, {\"image\": \"nii_gz/633.nii.gz\"}, {\"image\": \"nii_gz/6329.nii.gz\"}, {\"image\": \"nii_gz/6328.nii.gz\"}, {\"image\": \"nii_gz/6324.nii.gz\"}, {\"image\": \"nii_gz/631.nii.gz\"}, {\"image\": \"nii_gz/6308.nii.gz\"}, {\"image\": \"nii_gz/6306.nii.gz\"}, {\"image\": \"nii_gz/6303.nii.gz\"}, {\"image\": \"nii_gz/6297.nii.gz\"}, {\"image\": \"nii_gz/6293.nii.gz\"}, {\"image\": \"nii_gz/6291.nii.gz\"}, {\"image\": \"nii_gz/6283.nii.gz\"}, {\"image\": \"nii_gz/628.nii.gz\"}, {\"image\": \"nii_gz/6273.nii.gz\"}, {\"image\": \"nii_gz/6272.nii.gz\"}, {\"image\": \"nii_gz/6267.nii.gz\"}, {\"image\": \"nii_gz/6266.nii.gz\"}, {\"image\": \"nii_gz/6257.nii.gz\"}, {\"image\": \"nii_gz/6254.nii.gz\"}, {\"image\": \"nii_gz/6250.nii.gz\"}, {\"image\": \"nii_gz/6236.nii.gz\"}, {\"image\": \"nii_gz/6232.nii.gz\"}, {\"image\": \"nii_gz/623.nii.gz\"}, {\"image\": \"nii_gz/6229.nii.gz\"}, {\"image\": \"nii_gz/6227.nii.gz\"}, {\"image\": \"nii_gz/6213.nii.gz\"}, {\"image\": \"nii_gz/6206.nii.gz\"}, {\"image\": \"nii_gz/6201.nii.gz\"}, {\"image\": \"nii_gz/620.nii.gz\"}, {\"image\": 
\"nii_gz/6187.nii.gz\"}, {\"image\": \"nii_gz/6185.nii.gz\"}, {\"image\": \"nii_gz/6184.nii.gz\"}, {\"image\": \"nii_gz/6178.nii.gz\"}, {\"image\": \"nii_gz/6167.nii.gz\"}, {\"image\": \"nii_gz/616.nii.gz\"}, {\"image\": \"nii_gz/6159.nii.gz\"}, {\"image\": \"nii_gz/6148.nii.gz\"}, {\"image\": \"nii_gz/6145.nii.gz\"}, {\"image\": \"nii_gz/6143.nii.gz\"}, {\"image\": \"nii_gz/6126.nii.gz\"}, {\"image\": \"nii_gz/6124.nii.gz\"}, {\"image\": \"nii_gz/6112.nii.gz\"}, {\"image\": \"nii_gz/6111.nii.gz\"}, {\"image\": \"nii_gz/6090.nii.gz\"}, {\"image\": \"nii_gz/6084.nii.gz\"}, {\"image\": \"nii_gz/6082.nii.gz\"}, {\"image\": \"nii_gz/6080.nii.gz\"}, {\"image\": \"nii_gz/608.nii.gz\"}, {\"image\": \"nii_gz/6077.nii.gz\"}, {\"image\": \"nii_gz/6071.nii.gz\"}, {\"image\": \"nii_gz/6066.nii.gz\"}, {\"image\": \"nii_gz/6058.nii.gz\"}, {\"image\": \"nii_gz/6050.nii.gz\"}, {\"image\": \"nii_gz/6041.nii.gz\"}, {\"image\": \"nii_gz/6035.nii.gz\"}, {\"image\": \"nii_gz/6033.nii.gz\"}, {\"image\": \"nii_gz/6023.nii.gz\"}, {\"image\": \"nii_gz/6019.nii.gz\"}, {\"image\": \"nii_gz/6016.nii.gz\"}, {\"image\": \"nii_gz/6013.nii.gz\"}, {\"image\": \"nii_gz/6.nii.gz\"}, {\"image\": \"nii_gz/5993.nii.gz\"}, {\"image\": \"nii_gz/5992.nii.gz\"}, {\"image\": \"nii_gz/5991.nii.gz\"}, {\"image\": \"nii_gz/5977.nii.gz\"}, {\"image\": \"nii_gz/5974.nii.gz\"}, {\"image\": \"nii_gz/597.nii.gz\"}, {\"image\": \"nii_gz/5959.nii.gz\"}, {\"image\": \"nii_gz/5958.nii.gz\"}, {\"image\": \"nii_gz/5955.nii.gz\"}, {\"image\": \"nii_gz/5954.nii.gz\"}, {\"image\": \"nii_gz/5947.nii.gz\"}, {\"image\": \"nii_gz/5945.nii.gz\"}, {\"image\": \"nii_gz/5944.nii.gz\"}, {\"image\": \"nii_gz/5927.nii.gz\"}, {\"image\": \"nii_gz/5924.nii.gz\"}, {\"image\": \"nii_gz/5918.nii.gz\"}, {\"image\": \"nii_gz/5910.nii.gz\"}, {\"image\": \"nii_gz/589.nii.gz\"}, {\"image\": \"nii_gz/5888.nii.gz\"}, {\"image\": \"nii_gz/588.nii.gz\"}, {\"image\": \"nii_gz/5874.nii.gz\"}, {\"image\": \"nii_gz/5870.nii.gz\"}, {\"image\": 
\"nii_gz/587.nii.gz\"}, {\"image\": \"nii_gz/5867.nii.gz\"}, {\"image\": \"nii_gz/5862.nii.gz\"}, {\"image\": \"nii_gz/5860.nii.gz\"}, {\"image\": \"nii_gz/5848.nii.gz\"}, {\"image\": \"nii_gz/5841.nii.gz\"}, {\"image\": \"nii_gz/5836.nii.gz\"}, {\"image\": \"nii_gz/5822.nii.gz\"}, {\"image\": \"nii_gz/5820.nii.gz\"}, {\"image\": \"nii_gz/582.nii.gz\"}, {\"image\": \"nii_gz/5818.nii.gz\"}, {\"image\": \"nii_gz/5812.nii.gz\"}, {\"image\": \"nii_gz/5808.nii.gz\"}, {\"image\": \"nii_gz/5806.nii.gz\"}, {\"image\": \"nii_gz/5804.nii.gz\"}, {\"image\": \"nii_gz/5793.nii.gz\"}, {\"image\": \"nii_gz/5791.nii.gz\"}, {\"image\": \"nii_gz/5786.nii.gz\"}, {\"image\": \"nii_gz/5785.nii.gz\"}, {\"image\": \"nii_gz/5783.nii.gz\"}, {\"image\": \"nii_gz/5779.nii.gz\"}, {\"image\": \"nii_gz/5765.nii.gz\"}, {\"image\": \"nii_gz/5762.nii.gz\"}, {\"image\": \"nii_gz/576.nii.gz\"}, {\"image\": \"nii_gz/5759.nii.gz\"}, {\"image\": \"nii_gz/5755.nii.gz\"}, {\"image\": \"nii_gz/5751.nii.gz\"}, {\"image\": \"nii_gz/5746.nii.gz\"}, {\"image\": \"nii_gz/5745.nii.gz\"}, {\"image\": \"nii_gz/5739.nii.gz\"}, {\"image\": \"nii_gz/5734.nii.gz\"}, {\"image\": \"nii_gz/5731.nii.gz\"}, {\"image\": \"nii_gz/5725.nii.gz\"}, {\"image\": \"nii_gz/5724.nii.gz\"}, {\"image\": \"nii_gz/5716.nii.gz\"}, {\"image\": \"nii_gz/5709.nii.gz\"}, {\"image\": \"nii_gz/5689.nii.gz\"}, {\"image\": \"nii_gz/568.nii.gz\"}, {\"image\": \"nii_gz/5676.nii.gz\"}, {\"image\": \"nii_gz/5672.nii.gz\"}, {\"image\": \"nii_gz/5670.nii.gz\"}, {\"image\": \"nii_gz/5665.nii.gz\"}, {\"image\": \"nii_gz/5662.nii.gz\"}, {\"image\": \"nii_gz/5658.nii.gz\"}, {\"image\": \"nii_gz/5650.nii.gz\"}, {\"image\": \"nii_gz/565.nii.gz\"}, {\"image\": \"nii_gz/5648.nii.gz\"}, {\"image\": \"nii_gz/5647.nii.gz\"}, {\"image\": \"nii_gz/5640.nii.gz\"}, {\"image\": \"nii_gz/564.nii.gz\"}, {\"image\": \"nii_gz/5631.nii.gz\"}, {\"image\": \"nii_gz/5630.nii.gz\"}, {\"image\": \"nii_gz/563.nii.gz\"}, {\"image\": \"nii_gz/5628.nii.gz\"}, {\"image\": 
\"nii_gz/5623.nii.gz\"}, {\"image\": \"nii_gz/5617.nii.gz\"}, {\"image\": \"nii_gz/5613.nii.gz\"}, {\"image\": \"nii_gz/5612.nii.gz\"}, {\"image\": \"nii_gz/5611.nii.gz\"}, {\"image\": \"nii_gz/5610.nii.gz\"}, {\"image\": \"nii_gz/5609.nii.gz\"}, {\"image\": \"nii_gz/5601.nii.gz\"}, {\"image\": \"nii_gz/5600.nii.gz\"}, {\"image\": \"nii_gz/560.nii.gz\"}, {\"image\": \"nii_gz/5598.nii.gz\"}, {\"image\": \"nii_gz/5595.nii.gz\"}, {\"image\": \"nii_gz/5591.nii.gz\"}, {\"image\": \"nii_gz/5579.nii.gz\"}, {\"image\": \"nii_gz/557.nii.gz\"}, {\"image\": \"nii_gz/5569.nii.gz\"}, {\"image\": \"nii_gz/5564.nii.gz\"}, {\"image\": \"nii_gz/5556.nii.gz\"}, {\"image\": \"nii_gz/555.nii.gz\"}, {\"image\": \"nii_gz/5542.nii.gz\"}, {\"image\": \"nii_gz/5541.nii.gz\"}, {\"image\": \"nii_gz/5537.nii.gz\"}, {\"image\": \"nii_gz/5535.nii.gz\"}, {\"image\": \"nii_gz/5513.nii.gz\"}, {\"image\": \"nii_gz/5511.nii.gz\"}, {\"image\": \"nii_gz/5509.nii.gz\"}, {\"image\": \"nii_gz/5507.nii.gz\"}, {\"image\": \"nii_gz/5503.nii.gz\"}, {\"image\": \"nii_gz/550.nii.gz\"}, {\"image\": \"nii_gz/55.nii.gz\"}, {\"image\": \"nii_gz/5496.nii.gz\"}, {\"image\": \"nii_gz/5491.nii.gz\"}, {\"image\": \"nii_gz/5490.nii.gz\"}, {\"image\": \"nii_gz/5486.nii.gz\"}, {\"image\": \"nii_gz/5480.nii.gz\"}, {\"image\": \"nii_gz/5478.nii.gz\"}, {\"image\": \"nii_gz/547.nii.gz\"}, {\"image\": \"nii_gz/5459.nii.gz\"}, {\"image\": \"nii_gz/5457.nii.gz\"}, {\"image\": \"nii_gz/5455.nii.gz\"}, {\"image\": \"nii_gz/5454.nii.gz\"}, {\"image\": \"nii_gz/5450.nii.gz\"}, {\"image\": \"nii_gz/5449.nii.gz\"}, {\"image\": \"nii_gz/5445.nii.gz\"}, {\"image\": \"nii_gz/5440.nii.gz\"}, {\"image\": \"nii_gz/544.nii.gz\"}, {\"image\": \"nii_gz/5436.nii.gz\"}, {\"image\": \"nii_gz/5434.nii.gz\"}, {\"image\": \"nii_gz/5433.nii.gz\"}, {\"image\": \"nii_gz/5432.nii.gz\"}, {\"image\": \"nii_gz/5431.nii.gz\"}, {\"image\": \"nii_gz/543.nii.gz\"}, {\"image\": \"nii_gz/5427.nii.gz\"}, {\"image\": \"nii_gz/5421.nii.gz\"}, {\"image\": 
\"nii_gz/5419.nii.gz\"}, {\"image\": \"nii_gz/5409.nii.gz\"}, {\"image\": \"nii_gz/5407.nii.gz\"}, {\"image\": \"nii_gz/5403.nii.gz\"}, {\"image\": \"nii_gz/54.nii.gz\"}, {\"image\": \"nii_gz/5397.nii.gz\"}, {\"image\": \"nii_gz/5390.nii.gz\"}, {\"image\": \"nii_gz/5385.nii.gz\"}, {\"image\": \"nii_gz/538.nii.gz\"}, {\"image\": \"nii_gz/5370.nii.gz\"}, {\"image\": \"nii_gz/5369.nii.gz\"}, {\"image\": \"nii_gz/5366.nii.gz\"}, {\"image\": \"nii_gz/5362.nii.gz\"}, {\"image\": \"nii_gz/5359.nii.gz\"}, {\"image\": \"nii_gz/5353.nii.gz\"}, {\"image\": \"nii_gz/5351.nii.gz\"}, {\"image\": \"nii_gz/5350.nii.gz\"}, {\"image\": \"nii_gz/5347.nii.gz\"}, {\"image\": \"nii_gz/5321.nii.gz\"}, {\"image\": \"nii_gz/5314.nii.gz\"}, {\"image\": \"nii_gz/5311.nii.gz\"}, {\"image\": \"nii_gz/5301.nii.gz\"}, {\"image\": \"nii_gz/5280.nii.gz\"}, {\"image\": \"nii_gz/5279.nii.gz\"}, {\"image\": \"nii_gz/5274.nii.gz\"}, {\"image\": \"nii_gz/527.nii.gz\"}, {\"image\": \"nii_gz/5260.nii.gz\"}, {\"image\": \"nii_gz/5256.nii.gz\"}, {\"image\": \"nii_gz/5255.nii.gz\"}, {\"image\": \"nii_gz/5252.nii.gz\"}, {\"image\": \"nii_gz/5251.nii.gz\"}, {\"image\": \"nii_gz/5250.nii.gz\"}, {\"image\": \"nii_gz/5239.nii.gz\"}, {\"image\": \"nii_gz/5236.nii.gz\"}, {\"image\": \"nii_gz/5234.nii.gz\"}, {\"image\": \"nii_gz/5229.nii.gz\"}, {\"image\": \"nii_gz/5220.nii.gz\"}, {\"image\": \"nii_gz/522.nii.gz\"}, {\"image\": \"nii_gz/5204.nii.gz\"}, {\"image\": \"nii_gz/52.nii.gz\"}, {\"image\": \"nii_gz/5198.nii.gz\"}, {\"image\": \"nii_gz/5192.nii.gz\"}, {\"image\": \"nii_gz/5188.nii.gz\"}, {\"image\": \"nii_gz/5187.nii.gz\"}, {\"image\": \"nii_gz/5183.nii.gz\"}, {\"image\": \"nii_gz/5174.nii.gz\"}, {\"image\": \"nii_gz/5171.nii.gz\"}, {\"image\": \"nii_gz/5169.nii.gz\"}, {\"image\": \"nii_gz/5162.nii.gz\"}, {\"image\": \"nii_gz/5160.nii.gz\"}, {\"image\": \"nii_gz/5158.nii.gz\"}, {\"image\": \"nii_gz/5157.nii.gz\"}, {\"image\": \"nii_gz/5151.nii.gz\"}, {\"image\": \"nii_gz/5147.nii.gz\"}, {\"image\": 
\"nii_gz/5146.nii.gz\"}, {\"image\": \"nii_gz/5144.nii.gz\"}, {\"image\": \"nii_gz/5142.nii.gz\"}, {\"image\": \"nii_gz/5128.nii.gz\"}, {\"image\": \"nii_gz/5118.nii.gz\"}, {\"image\": \"nii_gz/5114.nii.gz\"}, {\"image\": \"nii_gz/5108.nii.gz\"}, {\"image\": \"nii_gz/5101.nii.gz\"}, {\"image\": \"nii_gz/510.nii.gz\"}, {\"image\": \"nii_gz/51.nii.gz\"}, {\"image\": \"nii_gz/5090.nii.gz\"}, {\"image\": \"nii_gz/5087.nii.gz\"}, {\"image\": \"nii_gz/5084.nii.gz\"}, {\"image\": \"nii_gz/5072.nii.gz\"}, {\"image\": \"nii_gz/5071.nii.gz\"}, {\"image\": \"nii_gz/5067.nii.gz\"}, {\"image\": \"nii_gz/5066.nii.gz\"}, {\"image\": \"nii_gz/5063.nii.gz\"}, {\"image\": \"nii_gz/5062.nii.gz\"}, {\"image\": \"nii_gz/5053.nii.gz\"}, {\"image\": \"nii_gz/504.nii.gz\"}, {\"image\": \"nii_gz/5039.nii.gz\"}, {\"image\": \"nii_gz/5034.nii.gz\"}, {\"image\": \"nii_gz/5015.nii.gz\"}, {\"image\": \"nii_gz/5004.nii.gz\"}, {\"image\": \"nii_gz/5002.nii.gz\"}, {\"image\": \"nii_gz/4988.nii.gz\"}, {\"image\": \"nii_gz/4985.nii.gz\"}, {\"image\": \"nii_gz/4981.nii.gz\"}, {\"image\": \"nii_gz/4976.nii.gz\"}, {\"image\": \"nii_gz/4963.nii.gz\"}, {\"image\": \"nii_gz/4951.nii.gz\"}, {\"image\": \"nii_gz/4940.nii.gz\"}, {\"image\": \"nii_gz/4938.nii.gz\"}, {\"image\": \"nii_gz/4934.nii.gz\"}, {\"image\": \"nii_gz/4926.nii.gz\"}, {\"image\": \"nii_gz/492.nii.gz\"}, {\"image\": \"nii_gz/4918.nii.gz\"}, {\"image\": \"nii_gz/4914.nii.gz\"}, {\"image\": \"nii_gz/4913.nii.gz\"}, {\"image\": \"nii_gz/4910.nii.gz\"}, {\"image\": \"nii_gz/4909.nii.gz\"}, {\"image\": \"nii_gz/4907.nii.gz\"}, {\"image\": \"nii_gz/4905.nii.gz\"}, {\"image\": \"nii_gz/4903.nii.gz\"}, {\"image\": \"nii_gz/4899.nii.gz\"}, {\"image\": \"nii_gz/4894.nii.gz\"}, {\"image\": \"nii_gz/4893.nii.gz\"}, {\"image\": \"nii_gz/489.nii.gz\"}, {\"image\": \"nii_gz/4888.nii.gz\"}, {\"image\": \"nii_gz/4885.nii.gz\"}, {\"image\": \"nii_gz/4880.nii.gz\"}, {\"image\": \"nii_gz/4873.nii.gz\"}, {\"image\": \"nii_gz/4871.nii.gz\"}, {\"image\": 
\"nii_gz/4864.nii.gz\"}, {\"image\": \"nii_gz/4858.nii.gz\"}, {\"image\": \"nii_gz/4857.nii.gz\"}, {\"image\": \"nii_gz/4852.nii.gz\"}, {\"image\": \"nii_gz/4841.nii.gz\"}, {\"image\": \"nii_gz/4839.nii.gz\"}, {\"image\": \"nii_gz/4836.nii.gz\"}, {\"image\": \"nii_gz/4831.nii.gz\"}, {\"image\": \"nii_gz/4821.nii.gz\"}, {\"image\": \"nii_gz/482.nii.gz\"}, {\"image\": \"nii_gz/4812.nii.gz\"}, {\"image\": \"nii_gz/4811.nii.gz\"}, {\"image\": \"nii_gz/4807.nii.gz\"}, {\"image\": \"nii_gz/4794.nii.gz\"}, {\"image\": \"nii_gz/4789.nii.gz\"}, {\"image\": \"nii_gz/4788.nii.gz\"}, {\"image\": \"nii_gz/4779.nii.gz\"}, {\"image\": \"nii_gz/4776.nii.gz\"}, {\"image\": \"nii_gz/4772.nii.gz\"}, {\"image\": \"nii_gz/4770.nii.gz\"}, {\"image\": \"nii_gz/4767.nii.gz\"}, {\"image\": \"nii_gz/4765.nii.gz\"}, {\"image\": \"nii_gz/4760.nii.gz\"}, {\"image\": \"nii_gz/4758.nii.gz\"}, {\"image\": \"nii_gz/4756.nii.gz\"}, {\"image\": \"nii_gz/4754.nii.gz\"}, {\"image\": \"nii_gz/4749.nii.gz\"}, {\"image\": \"nii_gz/4747.nii.gz\"}, {\"image\": \"nii_gz/4743.nii.gz\"}, {\"image\": \"nii_gz/4741.nii.gz\"}, {\"image\": \"nii_gz/471.nii.gz\"}, {\"image\": \"nii_gz/4700.nii.gz\"}, {\"image\": \"nii_gz/4695.nii.gz\"}, {\"image\": \"nii_gz/4686.nii.gz\"}, {\"image\": \"nii_gz/4676.nii.gz\"}, {\"image\": \"nii_gz/4669.nii.gz\"}, {\"image\": \"nii_gz/4667.nii.gz\"}, {\"image\": \"nii_gz/4664.nii.gz\"}, {\"image\": \"nii_gz/4652.nii.gz\"}, {\"image\": \"nii_gz/4651.nii.gz\"}, {\"image\": \"nii_gz/4645.nii.gz\"}, {\"image\": \"nii_gz/4630.nii.gz\"}, {\"image\": \"nii_gz/4629.nii.gz\"}, {\"image\": \"nii_gz/4625.nii.gz\"}, {\"image\": \"nii_gz/4617.nii.gz\"}, {\"image\": \"nii_gz/4608.nii.gz\"}, {\"image\": \"nii_gz/4606.nii.gz\"}, {\"image\": \"nii_gz/4602.nii.gz\"}, {\"image\": \"nii_gz/4601.nii.gz\"}, {\"image\": \"nii_gz/4599.nii.gz\"}, {\"image\": \"nii_gz/4594.nii.gz\"}, {\"image\": \"nii_gz/4593.nii.gz\"}, {\"image\": \"nii_gz/459.nii.gz\"}, {\"image\": \"nii_gz/4586.nii.gz\"}, {\"image\": 
\"nii_gz/4585.nii.gz\"}, {\"image\": \"nii_gz/458.nii.gz\"}, {\"image\": \"nii_gz/4579.nii.gz\"}, {\"image\": \"nii_gz/4577.nii.gz\"}, {\"image\": \"nii_gz/457.nii.gz\"}, {\"image\": \"nii_gz/4563.nii.gz\"}, {\"image\": \"nii_gz/4556.nii.gz\"}, {\"image\": \"nii_gz/4551.nii.gz\"}, {\"image\": \"nii_gz/4545.nii.gz\"}, {\"image\": \"nii_gz/4539.nii.gz\"}, {\"image\": \"nii_gz/4516.nii.gz\"}, {\"image\": \"nii_gz/4515.nii.gz\"}, {\"image\": \"nii_gz/4514.nii.gz\"}, {\"image\": \"nii_gz/4513.nii.gz\"}, {\"image\": \"nii_gz/4509.nii.gz\"}, {\"image\": \"nii_gz/4501.nii.gz\"}, {\"image\": \"nii_gz/45.nii.gz\"}, {\"image\": \"nii_gz/4493.nii.gz\"}, {\"image\": \"nii_gz/4461.nii.gz\"}, {\"image\": \"nii_gz/4460.nii.gz\"}, {\"image\": \"nii_gz/4454.nii.gz\"}, {\"image\": \"nii_gz/4453.nii.gz\"}, {\"image\": \"nii_gz/4445.nii.gz\"}, {\"image\": \"nii_gz/4437.nii.gz\"}, {\"image\": \"nii_gz/4432.nii.gz\"}, {\"image\": \"nii_gz/443.nii.gz\"}, {\"image\": \"nii_gz/4428.nii.gz\"}, {\"image\": \"nii_gz/4427.nii.gz\"}, {\"image\": \"nii_gz/4425.nii.gz\"}, {\"image\": \"nii_gz/442.nii.gz\"}, {\"image\": \"nii_gz/4410.nii.gz\"}, {\"image\": \"nii_gz/441.nii.gz\"}, {\"image\": \"nii_gz/4405.nii.gz\"}, {\"image\": \"nii_gz/44.nii.gz\"}, {\"image\": \"nii_gz/4398.nii.gz\"}, {\"image\": \"nii_gz/4393.nii.gz\"}, {\"image\": \"nii_gz/4391.nii.gz\"}, {\"image\": \"nii_gz/4386.nii.gz\"}, {\"image\": \"nii_gz/4385.nii.gz\"}, {\"image\": \"nii_gz/4382.nii.gz\"}, {\"image\": \"nii_gz/438.nii.gz\"}, {\"image\": \"nii_gz/4369.nii.gz\"}, {\"image\": \"nii_gz/4367.nii.gz\"}, {\"image\": \"nii_gz/4353.nii.gz\"}, {\"image\": \"nii_gz/4351.nii.gz\"}, {\"image\": \"nii_gz/4339.nii.gz\"}, {\"image\": \"nii_gz/4333.nii.gz\"}, {\"image\": \"nii_gz/432.nii.gz\"}, {\"image\": \"nii_gz/4319.nii.gz\"}, {\"image\": \"nii_gz/4318.nii.gz\"}, {\"image\": \"nii_gz/4304.nii.gz\"}, {\"image\": \"nii_gz/4299.nii.gz\"}, {\"image\": \"nii_gz/4298.nii.gz\"}, {\"image\": \"nii_gz/4295.nii.gz\"}, {\"image\": 
\"nii_gz/4288.nii.gz\"}, {\"image\": \"nii_gz/4280.nii.gz\"}, {\"image\": \"nii_gz/4278.nii.gz\"}, {\"image\": \"nii_gz/4276.nii.gz\"}, {\"image\": \"nii_gz/4274.nii.gz\"}, {\"image\": \"nii_gz/4272.nii.gz\"}, {\"image\": \"nii_gz/4269.nii.gz\"}, {\"image\": \"nii_gz/4268.nii.gz\"}, {\"image\": \"nii_gz/4266.nii.gz\"}, {\"image\": \"nii_gz/4263.nii.gz\"}, {\"image\": \"nii_gz/4262.nii.gz\"}, {\"image\": \"nii_gz/4254.nii.gz\"}, {\"image\": \"nii_gz/425.nii.gz\"}, {\"image\": \"nii_gz/4247.nii.gz\"}, {\"image\": \"nii_gz/424.nii.gz\"}, {\"image\": \"nii_gz/4235.nii.gz\"}, {\"image\": \"nii_gz/4225.nii.gz\"}, {\"image\": \"nii_gz/4219.nii.gz\"}, {\"image\": \"nii_gz/4215.nii.gz\"}, {\"image\": \"nii_gz/4214.nii.gz\"}, {\"image\": \"nii_gz/4213.nii.gz\"}, {\"image\": \"nii_gz/4211.nii.gz\"}, {\"image\": \"nii_gz/4209.nii.gz\"}, {\"image\": \"nii_gz/4204.nii.gz\"}, {\"image\": \"nii_gz/4203.nii.gz\"}, {\"image\": \"nii_gz/4200.nii.gz\"}, {\"image\": \"nii_gz/4199.nii.gz\"}, {\"image\": \"nii_gz/4192.nii.gz\"}, {\"image\": \"nii_gz/4189.nii.gz\"}, {\"image\": \"nii_gz/4182.nii.gz\"}, {\"image\": \"nii_gz/418.nii.gz\"}, {\"image\": \"nii_gz/4173.nii.gz\"}, {\"image\": \"nii_gz/4168.nii.gz\"}, {\"image\": \"nii_gz/4155.nii.gz\"}, {\"image\": \"nii_gz/4152.nii.gz\"}, {\"image\": \"nii_gz/4145.nii.gz\"}, {\"image\": \"nii_gz/4142.nii.gz\"}, {\"image\": \"nii_gz/414.nii.gz\"}, {\"image\": \"nii_gz/4137.nii.gz\"}, {\"image\": \"nii_gz/4134.nii.gz\"}, {\"image\": \"nii_gz/4133.nii.gz\"}, {\"image\": \"nii_gz/412.nii.gz\"}, {\"image\": \"nii_gz/4111.nii.gz\"}, {\"image\": \"nii_gz/4106.nii.gz\"}, {\"image\": \"nii_gz/4103.nii.gz\"}, {\"image\": \"nii_gz/4099.nii.gz\"}, {\"image\": \"nii_gz/4094.nii.gz\"}, {\"image\": \"nii_gz/4088.nii.gz\"}, {\"image\": \"nii_gz/408.nii.gz\"}, {\"image\": \"nii_gz/4079.nii.gz\"}, {\"image\": \"nii_gz/4059.nii.gz\"}, {\"image\": \"nii_gz/4058.nii.gz\"}, {\"image\": \"nii_gz/4057.nii.gz\"}, {\"image\": \"nii_gz/4055.nii.gz\"}, {\"image\": 
\"nii_gz/4054.nii.gz\"}, {\"image\": \"nii_gz/4050.nii.gz\"}, {\"image\": \"nii_gz/4049.nii.gz\"}, {\"image\": \"nii_gz/4042.nii.gz\"}, {\"image\": \"nii_gz/4040.nii.gz\"}, {\"image\": \"nii_gz/4039.nii.gz\"}, {\"image\": \"nii_gz/4038.nii.gz\"}, {\"image\": \"nii_gz/4036.nii.gz\"}, {\"image\": \"nii_gz/4034.nii.gz\"}, {\"image\": \"nii_gz/403.nii.gz\"}, {\"image\": \"nii_gz/4026.nii.gz\"}, {\"image\": \"nii_gz/4023.nii.gz\"}, {\"image\": \"nii_gz/4022.nii.gz\"}, {\"image\": \"nii_gz/402.nii.gz\"}, {\"image\": \"nii_gz/4015.nii.gz\"}, {\"image\": \"nii_gz/4009.nii.gz\"}, {\"image\": \"nii_gz/4008.nii.gz\"}, {\"image\": \"nii_gz/4005.nii.gz\"}, {\"image\": \"nii_gz/3999.nii.gz\"}, {\"image\": \"nii_gz/3997.nii.gz\"}, {\"image\": \"nii_gz/3993.nii.gz\"}, {\"image\": \"nii_gz/3992.nii.gz\"}, {\"image\": \"nii_gz/3983.nii.gz\"}, {\"image\": \"nii_gz/3978.nii.gz\"}, {\"image\": \"nii_gz/3971.nii.gz\"}, {\"image\": \"nii_gz/3970.nii.gz\"}, {\"image\": \"nii_gz/394.nii.gz\"}, {\"image\": \"nii_gz/3925.nii.gz\"}, {\"image\": \"nii_gz/3920.nii.gz\"}, {\"image\": \"nii_gz/3919.nii.gz\"}, {\"image\": \"nii_gz/3918.nii.gz\"}, {\"image\": \"nii_gz/3917.nii.gz\"}, {\"image\": \"nii_gz/3890.nii.gz\"}, {\"image\": \"nii_gz/3884.nii.gz\"}, {\"image\": \"nii_gz/3876.nii.gz\"}, {\"image\": \"nii_gz/3871.nii.gz\"}, {\"image\": \"nii_gz/3866.nii.gz\"}, {\"image\": \"nii_gz/3862.nii.gz\"}, {\"image\": \"nii_gz/3858.nii.gz\"}, {\"image\": \"nii_gz/3842.nii.gz\"}, {\"image\": \"nii_gz/3841.nii.gz\"}, {\"image\": \"nii_gz/3835.nii.gz\"}, {\"image\": \"nii_gz/3826.nii.gz\"}, {\"image\": \"nii_gz/3819.nii.gz\"}, {\"image\": \"nii_gz/3817.nii.gz\"}, {\"image\": \"nii_gz/3816.nii.gz\"}, {\"image\": \"nii_gz/3810.nii.gz\"}, {\"image\": \"nii_gz/3801.nii.gz\"}, {\"image\": \"nii_gz/3790.nii.gz\"}, {\"image\": \"nii_gz/3788.nii.gz\"}, {\"image\": \"nii_gz/3775.nii.gz\"}, {\"image\": \"nii_gz/3772.nii.gz\"}, {\"image\": \"nii_gz/3768.nii.gz\"}, {\"image\": \"nii_gz/3765.nii.gz\"}, {\"image\": 
\"nii_gz/3759.nii.gz\"}, {\"image\": \"nii_gz/3758.nii.gz\"}, {\"image\": \"nii_gz/3752.nii.gz\"}, {\"image\": \"nii_gz/3751.nii.gz\"}, {\"image\": \"nii_gz/375.nii.gz\"}, {\"image\": \"nii_gz/3742.nii.gz\"}, {\"image\": \"nii_gz/374.nii.gz\"}, {\"image\": \"nii_gz/3734.nii.gz\"}, {\"image\": \"nii_gz/3732.nii.gz\"}, {\"image\": \"nii_gz/3729.nii.gz\"}, {\"image\": \"nii_gz/3728.nii.gz\"}, {\"image\": \"nii_gz/3727.nii.gz\"}, {\"image\": \"nii_gz/3719.nii.gz\"}, {\"image\": \"nii_gz/3717.nii.gz\"}, {\"image\": \"nii_gz/3705.nii.gz\"}, {\"image\": \"nii_gz/3704.nii.gz\"}, {\"image\": \"nii_gz/3702.nii.gz\"}, {\"image\": \"nii_gz/3699.nii.gz\"}, {\"image\": \"nii_gz/369.nii.gz\"}, {\"image\": \"nii_gz/3686.nii.gz\"}, {\"image\": \"nii_gz/3685.nii.gz\"}, {\"image\": \"nii_gz/368.nii.gz\"}, {\"image\": \"nii_gz/3679.nii.gz\"}, {\"image\": \"nii_gz/3677.nii.gz\"}, {\"image\": \"nii_gz/3674.nii.gz\"}, {\"image\": \"nii_gz/3660.nii.gz\"}, {\"image\": \"nii_gz/3649.nii.gz\"}, {\"image\": \"nii_gz/364.nii.gz\"}, {\"image\": \"nii_gz/3637.nii.gz\"}, {\"image\": \"nii_gz/3630.nii.gz\"}, {\"image\": \"nii_gz/3627.nii.gz\"}, {\"image\": \"nii_gz/3622.nii.gz\"}, {\"image\": \"nii_gz/362.nii.gz\"}, {\"image\": \"nii_gz/3617.nii.gz\"}, {\"image\": \"nii_gz/3616.nii.gz\"}, {\"image\": \"nii_gz/3613.nii.gz\"}, {\"image\": \"nii_gz/361.nii.gz\"}, {\"image\": \"nii_gz/3607.nii.gz\"}, {\"image\": \"nii_gz/3595.nii.gz\"}, {\"image\": \"nii_gz/3584.nii.gz\"}, {\"image\": \"nii_gz/3581.nii.gz\"}, {\"image\": \"nii_gz/3579.nii.gz\"}, {\"image\": \"nii_gz/3575.nii.gz\"}, {\"image\": \"nii_gz/3569.nii.gz\"}, {\"image\": \"nii_gz/3565.nii.gz\"}, {\"image\": \"nii_gz/3564.nii.gz\"}, {\"image\": \"nii_gz/3562.nii.gz\"}, {\"image\": \"nii_gz/3560.nii.gz\"}, {\"image\": \"nii_gz/3558.nii.gz\"}, {\"image\": \"nii_gz/3552.nii.gz\"}, {\"image\": \"nii_gz/3547.nii.gz\"}, {\"image\": \"nii_gz/3541.nii.gz\"}, {\"image\": \"nii_gz/3537.nii.gz\"}, {\"image\": \"nii_gz/3533.nii.gz\"}, {\"image\": 
\"nii_gz/3530.nii.gz\"}, {\"image\": \"nii_gz/3527.nii.gz\"}, {\"image\": \"nii_gz/3526.nii.gz\"}, {\"image\": \"nii_gz/3523.nii.gz\"}, {\"image\": \"nii_gz/352.nii.gz\"}, {\"image\": \"nii_gz/3510.nii.gz\"}, {\"image\": \"nii_gz/3507.nii.gz\"}, {\"image\": \"nii_gz/3490.nii.gz\"}, {\"image\": \"nii_gz/3477.nii.gz\"}, {\"image\": \"nii_gz/3455.nii.gz\"}, {\"image\": \"nii_gz/3450.nii.gz\"}, {\"image\": \"nii_gz/3448.nii.gz\"}, {\"image\": \"nii_gz/3443.nii.gz\"}, {\"image\": \"nii_gz/3441.nii.gz\"}, {\"image\": \"nii_gz/3434.nii.gz\"}, {\"image\": \"nii_gz/343.nii.gz\"}, {\"image\": \"nii_gz/3422.nii.gz\"}, {\"image\": \"nii_gz/3419.nii.gz\"}, {\"image\": \"nii_gz/3418.nii.gz\"}, {\"image\": \"nii_gz/3416.nii.gz\"}, {\"image\": \"nii_gz/340.nii.gz\"}, {\"image\": \"nii_gz/3399.nii.gz\"}, {\"image\": \"nii_gz/3395.nii.gz\"}, {\"image\": \"nii_gz/3386.nii.gz\"}, {\"image\": \"nii_gz/3376.nii.gz\"}, {\"image\": \"nii_gz/3375.nii.gz\"}, {\"image\": \"nii_gz/337.nii.gz\"}, {\"image\": \"nii_gz/3365.nii.gz\"}, {\"image\": \"nii_gz/3361.nii.gz\"}, {\"image\": \"nii_gz/336.nii.gz\"}, {\"image\": \"nii_gz/3327.nii.gz\"}, {\"image\": \"nii_gz/332.nii.gz\"}, {\"image\": \"nii_gz/3319.nii.gz\"}, {\"image\": \"nii_gz/3303.nii.gz\"}, {\"image\": \"nii_gz/3302.nii.gz\"}, {\"image\": \"nii_gz/3301.nii.gz\"}, {\"image\": \"nii_gz/3300.nii.gz\"}, {\"image\": \"nii_gz/3297.nii.gz\"}, {\"image\": \"nii_gz/3285.nii.gz\"}, {\"image\": \"nii_gz/3280.nii.gz\"}, {\"image\": \"nii_gz/3278.nii.gz\"}, {\"image\": \"nii_gz/3275.nii.gz\"}, {\"image\": \"nii_gz/3274.nii.gz\"}, {\"image\": \"nii_gz/3270.nii.gz\"}, {\"image\": \"nii_gz/3269.nii.gz\"}, {\"image\": \"nii_gz/3261.nii.gz\"}, {\"image\": \"nii_gz/3260.nii.gz\"}, {\"image\": \"nii_gz/3257.nii.gz\"}, {\"image\": \"nii_gz/3255.nii.gz\"}, {\"image\": \"nii_gz/3253.nii.gz\"}, {\"image\": \"nii_gz/3251.nii.gz\"}, {\"image\": \"nii_gz/3246.nii.gz\"}, {\"image\": \"nii_gz/3244.nii.gz\"}, {\"image\": \"nii_gz/3240.nii.gz\"}, {\"image\": 
\"nii_gz/3236.nii.gz\"}, {\"image\": \"nii_gz/3235.nii.gz\"}, {\"image\": \"nii_gz/3234.nii.gz\"}, {\"image\": \"nii_gz/3227.nii.gz\"}, {\"image\": \"nii_gz/3226.nii.gz\"}, {\"image\": \"nii_gz/3223.nii.gz\"}, {\"image\": \"nii_gz/3220.nii.gz\"}, {\"image\": \"nii_gz/3216.nii.gz\"}, {\"image\": \"nii_gz/3215.nii.gz\"}, {\"image\": \"nii_gz/3207.nii.gz\"}, {\"image\": \"nii_gz/3206.nii.gz\"}, {\"image\": \"nii_gz/3201.nii.gz\"}, {\"image\": \"nii_gz/3193.nii.gz\"}, {\"image\": \"nii_gz/3189.nii.gz\"}, {\"image\": \"nii_gz/3188.nii.gz\"}, {\"image\": \"nii_gz/3182.nii.gz\"}, {\"image\": \"nii_gz/3181.nii.gz\"}, {\"image\": \"nii_gz/3177.nii.gz\"}, {\"image\": \"nii_gz/3174.nii.gz\"}, {\"image\": \"nii_gz/3165.nii.gz\"}, {\"image\": \"nii_gz/3163.nii.gz\"}, {\"image\": \"nii_gz/3157.nii.gz\"}, {\"image\": \"nii_gz/3154.nii.gz\"}, {\"image\": \"nii_gz/3152.nii.gz\"}, {\"image\": \"nii_gz/3151.nii.gz\"}, {\"image\": \"nii_gz/3149.nii.gz\"}, {\"image\": \"nii_gz/3141.nii.gz\"}, {\"image\": \"nii_gz/3137.nii.gz\"}, {\"image\": \"nii_gz/3131.nii.gz\"}, {\"image\": \"nii_gz/3130.nii.gz\"}, {\"image\": \"nii_gz/3118.nii.gz\"}, {\"image\": \"nii_gz/3116.nii.gz\"}, {\"image\": \"nii_gz/3113.nii.gz\"}, {\"image\": \"nii_gz/3109.nii.gz\"}, {\"image\": \"nii_gz/3108.nii.gz\"}, {\"image\": \"nii_gz/310.nii.gz\"}, {\"image\": \"nii_gz/31.nii.gz\"}, {\"image\": \"nii_gz/3098.nii.gz\"}, {\"image\": \"nii_gz/3097.nii.gz\"}, {\"image\": \"nii_gz/3095.nii.gz\"}, {\"image\": \"nii_gz/3091.nii.gz\"}, {\"image\": \"nii_gz/3090.nii.gz\"}, {\"image\": \"nii_gz/3084.nii.gz\"}, {\"image\": \"nii_gz/3083.nii.gz\"}, {\"image\": \"nii_gz/3077.nii.gz\"}, {\"image\": \"nii_gz/3069.nii.gz\"}, {\"image\": \"nii_gz/3061.nii.gz\"}, {\"image\": \"nii_gz/3056.nii.gz\"}, {\"image\": \"nii_gz/3053.nii.gz\"}, {\"image\": \"nii_gz/305.nii.gz\"}, {\"image\": \"nii_gz/3046.nii.gz\"}, {\"image\": \"nii_gz/304.nii.gz\"}, {\"image\": \"nii_gz/3038.nii.gz\"}, {\"image\": \"nii_gz/3034.nii.gz\"}, {\"image\": 
\"nii_gz/3031.nii.gz\"}, {\"image\": \"nii_gz/3028.nii.gz\"}, {\"image\": \"nii_gz/3022.nii.gz\"}, {\"image\": \"nii_gz/3020.nii.gz\"}, {\"image\": \"nii_gz/3011.nii.gz\"}, {\"image\": \"nii_gz/3009.nii.gz\"}, {\"image\": \"nii_gz/300.nii.gz\"}, {\"image\": \"nii_gz/2996.nii.gz\"}, {\"image\": \"nii_gz/2993.nii.gz\"}, {\"image\": \"nii_gz/2992.nii.gz\"}, {\"image\": \"nii_gz/2982.nii.gz\"}, {\"image\": \"nii_gz/2973.nii.gz\"}, {\"image\": \"nii_gz/2972.nii.gz\"}, {\"image\": \"nii_gz/2968.nii.gz\"}, {\"image\": \"nii_gz/2960.nii.gz\"}, {\"image\": \"nii_gz/296.nii.gz\"}, {\"image\": \"nii_gz/2957.nii.gz\"}, {\"image\": \"nii_gz/2948.nii.gz\"}, {\"image\": \"nii_gz/2947.nii.gz\"}, {\"image\": \"nii_gz/2945.nii.gz\"}, {\"image\": \"nii_gz/2944.nii.gz\"}, {\"image\": \"nii_gz/294.nii.gz\"}, {\"image\": \"nii_gz/2938.nii.gz\"}, {\"image\": \"nii_gz/2937.nii.gz\"}, {\"image\": \"nii_gz/2934.nii.gz\"}, {\"image\": \"nii_gz/2932.nii.gz\"}, {\"image\": \"nii_gz/2925.nii.gz\"}, {\"image\": \"nii_gz/2924.nii.gz\"}, {\"image\": \"nii_gz/2922.nii.gz\"}, {\"image\": \"nii_gz/2916.nii.gz\"}, {\"image\": \"nii_gz/2915.nii.gz\"}, {\"image\": \"nii_gz/2910.nii.gz\"}, {\"image\": \"nii_gz/2909.nii.gz\"}, {\"image\": \"nii_gz/2908.nii.gz\"}, {\"image\": \"nii_gz/2907.nii.gz\"}, {\"image\": \"nii_gz/2906.nii.gz\"}, {\"image\": \"nii_gz/290.nii.gz\"}, {\"image\": \"nii_gz/2896.nii.gz\"}, {\"image\": \"nii_gz/2893.nii.gz\"}, {\"image\": \"nii_gz/2888.nii.gz\"}, {\"image\": \"nii_gz/2875.nii.gz\"}, {\"image\": \"nii_gz/2874.nii.gz\"}, {\"image\": \"nii_gz/2866.nii.gz\"}, {\"image\": \"nii_gz/2865.nii.gz\"}, {\"image\": \"nii_gz/2863.nii.gz\"}, {\"image\": \"nii_gz/2855.nii.gz\"}, {\"image\": \"nii_gz/2850.nii.gz\"}, {\"image\": \"nii_gz/2841.nii.gz\"}, {\"image\": \"nii_gz/2840.nii.gz\"}, {\"image\": \"nii_gz/2829.nii.gz\"}, {\"image\": \"nii_gz/282.nii.gz\"}, {\"image\": \"nii_gz/2817.nii.gz\"}, {\"image\": \"nii_gz/2815.nii.gz\"}, {\"image\": \"nii_gz/2802.nii.gz\"}, {\"image\": 
\"nii_gz/2800.nii.gz\"}, {\"image\": \"nii_gz/2797.nii.gz\"}, {\"image\": \"nii_gz/2793.nii.gz\"}, {\"image\": \"nii_gz/2791.nii.gz\"}, {\"image\": \"nii_gz/2780.nii.gz\"}, {\"image\": \"nii_gz/2777.nii.gz\"}, {\"image\": \"nii_gz/2774.nii.gz\"}, {\"image\": \"nii_gz/2765.nii.gz\"}, {\"image\": \"nii_gz/2761.nii.gz\"}, {\"image\": \"nii_gz/2756.nii.gz\"}, {\"image\": \"nii_gz/2740.nii.gz\"}, {\"image\": \"nii_gz/2735.nii.gz\"}, {\"image\": \"nii_gz/2710.nii.gz\"}, {\"image\": \"nii_gz/271.nii.gz\"}, {\"image\": \"nii_gz/2706.nii.gz\"}, {\"image\": \"nii_gz/2704.nii.gz\"}, {\"image\": \"nii_gz/2701.nii.gz\"}, {\"image\": \"nii_gz/2700.nii.gz\"}, {\"image\": \"nii_gz/27.nii.gz\"}, {\"image\": \"nii_gz/2699.nii.gz\"}, {\"image\": \"nii_gz/2695.nii.gz\"}, {\"image\": \"nii_gz/2685.nii.gz\"}, {\"image\": \"nii_gz/2682.nii.gz\"}, {\"image\": \"nii_gz/268.nii.gz\"}, {\"image\": \"nii_gz/2677.nii.gz\"}, {\"image\": \"nii_gz/2676.nii.gz\"}, {\"image\": \"nii_gz/2674.nii.gz\"}, {\"image\": \"nii_gz/2663.nii.gz\"}, {\"image\": \"nii_gz/2656.nii.gz\"}, {\"image\": \"nii_gz/2651.nii.gz\"}, {\"image\": \"nii_gz/2643.nii.gz\"}, {\"image\": \"nii_gz/2641.nii.gz\"}, {\"image\": \"nii_gz/264.nii.gz\"}, {\"image\": \"nii_gz/2638.nii.gz\"}, {\"image\": \"nii_gz/2632.nii.gz\"}, {\"image\": \"nii_gz/2631.nii.gz\"}, {\"image\": \"nii_gz/263.nii.gz\"}, {\"image\": \"nii_gz/2623.nii.gz\"}, {\"image\": \"nii_gz/2611.nii.gz\"}, {\"image\": \"nii_gz/2610.nii.gz\"}, {\"image\": \"nii_gz/261.nii.gz\"}, {\"image\": \"nii_gz/2607.nii.gz\"}, {\"image\": \"nii_gz/2604.nii.gz\"}, {\"image\": \"nii_gz/2587.nii.gz\"}, {\"image\": \"nii_gz/2575.nii.gz\"}, {\"image\": \"nii_gz/2574.nii.gz\"}, {\"image\": \"nii_gz/2573.nii.gz\"}, {\"image\": \"nii_gz/2570.nii.gz\"}, {\"image\": \"nii_gz/2563.nii.gz\"}, {\"image\": \"nii_gz/2558.nii.gz\"}, {\"image\": \"nii_gz/2550.nii.gz\"}, {\"image\": \"nii_gz/2548.nii.gz\"}, {\"image\": \"nii_gz/254.nii.gz\"}, {\"image\": \"nii_gz/2539.nii.gz\"}, {\"image\": 
\"nii_gz/2530.nii.gz\"}, {\"image\": \"nii_gz/2529.nii.gz\"}, {\"image\": \"nii_gz/2516.nii.gz\"}, {\"image\": \"nii_gz/2515.nii.gz\"}, {\"image\": \"nii_gz/2514.nii.gz\"}, {\"image\": \"nii_gz/2511.nii.gz\"}, {\"image\": \"nii_gz/2496.nii.gz\"}, {\"image\": \"nii_gz/2488.nii.gz\"}, {\"image\": \"nii_gz/2483.nii.gz\"}, {\"image\": \"nii_gz/2478.nii.gz\"}, {\"image\": \"nii_gz/2477.nii.gz\"}, {\"image\": \"nii_gz/2473.nii.gz\"}, {\"image\": \"nii_gz/2469.nii.gz\"}, {\"image\": \"nii_gz/2465.nii.gz\"}, {\"image\": \"nii_gz/2463.nii.gz\"}, {\"image\": \"nii_gz/2459.nii.gz\"}, {\"image\": \"nii_gz/2452.nii.gz\"}, {\"image\": \"nii_gz/2446.nii.gz\"}, {\"image\": \"nii_gz/2445.nii.gz\"}, {\"image\": \"nii_gz/244.nii.gz\"}, {\"image\": \"nii_gz/2439.nii.gz\"}, {\"image\": \"nii_gz/2438.nii.gz\"}, {\"image\": \"nii_gz/2437.nii.gz\"}, {\"image\": \"nii_gz/2429.nii.gz\"}, {\"image\": \"nii_gz/2425.nii.gz\"}, {\"image\": \"nii_gz/2418.nii.gz\"}, {\"image\": \"nii_gz/2417.nii.gz\"}, {\"image\": \"nii_gz/2413.nii.gz\"}, {\"image\": \"nii_gz/2408.nii.gz\"}, {\"image\": \"nii_gz/2406.nii.gz\"}, {\"image\": \"nii_gz/2405.nii.gz\"}, {\"image\": \"nii_gz/2396.nii.gz\"}, {\"image\": \"nii_gz/2394.nii.gz\"}, {\"image\": \"nii_gz/2393.nii.gz\"}, {\"image\": \"nii_gz/239.nii.gz\"}, {\"image\": \"nii_gz/2384.nii.gz\"}, {\"image\": \"nii_gz/2375.nii.gz\"}, {\"image\": \"nii_gz/2372.nii.gz\"}, {\"image\": \"nii_gz/2371.nii.gz\"}, {\"image\": \"nii_gz/2364.nii.gz\"}, {\"image\": \"nii_gz/2360.nii.gz\"}, {\"image\": \"nii_gz/2358.nii.gz\"}, {\"image\": \"nii_gz/2355.nii.gz\"}, {\"image\": \"nii_gz/2354.nii.gz\"}, {\"image\": \"nii_gz/2353.nii.gz\"}, {\"image\": \"nii_gz/2347.nii.gz\"}, {\"image\": \"nii_gz/234.nii.gz\"}, {\"image\": \"nii_gz/2335.nii.gz\"}, {\"image\": \"nii_gz/2325.nii.gz\"}, {\"image\": \"nii_gz/2318.nii.gz\"}, {\"image\": \"nii_gz/2315.nii.gz\"}, {\"image\": \"nii_gz/2313.nii.gz\"}, {\"image\": \"nii_gz/2311.nii.gz\"}, {\"image\": \"nii_gz/2306.nii.gz\"}, {\"image\": 
\"nii_gz/2304.nii.gz\"}, {\"image\": \"nii_gz/2303.nii.gz\"}, {\"image\": \"nii_gz/23.nii.gz\"}, {\"image\": \"nii_gz/2290.nii.gz\"}, {\"image\": \"nii_gz/2289.nii.gz\"}, {\"image\": \"nii_gz/2280.nii.gz\"}, {\"image\": \"nii_gz/228.nii.gz\"}, {\"image\": \"nii_gz/2276.nii.gz\"}, {\"image\": \"nii_gz/2274.nii.gz\"}, {\"image\": \"nii_gz/2272.nii.gz\"}, {\"image\": \"nii_gz/227.nii.gz\"}, {\"image\": \"nii_gz/2269.nii.gz\"}, {\"image\": \"nii_gz/2262.nii.gz\"}, {\"image\": \"nii_gz/2259.nii.gz\"}, {\"image\": \"nii_gz/2249.nii.gz\"}, {\"image\": \"nii_gz/2248.nii.gz\"}, {\"image\": \"nii_gz/2235.nii.gz\"}, {\"image\": \"nii_gz/2234.nii.gz\"}, {\"image\": \"nii_gz/2232.nii.gz\"}, {\"image\": \"nii_gz/2229.nii.gz\"}, {\"image\": \"nii_gz/2221.nii.gz\"}, {\"image\": \"nii_gz/222.nii.gz\"}, {\"image\": \"nii_gz/2213.nii.gz\"}, {\"image\": \"nii_gz/2208.nii.gz\"}, {\"image\": \"nii_gz/2205.nii.gz\"}, {\"image\": \"nii_gz/2203.nii.gz\"}, {\"image\": \"nii_gz/2202.nii.gz\"}, {\"image\": \"nii_gz/22.nii.gz\"}, {\"image\": \"nii_gz/2199.nii.gz\"}, {\"image\": \"nii_gz/2187.nii.gz\"}, {\"image\": \"nii_gz/2185.nii.gz\"}, {\"image\": \"nii_gz/2181.nii.gz\"}, {\"image\": \"nii_gz/2178.nii.gz\"}, {\"image\": \"nii_gz/2173.nii.gz\"}, {\"image\": \"nii_gz/217.nii.gz\"}, {\"image\": \"nii_gz/2157.nii.gz\"}, {\"image\": \"nii_gz/215.nii.gz\"}, {\"image\": \"nii_gz/2144.nii.gz\"}, {\"image\": \"nii_gz/214.nii.gz\"}, {\"image\": \"nii_gz/2139.nii.gz\"}, {\"image\": \"nii_gz/2137.nii.gz\"}, {\"image\": \"nii_gz/2133.nii.gz\"}, {\"image\": \"nii_gz/2132.nii.gz\"}, {\"image\": \"nii_gz/2130.nii.gz\"}, {\"image\": \"nii_gz/2124.nii.gz\"}, {\"image\": \"nii_gz/2121.nii.gz\"}, {\"image\": \"nii_gz/2117.nii.gz\"}, {\"image\": \"nii_gz/2110.nii.gz\"}, {\"image\": \"nii_gz/2106.nii.gz\"}, {\"image\": \"nii_gz/210.nii.gz\"}, {\"image\": \"nii_gz/2092.nii.gz\"}, {\"image\": \"nii_gz/209.nii.gz\"}, {\"image\": \"nii_gz/2088.nii.gz\"}, {\"image\": \"nii_gz/2086.nii.gz\"}, {\"image\": 
\"nii_gz/2079.nii.gz\"}, {\"image\": \"nii_gz/2074.nii.gz\"}, {\"image\": \"nii_gz/2071.nii.gz\"}, {\"image\": \"nii_gz/2066.nii.gz\"}, {\"image\": \"nii_gz/2063.nii.gz\"}, {\"image\": \"nii_gz/2060.nii.gz\"}, {\"image\": \"nii_gz/2054.nii.gz\"}, {\"image\": \"nii_gz/2052.nii.gz\"}, {\"image\": \"nii_gz/2042.nii.gz\"}, {\"image\": \"nii_gz/204.nii.gz\"}, {\"image\": \"nii_gz/2039.nii.gz\"}, {\"image\": \"nii_gz/2037.nii.gz\"}, {\"image\": \"nii_gz/2036.nii.gz\"}, {\"image\": \"nii_gz/2029.nii.gz\"}, {\"image\": \"nii_gz/2017.nii.gz\"}, {\"image\": \"nii_gz/2011.nii.gz\"}, {\"image\": \"nii_gz/2010.nii.gz\"}, {\"image\": \"nii_gz/201.nii.gz\"}, {\"image\": \"nii_gz/2002.nii.gz\"}, {\"image\": \"nii_gz/1999.nii.gz\"}, {\"image\": \"nii_gz/1995.nii.gz\"}, {\"image\": \"nii_gz/1992.nii.gz\"}, {\"image\": \"nii_gz/1990.nii.gz\"}, {\"image\": \"nii_gz/199.nii.gz\"}, {\"image\": \"nii_gz/1989.nii.gz\"}, {\"image\": \"nii_gz/1988.nii.gz\"}, {\"image\": \"nii_gz/1980.nii.gz\"}, {\"image\": \"nii_gz/198.nii.gz\"}, {\"image\": \"nii_gz/1976.nii.gz\"}, {\"image\": \"nii_gz/1967.nii.gz\"}, {\"image\": \"nii_gz/1964.nii.gz\"}, {\"image\": \"nii_gz/1961.nii.gz\"}, {\"image\": \"nii_gz/1958.nii.gz\"}, {\"image\": \"nii_gz/1952.nii.gz\"}, {\"image\": \"nii_gz/1947.nii.gz\"}, {\"image\": \"nii_gz/1945.nii.gz\"}, {\"image\": \"nii_gz/1944.nii.gz\"}, {\"image\": \"nii_gz/1943.nii.gz\"}, {\"image\": \"nii_gz/1940.nii.gz\"}, {\"image\": \"nii_gz/1929.nii.gz\"}, {\"image\": \"nii_gz/1928.nii.gz\"}, {\"image\": \"nii_gz/1912.nii.gz\"}, {\"image\": \"nii_gz/191.nii.gz\"}, {\"image\": \"nii_gz/1894.nii.gz\"}, {\"image\": \"nii_gz/1892.nii.gz\"}, {\"image\": \"nii_gz/1891.nii.gz\"}, {\"image\": \"nii_gz/1878.nii.gz\"}, {\"image\": \"nii_gz/1874.nii.gz\"}, {\"image\": \"nii_gz/1868.nii.gz\"}, {\"image\": \"nii_gz/1865.nii.gz\"}, {\"image\": \"nii_gz/1852.nii.gz\"}, {\"image\": \"nii_gz/1850.nii.gz\"}, {\"image\": \"nii_gz/1848.nii.gz\"}, {\"image\": \"nii_gz/1847.nii.gz\"}, {\"image\": 
\"nii_gz/1845.nii.gz\"}, {\"image\": \"nii_gz/1838.nii.gz\"}, {\"image\": \"nii_gz/1837.nii.gz\"}, {\"image\": \"nii_gz/1836.nii.gz\"}, {\"image\": \"nii_gz/183.nii.gz\"}, {\"image\": \"nii_gz/1827.nii.gz\"}, {\"image\": \"nii_gz/1825.nii.gz\"}, {\"image\": \"nii_gz/1816.nii.gz\"}, {\"image\": \"nii_gz/1814.nii.gz\"}, {\"image\": \"nii_gz/1812.nii.gz\"}, {\"image\": \"nii_gz/1810.nii.gz\"}, {\"image\": \"nii_gz/181.nii.gz\"}, {\"image\": \"nii_gz/1809.nii.gz\"}, {\"image\": \"nii_gz/1807.nii.gz\"}, {\"image\": \"nii_gz/1802.nii.gz\"}, {\"image\": \"nii_gz/1801.nii.gz\"}, {\"image\": \"nii_gz/180.nii.gz\"}, {\"image\": \"nii_gz/1799.nii.gz\"}, {\"image\": \"nii_gz/1793.nii.gz\"}, {\"image\": \"nii_gz/1790.nii.gz\"}, {\"image\": \"nii_gz/1789.nii.gz\"}, {\"image\": \"nii_gz/1788.nii.gz\"}, {\"image\": \"nii_gz/1787.nii.gz\"}, {\"image\": \"nii_gz/1768.nii.gz\"}, {\"image\": \"nii_gz/1766.nii.gz\"}, {\"image\": \"nii_gz/1757.nii.gz\"}, {\"image\": \"nii_gz/1754.nii.gz\"}, {\"image\": \"nii_gz/1744.nii.gz\"}, {\"image\": \"nii_gz/174.nii.gz\"}, {\"image\": \"nii_gz/1737.nii.gz\"}, {\"image\": \"nii_gz/1735.nii.gz\"}, {\"image\": \"nii_gz/1732.nii.gz\"}, {\"image\": \"nii_gz/1731.nii.gz\"}, {\"image\": \"nii_gz/1729.nii.gz\"}, {\"image\": \"nii_gz/1718.nii.gz\"}, {\"image\": \"nii_gz/1711.nii.gz\"}, {\"image\": \"nii_gz/1709.nii.gz\"}, {\"image\": \"nii_gz/1708.nii.gz\"}, {\"image\": \"nii_gz/1700.nii.gz\"}, {\"image\": \"nii_gz/17.nii.gz\"}, {\"image\": \"nii_gz/1696.nii.gz\"}, {\"image\": \"nii_gz/1683.nii.gz\"}, {\"image\": \"nii_gz/1682.nii.gz\"}, {\"image\": \"nii_gz/1680.nii.gz\"}, {\"image\": \"nii_gz/1674.nii.gz\"}, {\"image\": \"nii_gz/167.nii.gz\"}, {\"image\": \"nii_gz/1667.nii.gz\"}, {\"image\": \"nii_gz/1666.nii.gz\"}, {\"image\": \"nii_gz/1662.nii.gz\"}, {\"image\": \"nii_gz/1660.nii.gz\"}, {\"image\": \"nii_gz/166.nii.gz\"}, {\"image\": \"nii_gz/1659.nii.gz\"}, {\"image\": \"nii_gz/1656.nii.gz\"}, {\"image\": \"nii_gz/1653.nii.gz\"}, {\"image\": 
\"nii_gz/1641.nii.gz\"}, {\"image\": \"nii_gz/164.nii.gz\"}, {\"image\": \"nii_gz/1639.nii.gz\"}, {\"image\": \"nii_gz/1634.nii.gz\"}, {\"image\": \"nii_gz/1633.nii.gz\"}, {\"image\": \"nii_gz/1625.nii.gz\"}, {\"image\": \"nii_gz/1621.nii.gz\"}, {\"image\": \"nii_gz/1612.nii.gz\"}, {\"image\": \"nii_gz/1610.nii.gz\"}, {\"image\": \"nii_gz/1606.nii.gz\"}, {\"image\": \"nii_gz/1605.nii.gz\"}, {\"image\": \"nii_gz/1603.nii.gz\"}, {\"image\": \"nii_gz/1598.nii.gz\"}, {\"image\": \"nii_gz/1597.nii.gz\"}, {\"image\": \"nii_gz/1595.nii.gz\"}, {\"image\": \"nii_gz/1583.nii.gz\"}, {\"image\": \"nii_gz/1575.nii.gz\"}, {\"image\": \"nii_gz/1574.nii.gz\"}, {\"image\": \"nii_gz/1573.nii.gz\"}, {\"image\": \"nii_gz/1560.nii.gz\"}, {\"image\": \"nii_gz/1553.nii.gz\"}, {\"image\": \"nii_gz/1552.nii.gz\"}, {\"image\": \"nii_gz/1548.nii.gz\"}, {\"image\": \"nii_gz/1545.nii.gz\"}, {\"image\": \"nii_gz/1538.nii.gz\"}, {\"image\": \"nii_gz/1519.nii.gz\"}, {\"image\": \"nii_gz/1506.nii.gz\"}, {\"image\": \"nii_gz/1504.nii.gz\"}, {\"image\": \"nii_gz/1501.nii.gz\"}, {\"image\": \"nii_gz/1500.nii.gz\"}, {\"image\": \"nii_gz/150.nii.gz\"}, {\"image\": \"nii_gz/1490.nii.gz\"}, {\"image\": \"nii_gz/1486.nii.gz\"}, {\"image\": \"nii_gz/1481.nii.gz\"}, {\"image\": \"nii_gz/1480.nii.gz\"}, {\"image\": \"nii_gz/148.nii.gz\"}, {\"image\": \"nii_gz/1471.nii.gz\"}, {\"image\": \"nii_gz/1467.nii.gz\"}, {\"image\": \"nii_gz/1457.nii.gz\"}, {\"image\": \"nii_gz/1443.nii.gz\"}, {\"image\": \"nii_gz/1439.nii.gz\"}, {\"image\": \"nii_gz/1437.nii.gz\"}, {\"image\": \"nii_gz/1435.nii.gz\"}, {\"image\": \"nii_gz/1433.nii.gz\"}, {\"image\": \"nii_gz/1431.nii.gz\"}, {\"image\": \"nii_gz/1426.nii.gz\"}, {\"image\": \"nii_gz/1421.nii.gz\"}, {\"image\": \"nii_gz/1417.nii.gz\"}, {\"image\": \"nii_gz/1415.nii.gz\"}, {\"image\": \"nii_gz/1410.nii.gz\"}, {\"image\": \"nii_gz/1403.nii.gz\"}, {\"image\": \"nii_gz/1399.nii.gz\"}, {\"image\": \"nii_gz/139.nii.gz\"}, {\"image\": \"nii_gz/1383.nii.gz\"}, {\"image\": 
\"nii_gz/1375.nii.gz\"}, {\"image\": \"nii_gz/1372.nii.gz\"}, {\"image\": \"nii_gz/1364.nii.gz\"}, {\"image\": \"nii_gz/1363.nii.gz\"}, {\"image\": \"nii_gz/136.nii.gz\"}, {\"image\": \"nii_gz/1357.nii.gz\"}, {\"image\": \"nii_gz/1355.nii.gz\"}, {\"image\": \"nii_gz/1335.nii.gz\"}, {\"image\": \"nii_gz/1331.nii.gz\"}, {\"image\": \"nii_gz/1329.nii.gz\"}, {\"image\": \"nii_gz/1324.nii.gz\"}, {\"image\": \"nii_gz/1323.nii.gz\"}, {\"image\": \"nii_gz/1315.nii.gz\"}, {\"image\": \"nii_gz/1310.nii.gz\"}, {\"image\": \"nii_gz/1300.nii.gz\"}, {\"image\": \"nii_gz/1298.nii.gz\"}, {\"image\": \"nii_gz/1288.nii.gz\"}, {\"image\": \"nii_gz/1287.nii.gz\"}, {\"image\": \"nii_gz/1284.nii.gz\"}, {\"image\": \"nii_gz/1283.nii.gz\"}, {\"image\": \"nii_gz/128.nii.gz\"}, {\"image\": \"nii_gz/1269.nii.gz\"}, {\"image\": \"nii_gz/1266.nii.gz\"}, {\"image\": \"nii_gz/1264.nii.gz\"}, {\"image\": \"nii_gz/1256.nii.gz\"}, {\"image\": \"nii_gz/1249.nii.gz\"}, {\"image\": \"nii_gz/1248.nii.gz\"}, {\"image\": \"nii_gz/1242.nii.gz\"}, {\"image\": \"nii_gz/1241.nii.gz\"}, {\"image\": \"nii_gz/1239.nii.gz\"}, {\"image\": \"nii_gz/1234.nii.gz\"}, {\"image\": \"nii_gz/1231.nii.gz\"}, {\"image\": \"nii_gz/123.nii.gz\"}, {\"image\": \"nii_gz/1219.nii.gz\"}, {\"image\": \"nii_gz/1216.nii.gz\"}, {\"image\": \"nii_gz/1202.nii.gz\"}, {\"image\": \"nii_gz/120.nii.gz\"}, {\"image\": \"nii_gz/1195.nii.gz\"}, {\"image\": \"nii_gz/119.nii.gz\"}, {\"image\": \"nii_gz/1189.nii.gz\"}, {\"image\": \"nii_gz/1187.nii.gz\"}, {\"image\": \"nii_gz/1182.nii.gz\"}, {\"image\": \"nii_gz/1181.nii.gz\"}, {\"image\": \"nii_gz/1175.nii.gz\"}, {\"image\": \"nii_gz/1173.nii.gz\"}, {\"image\": \"nii_gz/1172.nii.gz\"}, {\"image\": \"nii_gz/117.nii.gz\"}, {\"image\": \"nii_gz/1169.nii.gz\"}, {\"image\": \"nii_gz/1164.nii.gz\"}, {\"image\": \"nii_gz/1162.nii.gz\"}, {\"image\": \"nii_gz/1161.nii.gz\"}, {\"image\": \"nii_gz/116.nii.gz\"}, {\"image\": \"nii_gz/1156.nii.gz\"}, {\"image\": \"nii_gz/1154.nii.gz\"}, {\"image\": 
\"nii_gz/1153.nii.gz\"}, {\"image\": \"nii_gz/1149.nii.gz\"}, {\"image\": \"nii_gz/1148.nii.gz\"}, {\"image\": \"nii_gz/1146.nii.gz\"}, {\"image\": \"nii_gz/1142.nii.gz\"}, {\"image\": \"nii_gz/114.nii.gz\"}, {\"image\": \"nii_gz/1128.nii.gz\"}, {\"image\": \"nii_gz/1119.nii.gz\"}, {\"image\": \"nii_gz/1115.nii.gz\"}, {\"image\": \"nii_gz/1111.nii.gz\"}, {\"image\": \"nii_gz/1107.nii.gz\"}, {\"image\": \"nii_gz/1103.nii.gz\"}, {\"image\": \"nii_gz/1099.nii.gz\"}, {\"image\": \"nii_gz/1094.nii.gz\"}, {\"image\": \"nii_gz/109.nii.gz\"}, {\"image\": \"nii_gz/1079.nii.gz\"}, {\"image\": \"nii_gz/1076.nii.gz\"}, {\"image\": \"nii_gz/10729.nii.gz\"}, {\"image\": \"nii_gz/1072.nii.gz\"}, {\"image\": \"nii_gz/10714.nii.gz\"}, {\"image\": \"nii_gz/10707.nii.gz\"}, {\"image\": \"nii_gz/107.nii.gz\"}, {\"image\": \"nii_gz/10694.nii.gz\"}, {\"image\": \"nii_gz/10681.nii.gz\"}, {\"image\": \"nii_gz/1068.nii.gz\"}, {\"image\": \"nii_gz/1067.nii.gz\"}, {\"image\": \"nii_gz/10665.nii.gz\"}, {\"image\": \"nii_gz/10664.nii.gz\"}, {\"image\": \"nii_gz/10657.nii.gz\"}, {\"image\": \"nii_gz/10653.nii.gz\"}, {\"image\": \"nii_gz/10652.nii.gz\"}, {\"image\": \"nii_gz/10651.nii.gz\"}, {\"image\": \"nii_gz/10650.nii.gz\"}, {\"image\": \"nii_gz/1065.nii.gz\"}, {\"image\": \"nii_gz/1063.nii.gz\"}, {\"image\": \"nii_gz/10621.nii.gz\"}, {\"image\": \"nii_gz/10620.nii.gz\"}, {\"image\": \"nii_gz/10616.nii.gz\"}, {\"image\": \"nii_gz/10610.nii.gz\"}, {\"image\": \"nii_gz/1061.nii.gz\"}, {\"image\": \"nii_gz/10606.nii.gz\"}, {\"image\": \"nii_gz/1060.nii.gz\"}, {\"image\": \"nii_gz/10595.nii.gz\"}, {\"image\": \"nii_gz/10593.nii.gz\"}, {\"image\": \"nii_gz/10591.nii.gz\"}, {\"image\": \"nii_gz/10590.nii.gz\"}, {\"image\": \"nii_gz/10583.nii.gz\"}, {\"image\": \"nii_gz/10582.nii.gz\"}, {\"image\": \"nii_gz/1058.nii.gz\"}, {\"image\": \"nii_gz/10577.nii.gz\"}, {\"image\": \"nii_gz/10576.nii.gz\"}, {\"image\": \"nii_gz/10575.nii.gz\"}, {\"image\": \"nii_gz/10573.nii.gz\"}, {\"image\": 
\"nii_gz/10572.nii.gz\"}, {\"image\": \"nii_gz/10571.nii.gz\"}, {\"image\": \"nii_gz/10570.nii.gz\"}, {\"image\": \"nii_gz/1057.nii.gz\"}, {\"image\": \"nii_gz/10569.nii.gz\"}, {\"image\": \"nii_gz/10561.nii.gz\"}, {\"image\": \"nii_gz/10541.nii.gz\"}, {\"image\": \"nii_gz/10531.nii.gz\"}, {\"image\": \"nii_gz/10529.nii.gz\"}, {\"image\": \"nii_gz/10528.nii.gz\"}, {\"image\": \"nii_gz/10525.nii.gz\"}, {\"image\": \"nii_gz/10522.nii.gz\"}, {\"image\": \"nii_gz/10521.nii.gz\"}, {\"image\": \"nii_gz/1052.nii.gz\"}, {\"image\": \"nii_gz/10516.nii.gz\"}, {\"image\": \"nii_gz/10507.nii.gz\"}, {\"image\": \"nii_gz/10502.nii.gz\"}, {\"image\": \"nii_gz/1050.nii.gz\"}, {\"image\": \"nii_gz/1049.nii.gz\"}, {\"image\": \"nii_gz/10486.nii.gz\"}, {\"image\": \"nii_gz/10484.nii.gz\"}, {\"image\": \"nii_gz/10480.nii.gz\"}, {\"image\": \"nii_gz/1048.nii.gz\"}, {\"image\": \"nii_gz/10471.nii.gz\"}, {\"image\": \"nii_gz/1047.nii.gz\"}, {\"image\": \"nii_gz/10456.nii.gz\"}, {\"image\": \"nii_gz/10451.nii.gz\"}, {\"image\": \"nii_gz/10440.nii.gz\"}, {\"image\": \"nii_gz/10435.nii.gz\"}, {\"image\": \"nii_gz/10429.nii.gz\"}, {\"image\": \"nii_gz/10424.nii.gz\"}, {\"image\": \"nii_gz/10418.nii.gz\"}, {\"image\": \"nii_gz/10417.nii.gz\"}, {\"image\": \"nii_gz/10413.nii.gz\"}, {\"image\": \"nii_gz/10407.nii.gz\"}, {\"image\": \"nii_gz/10403.nii.gz\"}, {\"image\": \"nii_gz/1040.nii.gz\"}, {\"image\": \"nii_gz/10397.nii.gz\"}, {\"image\": \"nii_gz/10389.nii.gz\"}, {\"image\": \"nii_gz/10388.nii.gz\"}, {\"image\": \"nii_gz/10385.nii.gz\"}, {\"image\": \"nii_gz/10383.nii.gz\"}, {\"image\": \"nii_gz/10373.nii.gz\"}, {\"image\": \"nii_gz/10368.nii.gz\"}, {\"image\": \"nii_gz/10364.nii.gz\"}, {\"image\": \"nii_gz/10360.nii.gz\"}, {\"image\": \"nii_gz/10359.nii.gz\"}, {\"image\": \"nii_gz/10351.nii.gz\"}, {\"image\": \"nii_gz/10348.nii.gz\"}, {\"image\": \"nii_gz/10341.nii.gz\"}, {\"image\": \"nii_gz/1034.nii.gz\"}, {\"image\": \"nii_gz/10338.nii.gz\"}, {\"image\": \"nii_gz/10334.nii.gz\"}, 
{\"image\": \"nii_gz/10307.nii.gz\"}, {\"image\": \"nii_gz/10303.nii.gz\"}, {\"image\": \"nii_gz/10300.nii.gz\"}, {\"image\": \"nii_gz/10292.nii.gz\"}, {\"image\": \"nii_gz/10291.nii.gz\"}, {\"image\": \"nii_gz/10288.nii.gz\"}, {\"image\": \"nii_gz/10286.nii.gz\"}, {\"image\": \"nii_gz/10285.nii.gz\"}, {\"image\": \"nii_gz/10282.nii.gz\"}, {\"image\": \"nii_gz/10277.nii.gz\"}, {\"image\": \"nii_gz/10276.nii.gz\"}, {\"image\": \"nii_gz/10273.nii.gz\"}, {\"image\": \"nii_gz/1027.nii.gz\"}, {\"image\": \"nii_gz/10261.nii.gz\"}, {\"image\": \"nii_gz/10260.nii.gz\"}, {\"image\": \"nii_gz/10256.nii.gz\"}, {\"image\": \"nii_gz/10254.nii.gz\"}, {\"image\": \"nii_gz/10248.nii.gz\"}, {\"image\": \"nii_gz/10242.nii.gz\"}, {\"image\": \"nii_gz/10239.nii.gz\"}, {\"image\": \"nii_gz/10226.nii.gz\"}, {\"image\": \"nii_gz/10225.nii.gz\"}, {\"image\": \"nii_gz/10223.nii.gz\"}, {\"image\": \"nii_gz/10221.nii.gz\"}, {\"image\": \"nii_gz/10212.nii.gz\"}, {\"image\": \"nii_gz/10206.nii.gz\"}, {\"image\": \"nii_gz/1020.nii.gz\"}, {\"image\": \"nii_gz/10192.nii.gz\"}, {\"image\": \"nii_gz/10190.nii.gz\"}, {\"image\": \"nii_gz/10177.nii.gz\"}, {\"image\": \"nii_gz/10175.nii.gz\"}, {\"image\": \"nii_gz/10169.nii.gz\"}, {\"image\": \"nii_gz/10164.nii.gz\"}, {\"image\": \"nii_gz/10157.nii.gz\"}, {\"image\": \"nii_gz/10155.nii.gz\"}, {\"image\": \"nii_gz/10147.nii.gz\"}, {\"image\": \"nii_gz/10134.nii.gz\"}, {\"image\": \"nii_gz/10131.nii.gz\"}, {\"image\": \"nii_gz/10127.nii.gz\"}, {\"image\": \"nii_gz/10126.nii.gz\"}, {\"image\": \"nii_gz/10112.nii.gz\"}, {\"image\": \"nii_gz/1011.nii.gz\"}, {\"image\": \"nii_gz/10109.nii.gz\"}, {\"image\": \"nii_gz/10095.nii.gz\"}, {\"image\": \"nii_gz/10085.nii.gz\"}, {\"image\": \"nii_gz/1008.nii.gz\"}, {\"image\": \"nii_gz/10074.nii.gz\"}, {\"image\": \"nii_gz/10072.nii.gz\"}, {\"image\": \"nii_gz/1007.nii.gz\"}, {\"image\": \"nii_gz/10064.nii.gz\"}, {\"image\": \"nii_gz/10058.nii.gz\"}, {\"image\": \"nii_gz/10044.nii.gz\"}, {\"image\": 
\"nii_gz/10036.nii.gz\"}, {\"image\": \"nii_gz/10033.nii.gz\"}, {\"image\": \"nii_gz/10032.nii.gz\"}, {\"image\": \"nii_gz/1003.nii.gz\"}, {\"image\": \"nii_gz/1002.nii.gz\"}, {\"image\": \"nii_gz/10011.nii.gz\"}, {\"image\": \"nii_gz/10010.nii.gz\"}]}"
  },
  {
    "path": "models/voco_head.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport torch\nimport torch.nn as nn\nimport numpy as np\nfrom monai.networks.nets.swin_unetr import *\nfrom monai.networks.blocks import PatchEmbed, UnetOutBlock, UnetrBasicBlock, UnetrUpBlock\nfrom monai.networks.nets.swin_unetr import SwinTransformer as SwinViT\nfrom monai.utils import ensure_tuple_rep\nimport argparse\nimport torch.nn.functional as F\n\n\nclass projection_head(nn.Module):\n    def __init__(self, in_dim=768, hidden_dim=2048, out_dim=2048):\n        super().__init__()\n        self.layer1 = nn.Sequential(\n            nn.Linear(in_dim, hidden_dim),\n            nn.BatchNorm1d(hidden_dim, affine=False, track_running_stats=False),\n            nn.ReLU(inplace=True)\n        )\n        self.layer2 = nn.Sequential(\n            nn.Linear(hidden_dim, hidden_dim),\n            nn.BatchNorm1d(hidden_dim, affine=False, track_running_stats=False),\n            nn.ReLU(inplace=True)\n        )\n        self.layer3 = nn.Sequential(\n            nn.Linear(hidden_dim, out_dim),\n        )\n        self.out_dim = out_dim\n\n    def forward(self, input):\n        if torch.is_tensor(input):\n            x = input\n        else:\n            x = input[-1]\n            b = x.size()[0]\n            x = F.adaptive_avg_pool3d(x, (1, 1, 1)).view(b, -1)\n\n        x = self.layer1(x)\n        x = self.layer2(x)\n        x = self.layer3(x)\n\n        return x\n\n\nclass 
Swin(nn.Module):\n    def __init__(self, args):\n        super(Swin, self).__init__()\n        patch_size = ensure_tuple_rep(2, args.spatial_dims)\n        window_size = ensure_tuple_rep(7, args.spatial_dims)\n        self.swinViT = SwinViT(\n            in_chans=args.in_channels,\n            embed_dim=args.feature_size,\n            window_size=window_size,\n            patch_size=patch_size,\n            depths=[2, 2, 2, 2],\n            num_heads=[3, 6, 12, 24],\n            mlp_ratio=4.0,\n            qkv_bias=True,\n            drop_rate=0.0,\n            attn_drop_rate=0.0,\n            drop_path_rate=args.dropout_path_rate,\n            norm_layer=torch.nn.LayerNorm,\n            use_checkpoint=args.use_checkpoint,\n            spatial_dims=args.spatial_dims,\n            use_v2=True,\n        )\n        norm_name = 'instance'\n        self.encoder1 = UnetrBasicBlock(\n            spatial_dims=args.spatial_dims,\n            in_channels=args.in_channels,\n            out_channels=args.feature_size,\n            kernel_size=3,\n            stride=1,\n            norm_name=norm_name,\n            res_block=True,\n        )\n\n        self.encoder2 = UnetrBasicBlock(\n            spatial_dims=args.spatial_dims,\n            in_channels=args.feature_size,\n            out_channels=args.feature_size,\n            kernel_size=3,\n            stride=1,\n            norm_name=norm_name,\n            res_block=True,\n        )\n\n        self.encoder3 = UnetrBasicBlock(\n            spatial_dims=args.spatial_dims,\n            in_channels=2 * args.feature_size,\n            out_channels=2 * args.feature_size,\n            kernel_size=3,\n            stride=1,\n            norm_name=norm_name,\n            res_block=True,\n        )\n\n        self.encoder4 = UnetrBasicBlock(\n            spatial_dims=args.spatial_dims,\n            in_channels=4 * args.feature_size,\n            out_channels=4 * args.feature_size,\n            kernel_size=3,\n            stride=1,\n 
           norm_name=norm_name,\n            res_block=True,\n        )\n\n        self.encoder10 = UnetrBasicBlock(\n            spatial_dims=args.spatial_dims,\n            in_channels=16 * args.feature_size,\n            out_channels=16 * args.feature_size,\n            kernel_size=3,\n            stride=1,\n            norm_name=norm_name,\n            res_block=True,\n        )\n\n        self.proj_head = projection_head(in_dim=1152, hidden_dim=2048, out_dim=2048)\n\n    def forward_encs(self, encs):\n        b = encs[0].size()[0]\n        outs = []\n        for enc in encs:\n            out = F.adaptive_avg_pool3d(enc, (1, 1, 1))\n            outs.append(out.view(b, -1))\n        outs = torch.cat(outs, dim=1)\n        return outs\n\n    def forward(self, x_in):\n        b = x_in.size()[0]\n        hidden_states_out = self.swinViT(x_in)\n\n        enc0 = self.encoder1(x_in)\n        enc1 = self.encoder2(hidden_states_out[0])\n        enc2 = self.encoder3(hidden_states_out[1])\n        enc3 = self.encoder4(hidden_states_out[2])\n        dec4 = self.encoder10(hidden_states_out[4])\n\n        encs = [enc0, enc1, enc2, enc3, dec4]\n\n        # for enc in encs:\n        #     print(enc.shape)\n\n        out = self.forward_encs(encs)\n        out = self.proj_head(out.view(b, -1))\n        return out\n\n\nclass VoCoHead(nn.Module):\n    def __init__(self, args):\n        super(VoCoHead, self).__init__()\n        self.student = Swin(args)\n        self.teacher = Swin(args)\n\n    @torch.no_grad()\n    def _EMA_update_encoder_teacher(self):\n        ## no scheduler here\n        momentum = 0.9\n        for param, param_t in zip(self.student.parameters(), self.teacher.parameters()):\n            param_t.data = momentum * param_t.data + (1. 
- momentum) * param.data\n\n    def forward(self, img, crops, labels):\n        batch_size = labels.size()[0]\n        total_size = img.size()[0]\n        sw_size = total_size // batch_size\n        pos, neg, total_b_loss = 0.0, 0.0, 0.0\n\n        img, crops = img.as_tensor(), crops.as_tensor()\n        inputs = torch.cat([img, crops], dim=0)\n\n        # here we do norm on all instances\n        students_all = self.student(inputs)\n        self._EMA_update_encoder_teacher()\n        with torch.no_grad():\n            teachers_all = (self.teacher(inputs)).detach()\n\n        x_stu_all, bases_stu_all = students_all[:total_size], students_all[total_size:]\n        x_tea_all, bases_tea_all = teachers_all[:total_size], teachers_all[total_size:]\n\n        for i in range(batch_size):\n            label = labels[i]\n\n            x_stu, bases_stu = x_stu_all[i * sw_size:(i + 1) * sw_size], bases_stu_all[i * 16:(i + 1) * 16]\n            x_tea, bases_tea = x_tea_all[i * sw_size:(i + 1) * sw_size], bases_tea_all[i * 16:(i + 1) * 16]\n\n            logits1 = online_assign(x_stu, bases_tea)\n            logits2 = online_assign(x_tea, bases_stu)\n\n            logits = (logits1 + logits2) * 0.5\n\n            if i == 0:\n                print('labels and logits:', label[0].data, logits[0].data)\n\n            pos_loss, neg_loss = ce_loss(label, logits)\n            pos += pos_loss\n            neg += neg_loss\n\n            b_loss = regularization_loss(bases_stu)\n            total_b_loss += b_loss\n\n        pos, neg = pos / batch_size, neg / batch_size\n        total_b_loss = total_b_loss / batch_size\n\n        return pos, neg, total_b_loss\n\n\ndef online_assign(feats, bases):\n    b, c = feats.size()\n    k, _ = bases.size()\n    assert bases.size()[1] == c, print(feats.size(), bases.size())\n\n    logits = []\n    for i in range(b):\n        feat = feats[i].unsqueeze(0)\n        simi = F.cosine_similarity(feat, bases, dim=1).unsqueeze(0)\n        logits.append(simi)\n  
  logits = torch.concatenate(logits, dim=0)\n    logits = F.relu(logits)\n\n    return logits\n\n\ndef regularization_loss(bases):\n    k, c = bases.size()\n    loss_all = 0\n    num = 0\n    for i in range(k - 1):\n        for j in range(i + 1, k):\n            num += 1\n            simi = F.cosine_similarity(bases[i].unsqueeze(0), bases[j].unsqueeze(0).detach(), dim=1)\n            simi = F.relu(simi)\n            loss_all += simi ** 2\n    loss_all = loss_all / num\n\n    return loss_all\n\n\ndef ce_loss(labels, logits):\n    pos_dis = torch.abs(labels - logits)\n    pos_loss = - labels * torch.log(1 - pos_dis + 1e-6)\n    pos_loss = pos_loss.sum() / (labels.sum() + 1e-6)\n\n    neg_lab = (labels == 0).long()\n    neg_loss = neg_lab * (logits ** 2)\n    neg_loss = neg_loss.sum() / (neg_lab.sum() + 1e-6)\n    return pos_loss, neg_loss\n"
  },
  {
    "path": "optimizers/__init__.py",
    "content": ""
  },
  {
    "path": "optimizers/lr_scheduler.py",
    "content": "# Copyright 2020 - 2021 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nimport warnings\nfrom typing import List\n\nfrom torch import nn as nn\nfrom torch.optim import Adam, Optimizer\nfrom torch.optim.lr_scheduler import LambdaLR, _LRScheduler\n\n__all__ = [\"LinearLR\", \"ExponentialLR\"]\n\n\nclass _LRSchedulerMONAI(_LRScheduler):\n    \"\"\"Base class for increasing the learning rate between two boundaries over a number\n    of iterations\"\"\"\n\n    def __init__(self, optimizer: Optimizer, end_lr: float, num_iter: int, last_epoch: int = -1) -> None:\n        \"\"\"\n        Args:\n            optimizer: wrapped optimizer.\n            end_lr: the final learning rate.\n            num_iter: the number of iterations over which the test occurs.\n            last_epoch: the index of last epoch.\n        Returns:\n            None\n        \"\"\"\n        self.end_lr = end_lr\n        self.num_iter = num_iter\n        super(_LRSchedulerMONAI, self).__init__(optimizer, last_epoch)\n\n\nclass LinearLR(_LRSchedulerMONAI):\n    \"\"\"Linearly increases the learning rate between two boundaries over a number of\n    iterations.\n    \"\"\"\n\n    def get_lr(self):\n        r = self.last_epoch / (self.num_iter - 1)\n        return [base_lr + r * (self.end_lr - base_lr) for base_lr in self.base_lrs]\n\n\nclass ExponentialLR(_LRSchedulerMONAI):\n    \"\"\"Exponentially increases the learning rate between two boundaries over a 
number of\n    iterations.\n    \"\"\"\n\n    def get_lr(self):\n        r = self.last_epoch / (self.num_iter - 1)\n        return [base_lr * (self.end_lr / base_lr) ** r for base_lr in self.base_lrs]\n\n\nclass WarmupCosineSchedule(LambdaLR):\n    \"\"\"Linear warmup and then cosine decay.\n    Based on https://huggingface.co/ implementation.\n    \"\"\"\n\n    def __init__(\n        self, optimizer: Optimizer, warmup_steps: int, t_total: int, cycles: float = 0.5, last_epoch: int = -1\n    ) -> None:\n        \"\"\"\n        Args:\n            optimizer: wrapped optimizer.\n            warmup_steps: number of warmup iterations.\n            t_total: total number of training iterations.\n            cycles: cosine cycles parameter.\n            last_epoch: the index of last epoch.\n        Returns:\n            None\n        \"\"\"\n        self.warmup_steps = warmup_steps\n        self.t_total = t_total\n        self.cycles = cycles\n        super(WarmupCosineSchedule, self).__init__(optimizer, self.lr_lambda, last_epoch)\n\n    def lr_lambda(self, step):\n        if step < self.warmup_steps:\n            return float(step) / float(max(1.0, self.warmup_steps))\n        progress = float(step - self.warmup_steps) / float(max(1, self.t_total - self.warmup_steps))\n        return max(0.0, 0.5 * (1.0 + math.cos(math.pi * float(self.cycles) * 2.0 * progress)))\n\n\nclass LinearWarmupCosineAnnealingLR(_LRScheduler):\n    def __init__(\n        self,\n        optimizer: Optimizer,\n        warmup_epochs: int,\n        max_epochs: int,\n        warmup_start_lr: float = 0.0,\n        eta_min: float = 0.0,\n        last_epoch: int = -1,\n    ) -> None:\n        \"\"\"\n        Args:\n            optimizer (Optimizer): Wrapped optimizer.\n            warmup_epochs (int): Maximum number of iterations for linear warmup\n            max_epochs (int): Maximum number of iterations\n            warmup_start_lr (float): Learning rate to start the linear warmup. 
Default: 0.\n            eta_min (float): Minimum learning rate. Default: 0.\n            last_epoch (int): The index of last epoch. Default: -1.\n        \"\"\"\n        self.warmup_epochs = warmup_epochs\n        self.max_epochs = max_epochs\n        self.warmup_start_lr = warmup_start_lr\n        self.eta_min = eta_min\n\n        super(LinearWarmupCosineAnnealingLR, self).__init__(optimizer, last_epoch)\n\n    def get_lr(self) -> List[float]:\n        \"\"\"\n        Compute learning rate using chainable form of the scheduler\n        \"\"\"\n        if not self._get_lr_called_within_step:\n            warnings.warn(\n                \"To get the last learning rate computed by the scheduler, \" \"please use `get_last_lr()`.\", UserWarning\n            )\n\n        if self.last_epoch == 0:\n            return [self.warmup_start_lr] * len(self.base_lrs)\n        elif self.last_epoch < self.warmup_epochs:\n            return [\n                group[\"lr\"] + (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)\n                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)\n            ]\n        elif self.last_epoch == self.warmup_epochs:\n            return self.base_lrs\n        elif (self.last_epoch - 1 - self.max_epochs) % (2 * (self.max_epochs - self.warmup_epochs)) == 0:\n            return [\n                group[\"lr\"]\n                + (base_lr - self.eta_min) * (1 - math.cos(math.pi / (self.max_epochs - self.warmup_epochs))) / 2\n                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)\n            ]\n\n        return [\n            (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))\n            / (\n                1\n                + math.cos(\n                    math.pi * (self.last_epoch - self.warmup_epochs - 1) / (self.max_epochs - self.warmup_epochs)\n                )\n            )\n            * (group[\"lr\"] - 
self.eta_min)\n            + self.eta_min\n            for group in self.optimizer.param_groups\n        ]\n\n    def _get_closed_form_lr(self) -> List[float]:\n        \"\"\"\n        Called when epoch is passed as a param to the `step` function of the scheduler.\n        \"\"\"\n        if self.last_epoch < self.warmup_epochs:\n            return [\n                self.warmup_start_lr + self.last_epoch * (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)\n                for base_lr in self.base_lrs\n            ]\n\n        return [\n            self.eta_min\n            + 0.5\n            * (base_lr - self.eta_min)\n            * (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))\n            for base_lr in self.base_lrs\n        ]\n"
  },
  {
    "path": "requirements.txt",
    "content": "# packages in environment at /home/lwubf/anaconda3/envs/nnunet:\n#\n# Name                    Version                   Build  Channel\n_libgcc_mutex             0.1                        main  \nabsl-py                   2.1.0                     <pip>\nca-certificates           2023.12.12           h06a4308_0  \ncertifi                   2022.12.7                 <pip>\ncharset-normalizer        2.1.1                     <pip>\ncmake                     3.25.0                    <pip>\ncontourpy                 1.2.0                     <pip>\ncycler                    0.12.1                    <pip>\neinops                    0.7.0                     <pip>\nelasticdeform             0.5.0                     <pip>\nfilelock                  3.9.0                     <pip>\nfonttools                 4.50.0                    <pip>\nfsspec                    2024.2.0                  <pip>\ngrpcio                    1.62.0                    <pip>\nhuggingface-hub           0.21.4                    <pip>\nidna                      3.4                       <pip>\nimportlib-metadata        7.0.1                     <pip>\nimportlib_resources       6.4.0                     <pip>\ninquirerpy                0.3.4                     <pip>\nJinja2                    3.1.2                     <pip>\nkiwisolver                1.4.5                     <pip>\nld_impl_linux-64          2.38                 h1181459_1  \nlibffi                    3.3                  he6710b0_2  \nlibgcc-ng                 9.1.0                hdf63c60_0  \nlibstdcxx-ng              9.1.0                hdf63c60_0  \nlit                       15.0.7                    <pip>\nMarkdown                  3.5.2                     <pip>\nMarkupSafe                2.1.5                     <pip>\nmatplotlib                3.8.3                     <pip>\nmonai                     1.3.0                     <pip>\nmpmath                    1.3.0                     
<pip>\nncurses                   6.3                  h7f8727e_2  \nnetworkx                  3.2.1                     <pip>\nnibabel                   5.2.0                     <pip>\nnumpy                     1.26.4                    <pip>\nopencv-python             4.9.0.80                  <pip>\nopenssl                   1.1.1w               h7f8727e_0  \npackaging                 23.2                      <pip>\npfzy                      0.3.4                     <pip>\npillow                    10.2.0                    <pip>\npip                       23.3.1           py39h06a4308_0  \nprompt-toolkit            3.0.43                    <pip>\nprotobuf                  4.25.3                    <pip>\npyparsing                 3.1.2                     <pip>\npython                    3.9.12               h12debd9_1  \npython-dateutil           2.9.0.post0               <pip>\nPyYAML                    6.0.1                     <pip>\nreadline                  8.1.2                h7f8727e_1  \nrequests                  2.28.1                    <pip>\nscipy                     1.12.0                    <pip>\nsetuptools                68.2.2           py39h06a4308_0  \nSimpleITK                 2.0.2                     <pip>\nsix                       1.16.0                    <pip>\nsqlite                    3.38.5               hc218d9a_0  \nsympy                     1.12                      <pip>\ntensorboard               2.16.2                    <pip>\ntensorboard-data-server   0.7.2                     <pip>\ntensorboardX              2.6.2.2                   <pip>\ntk                        8.6.12               h1ccaba5_0  \ntorch                     2.0.1+cu118               <pip>\ntorchaudio                2.0.2+cu118               <pip>\ntorchvision               0.15.2+cu118              <pip>\ntqdm                      4.66.2                    <pip>\ntriton                    2.0.0                     <pip>\ntyping_extensions         
4.8.0                     <pip>\ntzdata                    2024a                h04d1e81_0  \nurllib3                   1.26.13                   <pip>\nwcwidth                   0.2.13                    <pip>\nWerkzeug                  3.0.1                     <pip>\nwheel                     0.41.2           py39h06a4308_0  \nxz                        5.2.5                h7f8727e_1  \nzipp                      3.17.0                    <pip>\nzlib                      1.2.12               h7f8727e_2  \n"
  },
  {
    "path": "train.sh",
    "content": "now=$(date +\"%Y%m%d_%H%M%S\")\nlogdir=runs/logs_10k\nmkdir -p $logdir\n\ntorchrun --master_port=28802 voco_train.py \\\n    --logdir $logdir | tee $logdir/$now.txt"
  },
  {
    "path": "utils/__init__.py",
    "content": ""
  },
  {
    "path": "utils/data_utils.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom collections.abc import Callable, Sequence\nfrom torch.utils.data import Dataset as _TorchDataset\nfrom torch.utils.data import Subset\nimport collections\nimport numpy as np\nfrom monai.data import *\nimport pickle\nfrom monai.transforms import *\nfrom math import *\n\n\ndef get_loader_1k(args):\n    splits1 = \"/btcv.json\"\n    splits2 = \"/dataset_TCIAcovid19_0.json\"\n    splits3 = \"/dataset_LUNA16_0.json\"\n    # splits3 = \"/dataset_HNSCC_0.json\"\n    # splits4 = \"/dataset_TCIAcolon_v2_0.json\"\n    # splits5 = \"/dataset_LIDC_0.json\"\n    list_dir = \"./jsons\"\n    jsonlist1 = list_dir + splits1\n    jsonlist2 = list_dir + splits2\n    jsonlist3 = list_dir + splits3\n    # jsonlist4 = list_dir + splits4\n    # jsonlist5 = list_dir + splits5\n    datadir1 = \"/data/linshan/CTs/BTCV\"\n    datadir2 = \"/data/linshan/CTs/TCIAcovid19\"\n    datadir3 = \"/data/linshan/CTs/Luna16-jx\"\n    num_workers = 8\n    datalist1 = load_decathlon_datalist(jsonlist1, False, \"training\", base_dir=datadir1)\n    print(\"Dataset 1 BTCV: number of data: {}\".format(len(datalist1)))\n    new_datalist1 = []\n    for item in datalist1:\n        item_dict = {\"image\": item[\"image\"]}\n        new_datalist1.append(item_dict)\n\n    datalist2 = load_decathlon_datalist(jsonlist2, False, \"training\", base_dir=datadir2)\n    print(\"Dataset 2 Covid 19: number of data: 
{}\".format(len(datalist2)))\n\n    datalist3 = load_decathlon_datalist(jsonlist3, False, \"training\", base_dir=datadir3)\n    print(\"Dataset 3 Luna: number of data: {}\".format(len(datalist3)))\n    new_datalist3 = []\n    for item in datalist3:\n        item_dict = {\"image\": item[\"image\"]}\n        new_datalist3.append(item_dict)\n\n    vallist1 = load_decathlon_datalist(jsonlist1, False, \"validation\", base_dir=datadir1)\n    vallist2 = load_decathlon_datalist(jsonlist2, False, \"validation\", base_dir=datadir2)\n    vallist3 = load_decathlon_datalist(jsonlist3, False, \"validation\", base_dir=datadir3)\n    # vallist4 = load_decathlon_datalist(jsonlist4, False, \"validation\", base_dir=datadir4)\n    # vallist5 = load_decathlon_datalist(jsonlist5, False, \"validation\", base_dir=datadir5)\n    datalist = new_datalist1 + datalist2 + new_datalist3  # + datalist4 + datalist5\n    # datalist = new_datalist1\n    val_files = vallist1 + vallist2 + vallist3  # + vallist4 + vallist5\n    print(\"Dataset all training: number of data: {}\".format(len(datalist)))\n    print(\"Dataset all validation: number of data: {}\".format(len(val_files)))\n\n    train_transforms = Compose([LoadImaged(keys=[\"image\"], image_only=True, dtype=np.int16),\n                                EnsureChannelFirstd(keys=[\"image\"]),\n                                Orientationd(keys=[\"image\"], axcodes=\"RAS\"),\n                                ScaleIntensityRanged(\n                                    keys=[\"image\"], a_min=args.a_min, a_max=args.a_max,\n                                    b_min=args.b_min, b_max=args.b_max, clip=True),\n                                SpatialPadd(keys=\"image\", spatial_size=[args.roi_x, args.roi_y,\n                                                                        args.roi_z]),\n                                CropForegroundd(keys=[\"image\"], source_key=\"image\"),\n                                SpatialCropd(keys=[\"image\"], roi_start=[60, 
80, 0],\n                                             roi_end=[440, 380, 10000]),\n                                Resized(keys=[\"image\"], mode=\"trilinear\", align_corners=True,\n                                        spatial_size=(384, 384, 96)),\n\n                                # Random\n                                RandShiftIntensityd(keys=\"image\", offsets=0.1, prob=0.0),\n                                CropForegroundd(keys=\"image\", source_key=\"image\", select_fn=threshold),\n                                Resized(keys=\"image\", mode=\"bilinear\", align_corners=True,\n                                        spatial_size=(384, 384, 96)),\n\n                                VoCoAugmentation(args, aug=True)\n                                ])\n\n    val_transforms = Compose([LoadImaged(keys=[\"image\"], image_only=True, dtype=np.int16),\n                              EnsureChannelFirstd(keys=[\"image\"]),\n                              Orientationd(keys=[\"image\"], axcodes=\"RAS\"),\n                              ScaleIntensityRanged(\n                                  keys=[\"image\"], a_min=args.a_min, a_max=args.a_max,\n                                  b_min=args.b_min, b_max=args.b_max, clip=True),\n                              SpatialPadd(keys=\"image\", spatial_size=[args.roi_x, args.roi_y,\n                                                                      args.roi_z]),\n                              CropForegroundd(keys=[\"image\"], source_key=\"image\"),\n                              SpatialCropd(keys=[\"image\"], roi_start=[60, 80, 0],\n                                           roi_end=[440, 380, 10000]),\n                              Resized(keys=[\"image\"], mode=\"trilinear\", align_corners=True,\n                                      spatial_size=(384, 384, 96)),\n                              VoCoAugmentation(args, aug=False)\n                              ])\n\n    if args.cache_dataset:\n        print(\"Using MONAI Cache 
Dataset\")\n        train_ds = CacheDataset(data=datalist, transform=train_transforms,\n                                cache_rate=0.5, num_workers=num_workers)\n    elif args.smartcache_dataset:\n        print(\"Using MONAI SmartCache Dataset\")\n        train_ds = SmartCacheDataset(\n            data=datalist,\n            transform=train_transforms,\n            replace_rate=1.0,\n            cache_num=2 * args.batch_size * args.sw_batch_size,\n        )\n    else:\n        print(\"Using Persistent dataset\")\n        # train_ds = Dataset(data=datalist, transform=train_transforms)\n        train_ds = PersistentDataset(data=datalist,\n                                     transform=train_transforms,\n                                     pickle_protocol=pickle.HIGHEST_PROTOCOL,\n                                     cache_dir='/data/linshan/cache/1.5k')\n\n    if args.distributed:\n        train_sampler = DistributedSampler(dataset=train_ds, even_divisible=True, shuffle=True)\n    else:\n        train_sampler = None\n    train_loader = DataLoader(\n        train_ds, batch_size=args.batch_size, num_workers=num_workers, sampler=train_sampler,\n        drop_last=True, pin_memory=True\n    )\n\n    val_ds = PersistentDataset(data=val_files,\n                               transform=val_transforms,\n                               pickle_protocol=pickle.HIGHEST_PROTOCOL,\n                               cache_dir='/data/linshan/cache/1.5k')\n    val_loader = DataLoader(val_ds, batch_size=args.batch_size, num_workers=num_workers, shuffle=False, drop_last=True)\n\n    return train_loader, val_loader\n\n\ndef random_split(ls):\n    length = len(ls)\n    train_ls = ls[:ceil(length * 0.9)]\n    val_ls = ls[ceil(length * 0.9):]\n    return train_ls, val_ls\n\n\ndef get_loader(args):\n    splits1 = \"/btcv.json\"\n    splits2 = \"/dataset_TCIAcovid19_0.json\"\n    splits3 = \"/dataset_LUNA16_0.json\"\n    splits4 = \"/stoic21.json\"\n    splits5 = 
\"/Totalsegmentator_dataset.json\"\n    splits6 = \"/flare23.json\"\n    splits7 = \"/HNSCC.json\"\n\n    list_dir = \"./jsons/\"\n    jsonlist1 = list_dir + splits1\n    jsonlist2 = list_dir + splits2\n    jsonlist3 = list_dir + splits3\n    jsonlist4 = list_dir + splits4\n    jsonlist5 = list_dir + splits5\n    jsonlist6 = list_dir + splits6\n    jsonlist7 = list_dir + splits7\n\n    datadir1 = \"./data/BTCV\"\n    datadir2 = \"./data/TCIAcovid19\"\n    datadir3 = \"./data/Luna16-jx\"\n    datadir4 = \"./data/stoic21\"\n    datadir5 = \"./data/Totalsegmentator_dataset\"\n    datadir6 = \"./data/Flare23\"\n    datadir7 = \"./data/HNSCC_convert_v1\"\n\n    num_workers = 16\n    datalist1 = load_decathlon_datalist(jsonlist1, False, \"training\", base_dir=datadir1)\n    print(\"Dataset 1 BTCV: number of data: {}\".format(len(datalist1)))\n    new_datalist1 = []\n    for item in datalist1:\n        item_dict = {\"image\": item[\"image\"]}\n        new_datalist1.append(item_dict)\n\n    datalist2 = load_decathlon_datalist(jsonlist2, False, \"training\", base_dir=datadir2)\n    print(\"Dataset 2 Covid 19: number of data: {}\".format(len(datalist2)))\n\n    datalist3 = load_decathlon_datalist(jsonlist3, False, \"training\", base_dir=datadir3)\n    print(\"Dataset 3 Luna: number of data: {}\".format(len(datalist3)))\n    new_datalist3 = []\n    for item in datalist3:\n        item_dict = {\"image\": item[\"image\"]}\n        new_datalist3.append(item_dict)\n\n    datalist4 = load_decathlon_datalist(jsonlist4, False, \"training\", base_dir=datadir4)\n    # datalist4, vallist4 = random_split(datalist4)\n    print(\"Dataset 4 TCIA Colon: number of data: {}\".format(len(datalist4)))\n\n    datalist5 = load_decathlon_datalist(jsonlist5, False, \"training\", base_dir=datadir5)\n    # datalist5, vallist5 = random_split(datalist5)\n    print(\"Dataset 5 Totalsegmentator: number of data: {}\".format(len(datalist5)))\n\n    datalist6 = load_decathlon_datalist(jsonlist6, False, 
\"training\", base_dir=datadir6)\n    # datalist6, vallist6 = random_split(datalist6)\n    print(\"Dataset 6 Flare23: number of data: {}\".format(len(datalist6)))\n\n    datalist7 = load_decathlon_datalist(jsonlist7, False, \"training\", base_dir=datadir7)\n    # datalist7, vallist7 = random_split(datalist7)\n    print(\"Dataset 7 HNSCC: number of data: {}\".format(len(datalist7)))\n\n    vallist1 = load_decathlon_datalist(jsonlist1, False, \"validation\", base_dir=datadir1)\n    vallist2 = load_decathlon_datalist(jsonlist2, False, \"validation\", base_dir=datadir2)\n    vallist3 = load_decathlon_datalist(jsonlist3, False, \"validation\", base_dir=datadir3)\n\n    datalist = new_datalist1 + datalist2 + new_datalist3 + datalist4 + datalist5 + datalist6 + datalist7\n    val_files = vallist1 + vallist2 + vallist3  # + vallist4 + vallist5 + vallist6 + vallist7\n    print(\"Dataset all training: number of data: {}\".format(len(datalist)))\n    print(\"Dataset all validation: number of data: {}\".format(len(val_files)))\n\n    train_transforms = Compose([LoadImaged(keys=[\"image\"], image_only=True, dtype=np.int16),\n                                EnsureChannelFirstd(keys=[\"image\"]),\n                                Orientationd(keys=[\"image\"], axcodes=\"RAS\"),\n                                ScaleIntensityRanged(\n                                    keys=[\"image\"], a_min=args.a_min, a_max=args.a_max,\n                                    b_min=args.b_min, b_max=args.b_max, clip=True),\n                                CropForegroundd(keys=\"image\", source_key=\"image\", select_fn=threshold),\n                                Resized(keys=\"image\", mode=\"bilinear\", align_corners=True,\n                                        spatial_size=(384, 384, 96)),\n                                VoCoAugmentation(args, aug=True)\n                                ])\n\n    if args.cache_dataset:\n        print(\"Using MONAI Cache Dataset\")\n        train_ds = 
CacheDataset(data=datalist, transform=train_transforms,\n                                cache_rate=0.5, num_workers=num_workers)\n    elif args.smartcache_dataset:\n        print(\"Using MONAI SmartCache Dataset\")\n        train_ds = SmartCacheDataset(\n            data=datalist,\n            transform=train_transforms,\n            replace_rate=1.0,\n            cache_num=2 * args.batch_size * args.sw_batch_size,\n        )\n    else:\n        print(\"Using Persistent dataset\")\n        # train_ds = Dataset(data=datalist, transform=train_transforms)\n        train_ds = PersistentDataset(data=datalist,\n                                     transform=train_transforms,\n                                     pickle_protocol=pickle.HIGHEST_PROTOCOL,\n                                     cache_dir='/data/linshan/cache/10k')\n\n    if args.distributed:\n        train_sampler = DistributedSampler(dataset=train_ds, even_divisible=True, shuffle=True)\n    else:\n        train_sampler = None\n\n    train_loader = DataLoader(\n        train_ds, batch_size=args.batch_size, num_workers=num_workers, sampler=train_sampler, shuffle=True,\n        drop_last=True, pin_memory=True\n    )\n\n    return train_loader\n\n\ndef threshold(x):\n    # threshold at 0\n    return x > 0.3\n\n\nclass VoCoAugmentation():\n    def __init__(self, args, aug):\n        self.args = args\n        self.aug = aug\n\n    def __call__(self, x_in):\n        crops_trans = get_crop_transform(roi_small=self.args.roi_x, aug=self.aug)\n\n        vanilla_trans, labels = get_vanilla_transform(num=self.args.sw_batch_size,\n                                                      roi_small=self.args.roi_x, aug=self.aug)\n\n        imgs = []\n        for trans in vanilla_trans:\n            img = trans(x_in)\n            imgs.append(img)\n\n        crops = []\n        for trans in crops_trans:\n            crop = trans(x_in)\n            crops.append(crop)\n\n        return imgs, labels, crops\n\n\ndef 
get_vanilla_transform(num=2, num_crops=4, roi_small=64, roi=96, max_roi=384, aug=False):\n    vanilla_trans = []\n    labels = []\n    for i in range(num):\n        center_x, center_y, label = get_position_label(roi=roi,\n                                                       max_roi=max_roi,\n                                                       num_crops=num_crops)\n        if aug:\n            trans = Compose([\n                SpatialCropd(keys=['image'],\n                             roi_center=[center_x, center_y, roi // 2],\n                             roi_size=[roi, roi, roi]),\n                Resized(keys=[\"image\"], mode=\"bilinear\", align_corners=True,\n                        spatial_size=(roi_small, roi_small, roi_small)),\n                RandFlipd(keys=[\"image\"], prob=0.2, spatial_axis=0),\n                RandFlipd(keys=[\"image\"], prob=0.2, spatial_axis=1),\n                RandFlipd(keys=[\"image\"], prob=0.2, spatial_axis=2),\n                RandRotate90d(keys=[\"image\"], prob=0.2, max_k=3),\n                RandShiftIntensityd(keys=\"image\", offsets=0.1, prob=0.1),\n                ToTensord(keys=[\"image\"])])\n        else:\n            trans = Compose([\n                SpatialCropd(keys=['image'],\n                             roi_center=[center_x, center_y, roi // 2],\n                             roi_size=[roi, roi, roi]),\n                Resized(keys=[\"image\"], mode=\"bilinear\", align_corners=True,\n                        spatial_size=(roi_small, roi_small, roi_small)),\n                ToTensord(keys=[\"image\"])])\n\n        vanilla_trans.append(trans)\n        labels.append(label)\n\n    labels = np.concatenate(labels, 0).reshape(num, num_crops * num_crops)\n\n    return vanilla_trans, labels\n\n\ndef get_crop_transform(num=4, roi_small=64, roi=96, aug=False):\n    voco_trans = []\n    # not symmetric at axis x !!!\n    for i in range(num):\n        for j in range(num):\n            center_x = (i + 1 / 2) * roi\n        
    center_y = (j + 1 / 2) * roi\n            center_z = roi // 2\n\n            if aug:\n                trans = Compose([\n                    SpatialCropd(keys=['image'],\n                                 roi_center=[center_x, center_y, center_z],\n                                 roi_size=[roi, roi, roi]),\n                    Resized(keys=[\"image\"],\n                            mode=\"bilinear\",\n                            align_corners=True,\n                            spatial_size=(roi_small, roi_small, roi_small)\n                            ),\n                    Resized(keys=[\"image\"], mode=\"bilinear\", align_corners=True,\n                            spatial_size=(roi_small, roi_small, roi_small)),\n                    RandFlipd(keys=[\"image\"], prob=0.2, spatial_axis=0),\n                    RandFlipd(keys=[\"image\"], prob=0.2, spatial_axis=1),\n                    RandFlipd(keys=[\"image\"], prob=0.2, spatial_axis=2),\n                    RandRotate90d(keys=[\"image\"], prob=0.2, max_k=3),\n                    RandShiftIntensityd(keys=\"image\", offsets=0.1, prob=0.1),\n                    ToTensord(keys=[\"image\"])],\n                )\n            else:\n                trans = Compose([\n                    SpatialCropd(keys=['image'],\n                                 roi_center=[center_x, center_y, center_z],\n                                 roi_size=[roi, roi, roi]),\n                    Resized(keys=[\"image\"],\n                            mode=\"bilinear\",\n                            align_corners=True,\n                            spatial_size=(roi_small, roi_small, roi_small)\n                            ),\n                    ToTensord(keys=[\"image\"])],\n                )\n\n            voco_trans.append(trans)\n\n    return voco_trans\n\n\ndef get_position_label(roi=96, base_roi=96, max_roi=384, num_crops=4):\n    half = roi // 2\n    center_x, center_y = np.random.randint(low=half, high=max_roi - half), \\\n        
np.random.randint(low=half, high=max_roi - half)\n    # center_x, center_y = np.random.randint(low=half, high=half+1), \\\n    #     np.random.randint(low=half, high=half+1)\n    # center_x, center_y = roi + half, roi + half\n    # print(center_x, center_y)\n\n    x_min, x_max = center_x - half, center_x + half\n    y_min, y_max = center_y - half, center_y + half\n\n    total_area = roi * roi\n    labels = []\n    for i in range(num_crops):\n        for j in range(num_crops):\n            crop_x_min, crop_x_max = i * base_roi, (i + 1) * base_roi\n            crop_y_min, crop_y_max = j * base_roi, (j + 1) * base_roi\n\n            dx = min(crop_x_max, x_max) - max(crop_x_min, x_min)\n            dy = min(crop_y_max, y_max) - max(crop_y_min, y_min)\n            if dx <= 0 or dy <= 0:\n                area = 0\n            else:\n                area = (dx * dy) / total_area\n            labels.append(area)\n\n    labels = np.asarray(labels).reshape(1, num_crops * num_crops)\n\n    return center_x, center_y, labels\n\n\nif __name__ == '__main__':\n    center_x, center_y, labels = get_position_label()\n    print(center_x, center_y, labels)"
  },
  {
    "path": "utils/ops.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport numpy as np\nimport torch\nfrom numpy.random import randint\n\n\ndef patch_rand_drop(args, x, x_rep=None, max_drop=0.3, max_block_sz=0.25, tolr=0.05):\n    c, h, w, z = x.size()\n    n_drop_pix = np.random.uniform(0, max_drop) * h * w * z\n    mx_blk_height = int(h * max_block_sz)\n    mx_blk_width = int(w * max_block_sz)\n    mx_blk_slices = int(z * max_block_sz)\n    tolr = (int(tolr * h), int(tolr * w), int(tolr * z))\n    total_pix = 0\n    while total_pix < n_drop_pix:\n        rnd_r = randint(0, h - tolr[0])\n        rnd_c = randint(0, w - tolr[1])\n        rnd_s = randint(0, z - tolr[2])\n        rnd_h = min(randint(tolr[0], mx_blk_height) + rnd_r, h)\n        rnd_w = min(randint(tolr[1], mx_blk_width) + rnd_c, w)\n        rnd_z = min(randint(tolr[2], mx_blk_slices) + rnd_s, z)\n        if x_rep is None:\n            x_uninitialized = torch.empty(\n                (c, rnd_h - rnd_r, rnd_w - rnd_c, rnd_z - rnd_s), dtype=x.dtype, device=args.local_rank\n            ).normal_()\n            x_uninitialized = (x_uninitialized - torch.min(x_uninitialized)) / (\n                torch.max(x_uninitialized) - torch.min(x_uninitialized)\n            )\n            x[:, rnd_r:rnd_h, rnd_c:rnd_w, rnd_s:rnd_z] = x_uninitialized\n        else:\n            x[:, rnd_r:rnd_h, rnd_c:rnd_w, rnd_s:rnd_z] = x_rep[:, rnd_r:rnd_h, rnd_c:rnd_w, rnd_s:rnd_z]\n        
total_pix = total_pix + (rnd_h - rnd_r) * (rnd_w - rnd_c) * (rnd_z - rnd_s)\n    return x\n\n\ndef rot_rand(args, x_s):\n    img_n = x_s.size()[0]\n    x_aug = x_s.detach().clone()\n    device = torch.device(f\"cuda:{args.local_rank}\")\n    x_rot = torch.zeros(img_n).long().to(device)\n    for i in range(img_n):\n        x = x_s[i]\n        orientation = np.random.randint(0, 4)\n        if orientation == 0:\n            pass\n        elif orientation == 1:\n            x = x.rot90(1, (2, 3))\n        elif orientation == 2:\n            x = x.rot90(2, (2, 3))\n        elif orientation == 3:\n            x = x.rot90(3, (2, 3))\n        x_aug[i] = x\n        x_rot[i] = orientation\n    return x_aug, x_rot\n\n\ndef aug_rand(args, samples):\n    img_n = samples.size()[0]\n    x_aug = samples.detach().clone()\n    for i in range(img_n):\n        x_aug[i] = patch_rand_drop(args, x_aug[i])\n        idx_rnd = randint(0, img_n)\n        if idx_rnd != i:\n            x_aug[i] = patch_rand_drop(args, x_aug[i], x_aug[idx_rnd])\n    return x_aug\n\n\ndef concat_image(imgs):\n    output = []\n    for img in imgs:\n        img = img['image']\n        output.append(img)\n    output = torch.concatenate(output, dim=1)\n    bs, sw_s, x, y, z = output.size()\n    output = output.view(-1, 1, x, y, z)\n    return output\n    \n\ndef concat_label(labels):\n    output = []\n    for lab in labels:\n        output.append(lab)\n    output = torch.concatenate(output, dim=0)\n    return output"
  },
  {
    "path": "utils/utils.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport numpy as np\nimport scipy.ndimage as ndimage\nimport torch\n\n\ndef resample_3d(img, target_size):\n    imx, imy, imz = img.shape\n    tx, ty, tz = target_size\n    zoom_ratio = (float(tx) / float(imx), float(ty) / float(imy), float(tz) / float(imz))\n    img_resampled = ndimage.zoom(img, zoom_ratio, order=0, prefilter=False)\n    return img_resampled\n\n\ndef dice(x, y):\n    intersect = np.sum(np.sum(np.sum(x * y)))\n    y_sum = np.sum(np.sum(np.sum(y)))\n    if y_sum == 0:\n        return 0.0\n    x_sum = np.sum(np.sum(np.sum(x)))\n    return 2 * intersect / (x_sum + y_sum)\n\n\nclass AverageMeter(object):\n    def __init__(self):\n        self.reset()\n\n    def reset(self):\n        self.val = 0\n        self.avg = 0\n        self.sum = 0\n        self.count = 0\n\n    def update(self, val, n=1):\n        self.val = val\n        self.sum += val * n\n        self.count += n\n        self.avg = np.where(self.count > 0, self.sum / self.count, self.sum)\n\n\ndef distributed_all_gather(\n    tensor_list, valid_batch_size=None, out_numpy=False, world_size=None, no_barrier=False, is_valid=None\n):\n    if world_size is None:\n        world_size = torch.distributed.get_world_size()\n    if valid_batch_size is not None:\n        valid_batch_size = min(valid_batch_size, world_size)\n    elif is_valid is not None:\n        is_valid = torch.tensor(bool(is_valid), 
dtype=torch.bool, device=tensor_list[0].device)\n    if not no_barrier:\n        torch.distributed.barrier()\n    tensor_list_out = []\n    with torch.no_grad():\n        if is_valid is not None:\n            is_valid_list = [torch.zeros_like(is_valid) for _ in range(world_size)]\n            torch.distributed.all_gather(is_valid_list, is_valid)\n            is_valid = [x.item() for x in is_valid_list]\n        for tensor in tensor_list:\n            gather_list = [torch.zeros_like(tensor) for _ in range(world_size)]\n            torch.distributed.all_gather(gather_list, tensor)\n            if valid_batch_size is not None:\n                gather_list = gather_list[:valid_batch_size]\n            elif is_valid is not None:\n                gather_list = [g for g, v in zip(gather_list, is_valid_list) if v]\n            if out_numpy:\n                gather_list = [t.cpu().numpy() for t in gather_list]\n            tensor_list_out.append(gather_list)\n    return tensor_list_out\n"
  },
  {
    "path": "voco_train.py",
    "content": "# Copyright 2020 - 2022 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#     http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport argparse\nimport os\nfrom time import time\nimport logging\nimport numpy as np\nimport torch\nimport torch.distributed as dist\nimport torch.optim as optim\nfrom models.voco_head import VoCoHead\nfrom optimizers.lr_scheduler import WarmupCosineSchedule\nfrom torch.cuda.amp import GradScaler, autocast\nfrom torch.nn.parallel import DistributedDataParallel\nfrom torch.utils.tensorboard import SummaryWriter\nfrom utils.data_utils import *\nfrom utils.ops import *\nfrom utils.utils import AverageMeter, distributed_all_gather\nimport torch.multiprocessing\n\ntorch.multiprocessing.set_sharing_strategy('file_system')\nos.environ['CUDA_VISIBLE_DEVICES'] = \"0\"\nos.environ['MASTER_ADDR'] = 'localhost'\nos.environ['MASTER_PORT'] = '28890'\n\nimport resource\n\nrlimit = resource.getrlimit(resource.RLIMIT_NOFILE)\nresource.setrlimit(resource.RLIMIT_NOFILE, (8192, rlimit[1]))\nprint('Setting resource limit:', str(resource.getrlimit(resource.RLIMIT_NOFILE)))\n\n\ndef main():\n    def save_ckp(state, checkpoint_dir):\n        torch.save(state, checkpoint_dir)\n\n    def train(args, global_step, train_loader, val_best, scaler):\n        model.train()\n        loss_train = []\n        run_loss = AverageMeter()\n        pos_avg, neg_avg, base_avg = AverageMeter(), AverageMeter(), AverageMeter()\n\n        for step, batch in enumerate(train_loader):\n            t1 = 
time()\n            img, labels, crops = batch\n            img, crops = concat_image(img), concat_image(crops)\n            # print(img.size(), crops.size(), labels.size())\n            img, crops, labels = img.cuda(), crops.cuda(), labels.cuda()\n\n            with autocast(enabled=args.amp):\n                # loss = model(img, crops, labels)\n                pos, neg, b_loss = model(img, crops, labels)\n                loss = pos + neg + b_loss\n                loss_train.append(loss.item())\n\n            if args.amp:\n                scaler.scale(loss).backward()\n                scaler.step(optimizer)\n                scaler.update()\n            else:\n                loss.backward()\n                if args.grad_clip:\n                    torch.nn.utils.clip_grad_norm_(model.parameters(), args.max_grad_norm)\n                optimizer.step()\n\n            if args.lrdecay:\n                scheduler.step()\n\n            optimizer.zero_grad()\n\n            run_loss.update(loss.item(), n=args.batch_size)\n\n            pos_avg.update(pos.item(), n=args.batch_size)\n            neg_avg.update(neg.item(), n=args.batch_size)\n            base_avg.update(b_loss.item(), n=args.batch_size)\n\n            lr = optimizer.param_groups[0][\"lr\"]\n\n            if args.distributed:\n                if dist.get_rank() == 0:\n                    print(\"Step:{}/{}, Loss:{:.4f}, Time:{:.4f}\".format\n                          (global_step, args.num_steps, loss, time() - t1))\n            else:\n                print(\"Step:{}/{}, Loss:{:.4f}, pos:{:.4f}, neg:{:.4f}, base:{:.4f}, \"\n                      \"lr:{:.8f}, Time:{:.4f}\".format(global_step, args.num_steps,\n                                                               run_loss.avg, pos_avg.avg, neg_avg.avg, base_avg.avg,\n                                                               lr, time() - t1))\n\n            global_step += 1\n            if args.distributed:\n                val_cond = 
(dist.get_rank() == 0) and (global_step % args.eval_num == 0)\n            else:\n                val_cond = global_step % args.eval_num == 0\n\n            freq = 1000\n            val_freq = global_step % freq == 0\n            if val_cond:\n                checkpoint = {\n                    \"global_step\": global_step,\n                    \"state_dict\": model.state_dict(),\n                    \"optimizer\": optimizer,\n                }\n                save_ckp(checkpoint, logdir + \"/model_current_epoch.pt\")\n\n            if val_freq:\n                checkpoint = {\n                    \"global_step\": global_step,\n                    \"state_dict\": model.state_dict(),\n                    \"optimizer\": optimizer,\n                }\n                save_ckp(checkpoint, logdir + \"/model_step\" + str(global_step) + \".pt\")\n\n        return global_step, loss, val_best\n\n    roi = 64\n    parser = argparse.ArgumentParser(description=\"PyTorch Training\")\n    parser.add_argument(\"--logdir\", default=\"logs\", type=str, help=\"directory to save logs\")\n    parser.add_argument(\"--epochs\", default=100, type=int, help=\"number of training epochs\")\n    parser.add_argument(\"--num_steps\", default=250000, type=int, help=\"number of training iterations\")\n    parser.add_argument(\"--eval_num\", default=100, type=int, help=\"evaluation frequency\")\n    parser.add_argument(\"--warmup_steps\", default=5000, type=int, help=\"warmup steps\")\n    parser.add_argument(\"--in_channels\", default=1, type=int, help=\"number of input channels\")\n    parser.add_argument(\"--feature_size\", default=48, type=int, help=\"embedding size\")\n    parser.add_argument(\"--dropout_path_rate\", default=0.0, type=float, help=\"drop path rate\")\n    parser.add_argument(\"--use_checkpoint\", default=True, help=\"use gradient checkpointing to save memory\")\n    parser.add_argument(\"--spatial_dims\", default=3, type=int, help=\"spatial dimension of input data\")\n    
parser.add_argument(\"--a_min\", default=-175.0, type=float, help=\"a_min in ScaleIntensityRanged\")\n    parser.add_argument(\"--a_max\", default=250.0, type=float, help=\"a_max in ScaleIntensityRanged\")\n    parser.add_argument(\"--b_min\", default=0.0, type=float, help=\"b_min in ScaleIntensityRanged\")\n    parser.add_argument(\"--b_max\", default=1.0, type=float, help=\"b_max in ScaleIntensityRanged\")\n    parser.add_argument(\"--space_x\", default=1.5, type=float, help=\"spacing in x direction\")\n    parser.add_argument(\"--space_y\", default=1.5, type=float, help=\"spacing in y direction\")\n    parser.add_argument(\"--space_z\", default=1.5, type=float, help=\"spacing in z direction\")\n    parser.add_argument(\"--roi_x\", default=roi, type=int, help=\"roi size in x direction\")\n    parser.add_argument(\"--roi_y\", default=roi, type=int, help=\"roi size in y direction\")\n    parser.add_argument(\"--roi_z\", default=roi, type=int, help=\"roi size in z direction\")\n    parser.add_argument(\"--batch_size\", default=2, type=int, help=\"number of batch size\")\n    parser.add_argument(\"--sw_batch_size\", default=2, type=int, help=\"number of sliding window batch size\")\n    parser.add_argument(\"--lr\", default=1e-4, type=float, help=\"learning rate\")\n    parser.add_argument(\"--decay\", default=0.1, type=float, help=\"decay rate\")\n    parser.add_argument(\"--momentum\", default=0.9, type=float, help=\"momentum\")\n    parser.add_argument(\"--lrdecay\", default=True, help=\"enable learning rate decay\")\n    parser.add_argument(\"--max_grad_norm\", default=1.0, type=float, help=\"maximum gradient norm\")\n    parser.add_argument(\"--loss_type\", default=\"SSL\", type=str)\n    parser.add_argument(\"--opt\", default=\"adamw\", type=str, help=\"optimization algorithm\")\n    parser.add_argument(\"--lr_schedule\", default=\"warmup_cosine\", type=str)\n    # './runs/logs_10k/model_current_epoch.pt'\n    parser.add_argument(\"--resume\", default=None, 
type=str,\n                        help=\"resume training\")\n    parser.add_argument(\"--local_rank\", type=int, default=0, help=\"local rank\")\n    parser.add_argument(\"--grad_clip\", action=\"store_true\", help=\"gradient clip\")\n    parser.add_argument(\"--noamp\", default=True, help=\"do NOT use amp for training\")\n    parser.add_argument(\"--dist-url\", default=\"env://\", help=\"url used to set up distributed training\")\n    parser.add_argument(\"--smartcache_dataset\", default=False, help=\"use monai smartcache Dataset\")\n    parser.add_argument(\"--cache_dataset\", action=\"store_true\", help=\"use monai cache Dataset\")\n\n    args = parser.parse_args()\n    logdir = args.logdir\n\n    torch.cuda.set_device(0)\n\n    args.amp = True\n    torch.backends.cudnn.benchmark = True\n    # torch.autograd.set_detect_anomaly(True)\n    args.distributed = False\n    if \"WORLD_SIZE\" in os.environ:\n        args.distributed = int(os.environ[\"WORLD_SIZE\"]) > 1\n    args.world_size = 1\n    args.rank = 0\n\n    if args.distributed:\n        args.device = \"cuda:%d\" % args.local_rank\n        torch.cuda.set_device(args.local_rank)\n        torch.distributed.init_process_group(backend=\"nccl\", init_method=args.dist_url)\n        args.world_size = torch.distributed.get_world_size()\n        args.rank = torch.distributed.get_rank()\n        print(\n            \"Training in distributed mode with multiple processes, 1 GPU per process. 
Process %d, total %d.\"\n            % (args.rank, args.world_size)\n        )\n    else:\n        print(\"Training with a single process on 1 GPUs.\")\n    assert args.rank >= 0\n\n    if args.rank == 0:\n        os.makedirs(logdir, exist_ok=True)\n    logger = init_log('global', logging.INFO)\n    logger.propagate = 0\n\n    model = VoCoHead(args)\n    model.cuda()\n\n    if args.opt == \"adam\":\n        optimizer = optim.Adam(params=model.parameters(), lr=args.lr, weight_decay=args.decay)\n\n    elif args.opt == \"adamw\":\n        optimizer = optim.AdamW(params=model.parameters(), lr=args.lr, amsgrad=True)\n\n    elif args.opt == \"sgd\":\n        optimizer = optim.SGD(params=model.parameters(), lr=args.lr, momentum=args.momentum, weight_decay=args.decay)\n\n    global_step = 0\n    if args.resume:\n        print('resume from previous checkpoints')\n        model_pth = args.resume\n        model_dict = torch.load(model_pth)\n        model.load_state_dict(model_dict, strict=False)\n        global_step = model_dict[\"global_step\"]\n        # optimizer = model_dict[\"optimizer\"][\"state_dict\"]\n\n    if args.lrdecay:\n        if args.lr_schedule == \"warmup_cosine\":\n            scheduler = WarmupCosineSchedule(optimizer, warmup_steps=args.warmup_steps, t_total=args.num_steps)\n\n        elif args.lr_schedule == \"poly\":\n\n            def lambdas(epoch):\n                return (1 - float(epoch) / float(args.epochs)) ** 0.9\n\n            scheduler = torch.optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=lambdas)\n\n    if args.distributed:\n        model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model)\n        model = DistributedDataParallel(model, device_ids=[args.local_rank])\n\n    train_loader = get_loader(args)\n\n    best_val = 1e8\n    if args.amp:\n        scaler = GradScaler()\n    else:\n        scaler = None\n    while global_step < args.num_steps:\n        global_step, loss, best_val = train(args, global_step, train_loader, best_val, 
scaler)\n    checkpoint = {\"epoch\": args.epochs, \"state_dict\": model.state_dict(), \"optimizer\": optimizer.state_dict()}\n\n    if args.distributed:\n        if dist.get_rank() == 0:\n            torch.save(model.state_dict(), logdir + \"final_model.pth\")\n        dist.destroy_process_group()\n    else:\n        torch.save(model.state_dict(), logdir + \"final_model.pth\")\n    save_ckp(checkpoint, logdir + \"/model_final_epoch.pt\")\n\n\nlogs = set()\n\n\ndef init_log(name, level=logging.INFO):\n    if (name, level) in logs:\n        return\n    logs.add((name, level))\n    logger = logging.getLogger(name)\n    logger.setLevel(level)\n    ch = logging.StreamHandler()\n    ch.setLevel(level)\n    if \"SLURM_PROCID\" in os.environ:\n        rank = int(os.environ[\"SLURM_PROCID\"])\n        logger.addFilter(lambda record: rank == 0)\n    else:\n        rank = 0\n    format_str = \"[%(asctime)s][%(levelname)8s] %(message)s\"\n    formatter = logging.Formatter(format_str)\n    ch.setFormatter(formatter)\n    logger.addHandler(ch)\n    return logger\n\n\nif __name__ == \"__main__\":\n    main()\n"
  }
]